branch_name
stringclasses
149 values
text
stringlengths
23
89.3M
directory_id
stringlengths
40
40
languages
listlengths
1
19
num_files
int64
1
11.8k
repo_language
stringclasses
38 values
repo_name
stringlengths
6
114
revision_id
stringlengths
40
40
snapshot_id
stringlengths
40
40
refs/heads/master
<file_sep>package pages; import io.qameta.allure.Step; import lombok.extern.log4j.Log4j2; import org.openqa.selenium.TimeoutException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import org.openqa.selenium.support.ui.ExpectedConditions; import static org.testng.Assert.fail; @Log4j2 public class LoginPageFactory extends BasePage { @FindBy(xpath = "//*[@data-test='username']") WebElement usernameInput; @FindBy(xpath = "//*[@data-test='password']") WebElement passwordInput; @FindBy(id = "login-button") WebElement loginBtn; @FindBy(css = "[data-test=error]") WebElement errorMsg; public LoginPageFactory(WebDriver driver) { super(driver); } @Override @Step("Open Login page") public LoginPageFactory openPage() { openPage(BASE_URL); return this; } @Override public LoginPageFactory waitForPageOpened() { try { wait.until(ExpectedConditions.visibilityOf(loginBtn)); } catch (TimeoutException e) { log.error(String.format("Login page is not opened. Locator %s in not found", loginBtn)); fail("Page is not loaded. 
Not found locator: " + loginBtn); } return this; } @Step("Login with username: '{username}' and password: '{password}' ") public ProductPage login(String username, String password) { attemptToLogin(username, password); return new ProductPage(driver); } @Step("Attempt to login") public LoginPageFactory attemptToLogin(String username, String password) { log.info(String.format("Logging with username '%s' and password '%s'", username, password)); usernameInput.sendKeys(username); passwordInput.sendKeys(password); loginBtn.click(); return this; } public String getErrorMessage() { return errorMsg.getText(); } } <file_sep><?xml version="1.0" encoding="UTF-8"?> <project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://maven.apache.org/POM/4.0.0" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>org.example</groupId> <artifactId>SauceDemoN</artifactId> <version>1.0-SNAPSHOT</version> <properties> <selenium.version>3.141.59</selenium.version> <testng.version>7.0.0</testng.version> <maven-surefire-plugin.version>3.0.0-M4</maven-surefire-plugin.version> <maven-compiler-plugin.version>8</maven-compiler-plugin.version> <maven-site-plugin.version>3.3</maven-site-plugin.version> <maven-install-plugin.version>2.4</maven-install-plugin.version> <maven-deploy-plugin.version>2.7</maven-deploy-plugin.version> <aspectjweaver.version>1.9.1</aspectjweaver.version> <allure-maven.version>2.10.0</allure-maven.version> <allure-testng.version>2.7.0</allure-testng.version> <io.github.bonigarcia.version>4.2.2</io.github.bonigarcia.version> <!-- <suiteXmlFile></suiteXmlFile>--> </properties> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <version>3.6.1</version> <configuration> <source>${maven-compiler-plugin.version}</source> <target>${maven-compiler-plugin.version}</target> </configuration> </plugin> <plugin> 
<groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> <version>${maven-surefire-plugin.version}</version> <configuration> <systemPropertyVariables> <allure.results.directory>target/allure-results</allure.results.directory> <allure.link.issue.pattern>https://jira.com</allure.link.issue.pattern> <allure.link.tms.pattern>https://wiki.com</allure.link.tms.pattern> </systemPropertyVariables> <argLine> <!-- -javaagent:${settings.localRepository}/org/aspectj/aspectjweaver/${aspectjweaver.version}/aspectjweaver-${aspectjweaver.version}.jar--> <!-- -Xmx256m--> -Xms512m -Xmx1024m </argLine> <!-- <suiteXmlFiles>--> <!-- <suiteXmlFile>${suiteXmlFile}</suiteXmlFile>--> <!-- </suiteXmlFiles>--> </configuration> <dependencies> <dependency> <groupId>org.aspectj</groupId> <artifactId>aspectjweaver</artifactId> <version>${aspectjweaver.version}</version> </dependency> </dependencies> </plugin> <plugin> <groupId>io.qameta.allure</groupId> <artifactId>allure-maven</artifactId> <version>${allure-maven.version}</version> </plugin> </plugins> </build> <dependencies> <!-- https://mvnrepository.com/artifact/org.testng/testng --> <dependency> <groupId>org.testng</groupId> <artifactId>testng</artifactId> <version>${testng.version}</version> <scope>compile</scope> </dependency> <!-- https://mvnrepository.com/artifact/org.seleniumhq.selenium/selenium-java --> <dependency> <groupId>org.seleniumhq.selenium</groupId> <artifactId>selenium-java</artifactId> <version>${selenium.version}</version> </dependency> <dependency> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-site-plugin</artifactId> <version>${maven-site-plugin.version}</version> <type>maven-plugin</type> </dependency> <dependency> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-install-plugin</artifactId> <version>${maven-install-plugin.version}</version> <type>maven-plugin</type> </dependency> <dependency> <groupId>org.apache.maven.plugins</groupId> 
<artifactId>maven-deploy-plugin</artifactId> <version>${maven-deploy-plugin.version}</version> <type>maven-plugin</type> </dependency> <!-- https://mvnrepository.com/artifact/org.projectlombok/lombok --> <dependency> <groupId>org.projectlombok</groupId> <artifactId>lombok</artifactId> <version>1.18.16</version> <scope>provided</scope> </dependency> <!-- https://mvnrepository.com/artifact/org.apache.logging.log4j/log4j-core --> <!-- <dependency>--> <!-- <groupId>org.apache.logging.log4j</groupId>--> <!-- <artifactId>log4j-core</artifactId>--> <!-- <version>2.13.0</version>--> <!-- </dependency>--> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>4.12</version> <scope>compile</scope> </dependency> <!-- https://mvnrepository.com/artifact/io.github.bonigarcia/webdrivermanager --> <dependency> <groupId>io.github.bonigarcia</groupId> <artifactId>webdrivermanager</artifactId> <version>${io.github.bonigarcia.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/commons-io/commons-io --> <dependency> <groupId>commons-io</groupId> <artifactId>commons-io</artifactId> <version>2.4</version> </dependency> <!--jsoup parsing html page--> <dependency> <groupId>org.jsoup</groupId> <artifactId>jsoup</artifactId> <version>1.8.3</version> </dependency> <dependency> <groupId>io.qameta.allure</groupId> <artifactId>allure-testng</artifactId> <version>${allure-testng.version}</version> <scope>test</scope> </dependency> <!--Advanced http parsing tool--> <dependency> <groupId>net.sourceforge.htmlunit</groupId> <artifactId>htmlunit</artifactId> <version>2.19</version> </dependency> <dependency> <groupId>io.qameta.allure</groupId> <artifactId>allure-java-commons</artifactId> <version>2.7.0</version> <scope>compile</scope> </dependency> <dependency> <groupId>org.apache.logging.log4j</groupId> <artifactId>log4j-core</artifactId> <version>2.13.3</version> </dependency> <!--Для считывания yaml-файлов --> <dependency> 
<groupId>com.fasterxml.jackson.dataformat</groupId> <artifactId>jackson-dataformat-yaml</artifactId> <version>2.12.1</version> </dependency> </dependencies> </project><file_sep>package utils; import lombok.extern.log4j.Log4j2; import org.openqa.selenium.chrome.ChromeOptions; @Log4j2 public class CapabilitiesGenerator { public static ChromeOptions getChromeOptions() { ChromeOptions options = new ChromeOptions(); // if (System.getProperty("headless").equals(true)) { // options.addArguments("--headless"); // } log.debug(" Used browser: " + options.getBrowserName()); options.addArguments("--ignore-certificate-errors"); options.addArguments("--disable-popup-blocking"); options.addArguments("--disable-notifications"); options.addArguments("-incognito"); options.addArguments("--headless"); options.setAcceptInsecureCerts(true); options.addArguments("--disable-extensions"); return options; } } <file_sep>package tests; import io.github.bonigarcia.wdm.WebDriverManager; import lombok.extern.log4j.Log4j2; import org.openqa.selenium.SessionNotCreatedException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.chrome.ChromeDriver; import org.testng.ITestContext; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Listeners; import pages.*; import steps.ProductStep; import tests.test_data.TestConstants; import utils.CapabilitiesGenerator; import utils.TestListener; import java.util.concurrent.TimeUnit; import static org.testng.Assert.fail; @Log4j2 @Listeners(TestListener.class) public class BaseTest implements TestConstants { protected ProductStep productStep; WebDriver driver; ProductPage productPage; CartPage cartPage; ItemPage itemPage; CheckoutInfoPage checkoutInfoPage; CheckoutOverviewPage checkoutOverviewPage; LoginPageFactory loginPageFactory; @BeforeMethod(description = "Setting up before test") public void setUp(ITestContext context) { WebDriverManager.chromedriver().setup(); try { driver = 
new ChromeDriver(CapabilitiesGenerator.getChromeOptions()); } catch (SessionNotCreatedException e) { log.fatal(e.getLocalizedMessage()); fail("Browser is not opened"); } productStep = new ProductStep(driver); driver.manage().window().maximize(); driver.manage().timeouts().implicitlyWait(IMPLICIT_WAIT, TimeUnit.SECONDS); log.debug("Setting implicit wait timeout = " + IMPLICIT_WAIT); createInstances(); String variable = "driver"; log.debug("Setting driver into context with variable name " + variable); context.setAttribute(variable, driver); } @AfterMethod(alwaysRun = true, description = "Closing browser") public void tearDown() { if (driver != null) { driver.quit(); } } public void createInstances() { productPage = new ProductPage(driver); cartPage = new CartPage(driver); itemPage = new ItemPage(driver); checkoutInfoPage = new CheckoutInfoPage(driver); checkoutOverviewPage = new CheckoutOverviewPage(driver); loginPageFactory = new LoginPageFactory(driver); } } <file_sep>package tests; import org.testng.annotations.Test; import tests.test_data.TestDataProviders; import utils.*; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; import static pages.ProductPage.PRODUCT_LABEL_LOCATOR; public class LoginTest extends BaseTest { @Test(description = "Entering valid credentials to login", retryAnalyzer = Retry.class) public void isSuccessfulLogin() { loginPageFactory.openPage() .waitForPageOpened() .login(System.getenv().getOrDefault("username", PropertyReader.getProperty("username")), System.getenv().getOrDefault("password", PropertyReader.getProperty("password"))) .waitForPageOpened(); assertTrue(productPage.isPageOpened(), "Product page is not opened. 
Locator is not found: " + PRODUCT_LABEL_LOCATOR); } @Test(description = "Error message should appear when logging with invalid credentials", dataProvider = "InvalidTestDataFotLogin", dataProviderClass = TestDataProviders.class) public void errorMessageShouldAppearWhenLogging(String username, String password, String errorMessage) { loginPageFactory.openPage() .waitForPageOpened() .attemptToLogin(username, password); String actualErrorMessage = loginPageFactory.getErrorMessage(); assertEquals(actualErrorMessage, errorMessage, "Invalid error message is displayed: " + actualErrorMessage); } }
43dd649d31b4ccbdaf4566c8393d5a7bf64fbe8a
[ "Java", "Maven POM" ]
5
Java
EvgeniyMakuta/SauceDemo
b175a1cfe54fa22bfa2b0647881d5c6bfdec6c86
b47060b81317f0a24597f5336970d0e7c95bbfcb
refs/heads/master
<file_sep>Laravel 5.6 Run php artisan migrate php artisan db:seed admin username: <EMAIL> pass: <PASSWORD> <file_sep><?php namespace App\Models; use Illuminate\Database\Eloquent\Model; use Illuminate\Database\Eloquent\SoftDeletes; class Voucher extends Model { use SoftDeletes; /** * The database table used by the model. * * @var string */ protected $table = 'vouchers'; /** * The attributes that are mass assignable. * * @var array */ protected $fillable = [ 'code', 'discount_percentage', 'used' ]; /** * The attributes that should be mutated to dates. * * @var array */ protected $dates = ['created_at', 'updated_at', 'deleted_at']; } <file_sep><?php return [ 'index_title' => 'Operating systems', 'create_title' => 'Add Operating system', ];<file_sep><?php namespace App\Helpers; use App\User as UserModel; use Illuminate\Support\Collection; class User { public function isAdmin(UserModel $user): bool { return $user->is_admin === 1; } }<file_sep><?php return [ 'title' => 'Pricing Plans', ];<file_sep><?php namespace App\Repositories; use App\Models\Feature as FeatureModel; use Illuminate\Support\Collection; class Feature { public function getAll(): Collection { return FeatureModel::all(); } public function getById(int $id): FeatureModel { return FeatureModel::findOrFail($id); } public function store(array $data): FeatureModel { \DB::beginTransaction(); try { $feature = with(new FeatureModel); $feature->fill($data); $feature->save(); \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return $feature; } public function update(array $data, int $id): FeatureModel { \DB::beginTransaction(); try { $feature = FeatureModel::findOrFail($id); $feature->fill($data); $feature->save(); \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return $feature; } public function destroy(int $id) { return FeatureModel::findOrFail($id)->delete(); } }<file_sep><?php use Illuminate\Database\Seeder; use App\Repositories\OperatingSystem as OperatingSystemRepo; class 
OsTableSeeder extends Seeder { /** * Run the database seeds. * * @return void */ public function run() { $osRepo = app(OperatingSystemRepo::class); $oses = [ 'Windows', 'macOS', 'iOS', 'Linux', 'Android' ]; foreach ($oses as $key => $osName) { $osRepo->store([ 'name' => $osName ]); } } } <file_sep><?php return [ 'index_title' => 'Pricing Plans', 'create_title' => 'Add Pricing Plan', ];<file_sep><?php namespace App\Repositories; use App\Models\Plan as PlanModel; use App\Helpers\Detection; use App\Repositories\OperatingSystem as OperatingSystemRepo; use App\Http\Resources\Plan as PlanResource; use Illuminate\Support\Collection; class Plan { public function store(array $data, array $featureIds): PlanModel { \DB::beginTransaction(); try { $plan = with(new PlanModel); $plan->fill($data); $plan->save(); if(!empty($featureIds)){ $plan->features()->sync($featureIds); } \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return $plan; } public function update(array $data, int $id, array $featureIds): PlanModel { \DB::beginTransaction(); try { $plan = PlanModel::findOrFail($id); $plan->fill($data); $plan->save(); if(!empty($featureIds)){ $plan->features()->sync($featureIds); } \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return $plan; } public function destroy(int $id) { return PlanModel::findOrFail($id)->delete(); } public function getAll(): Collection { return PlanModel::all(); } public function getById(int $id): PlanModel { return PlanModel::findOrFail($id); } public function getByOs(): Collection { $plans = $this->getAll(); $osFullName = app(Detection::class)->getOS($_SERVER['HTTP_USER_AGENT']); $osName = explode(' ', $osFullName)[0]; $os = app(OperatingSystemRepo::class)->getByName($osName); $plans = $this->filterByOs($plans, $os->id); return $plans; } public function filterByOs(Collection $plans, int $osId): Collection { return $plans->filter(function($item, $key) use ($osId){ return $item->operating_system_id === $osId; }); } 
public function parseForList(Collection $plans) { return PlanResource::collection($plans); } }<file_sep><?php namespace App\Repositories; use App\User as UserModel; use Illuminate\Support\Collection; class User { public function store(array $data): UserModel { \DB::beginTransaction(); try { $user = with(new UserModel); $user->fill($data); $user->save(); \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return $user; } }<file_sep><?php namespace App\Http\Controllers; use Illuminate\Http\Request; use App\Http\Requests\OperatingSystem\StoreOperatingSystem as StoreOperatingSystemRequest; use App\Http\Requests\OperatingSystem\UpdateOperatingSystem as UpdateOperatingSystemRequest; use App\Repositories\OperatingSystem as OperatingSystemRepo; use App\Models\OperatingSystem as OperatingSystemModel; class OperatigSystemController extends Controller { /** * Display a listing of the resource. * * @return \Illuminate\Http\Response */ public function index(Request $request, OperatingSystemRepo $osRepo) { $data = []; $data['operating_systems'] = $osRepo->getAll(); return view('pages.admin.os.index', $data); } /** * Show the form for creating a new resource. * * @return \Illuminate\Http\Response */ public function create(Request $request) { return view('pages.admin.os.create'); } /** * Store a newly created resource in storage. * * @param \Illuminate\Http\Request $request * @return \Illuminate\Http\Response */ public function store(StoreOperatingSystemRequest $request, OperatingSystemRepo $osRepo) { $data = $request->input('os'); $osRepo->store($data); return redirect()->route('admin.os.index') ->with('status-success', 'Operating System added succesfully'); } /** * Display the specified resource. * * @param int $id * @return \Illuminate\Http\Response */ public function show($id) { // } /** * Show the form for editing the specified resource. 
* * @param int $id * @return \Illuminate\Http\Response */ public function edit(OperatingSystemModel $os, OperatingSystemRepo $osRepo) { $data = []; $data['os'] = $os; return view('pages.admin.os.edit', $data); } /** * Update the specified resource in storage. * * @param \Illuminate\Http\Request $request * @param int $id * @return \Illuminate\Http\Response */ public function update(UpdateOperatingSystemRequest $request, OperatingSystemModel $os, OperatingSystemRepo $osRepo) { $data = $request->input('os'); $osRepo->update($data, $os->id); return redirect()->route('admin.os.index') ->with('status-success', 'Operating System updated succesfully'); } /** * Remove the specified resource from storage. * * @param int $id * @return \Illuminate\Http\Response */ public function destroy(OperatingSystemModel $os, OperatingSystemRepo $osRepo) { $osRepo->destroy($os->id); return redirect()->route('admin.os.index') ->with('status-success', 'Operating System deleted succesfully'); } } <file_sep><?php use Illuminate\Database\Seeder; use App\Repositories\Plan as PlanRepo; class PlanTableSeeder extends Seeder { /** * Run the database seeds. * * @return void */ public function run() { $planRepo = app(PlanRepo::class); $plans = [ [ 'data' => ['name' => 'Basic', 'operating_system_id' => 1], 'feature_ids' => [1] ], [ 'data' => ['name' => 'Advanced', 'operating_system_id' => 1], 'feature_ids' => [1, 2, 3] ], [ 'data' => ['name' => 'Basic', 'operating_system_id' => 2], 'feature_ids' => [4] ], [ 'data' => ['name' => 'Advanced', 'operating_system_id' => 2], 'feature_ids' => [4, 5, 6] ] ]; foreach ($plans as $plan) { $planRepo->store($plan['data'], $plan['feature_ids']); } } } <file_sep><?php return [ 'index_title' => 'Features', 'create_title' => 'Add Feature', ];<file_sep><?php use Illuminate\Database\Seeder; use App\Repositories\Voucher as VoucherRepo; class VoucherTableSeeder extends Seeder { /** * Run the database seeds. 
* * @return void */ public function run() { $voucherRepo = app(VoucherRepo::class); $vouchers = [ [ 'code' => 'abc', 'discount_percentage' => 5 ], [ 'code' => 'xyz', 'discount_percentage' => 10 ], [ 'code' => '123', 'discount_percentage' => 15 ] ]; foreach ($vouchers as $key => $voucher) { $voucherRepo->store($voucher); } } } <file_sep><?php namespace App\Http\Controllers; use Illuminate\Http\Request; use App\Helpers\Detection; class HomeController extends Controller { /** * Show the application dashboard. * * @return \Illuminate\Http\Response */ public function index(Request $request, Detection $detectionHelper) { $data = [ 'os' => $detectionHelper->getOS($_SERVER['HTTP_USER_AGENT']), 'browser' => $_SERVER['HTTP_USER_AGENT'], 'ip' => $_SERVER['REMOTE_ADDR'], 'mobile_device'=> $detectionHelper->isMobileDevice($_SERVER['HTTP_USER_AGENT']) ? 'Yes' :'No', 'source_cookie_data' => $request->cookie('source') ?? null, 'campaing_cookie_data' => $request->cookie('campaing') ?? null, 'voucher_code_cookie_data' => $request->cookie('voucher_code') ?? null ]; return view('pages.home', $data); } public function setCookieValues(Request $request, string $source, string $campaing, string $voucherCode = '') { $campaignCookie = cookie('campaing', $campaing); $sourceCookie = cookie('source', $source); $voucherCookie = cookie('voucher_code', $voucherCode); return redirect('home')->with('status-success', 'Cookies set') ->cookie($sourceCookie) ->cookie($campaignCookie) ->cookie($voucherCookie); } } <file_sep><?php namespace App\Http\Controllers; use Illuminate\Http\Request; use App\Http\Requests\Feature\StoreFeature as StoreFeatureRequest; use App\Http\Requests\Feature\UpdateFeature as UpdateFeatureRequest; use App\Repositories\Feature as FeatureRepo; use App\Models\Feature as FeatureModel; class FeatureController extends Controller { /** * Display a listing of the resource. 
* * @return \Illuminate\Http\Response */ public function index(Request $request, FeatureRepo $featureRepo) { $data = []; $data['features'] = $featureRepo->getAll(); return view('pages.admin.feature.index', $data); } /** * Show the form for creating a new resource. * * @return \Illuminate\Http\Response */ public function create(Request $request) { return view('pages.admin.feature.create'); } /** * Store a newly created resource in storage. * * @param \Illuminate\Http\Request $request * @return \Illuminate\Http\Response */ public function store(StoreFeatureRequest $request, FeatureRepo $featureRepo) { $data = $request->input('feature'); $featureRepo->store($data); return redirect()->route('admin.feature.index') ->with('status-success', 'Feature added succesfully'); } /** * Display the specified resource. * * @param int $id * @return \Illuminate\Http\Response */ public function show($id) { // } /** * Show the form for editing the specified resource. * * @param int $id * @return \Illuminate\Http\Response */ public function edit(FeatureModel $feature, FeatureRepo $featureRepo) { $data = []; $data['feature'] = $feature; return view('pages.admin.feature.edit', $data); } /** * Update the specified resource in storage. * * @param \Illuminate\Http\Request $request * @param int $id * @return \Illuminate\Http\Response */ public function update(UpdateFeatureRequest $request, FeatureModel $feature, FeatureRepo $featureRepo) { $data = $request->input('feature'); $featureRepo->update($data, $feature->id); return redirect()->route('admin.feature.index') ->with('status-success', 'Feature updated succesfully'); } /** * Remove the specified resource from storage. 
* * @param int $id * @return \Illuminate\Http\Response */ public function destroy(FeatureModel $feature, FeatureRepo $featureRepo) { $featureRepo->destroy($feature->id); return redirect()->route('admin.feature.index') ->with('status-success', 'Feature deleted succesfully'); } } <file_sep><?php return [ 'index_title' => 'Vouchers', 'create_title' => 'Add Voucher', //form labels and messages 'code' => 'Code', 'apply_button' => 'Apply', 'apply_success_message' => 'Voucher Succesfully applied', 'apply_error_message' => 'Error ', 'apply_voucher_header' => 'Apply voucher' ];<file_sep><?php namespace App\Repositories; use App\Models\OperatingSystem as OperatingSystemModel; use Illuminate\Support\Collection; class OperatingSystem { public function getAll(): Collection { return OperatingSystemModel::all(); } public function getById(int $id): OperatingSystemModel { return OperatingSystemModel::findOrFail($id); } public function store(array $data): OperatingSystemModel { \DB::beginTransaction(); try { $os = with(new OperatingSystemModel); $os->fill($data); $os->save(); \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return $os; } public function update(array $data, int $id): OperatingSystemModel { \DB::beginTransaction(); try { $os = OperatingSystemModel::findOrFail($id); $os->fill($data); $os->save(); \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return $os; } public function destroy(int $id) { return OperatingSystemModel::findOrFail($id)->delete(); } public function getByName(string $name) { return OperatingSystemModel::where('name', $name)->get()->first(); } }<file_sep><?php namespace App\Repositories; use App\Models\Voucher as VoucherModel; use Illuminate\Support\Collection; class Voucher { public function store(array $data): VoucherModel { \DB::beginTransaction(); try { $voucher = with(new VoucherModel); $voucher->fill($data); $voucher->save(); \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return 
$voucher; } public function update(array $data, int $id): VoucherModel { \DB::beginTransaction(); try { $voucher = VoucherModel::findOrFail($id); $voucher->fill($data); $voucher->save(); \DB::commit(); } catch(\Exception $e) { \DB::rollBack(); throw $e; } return $voucher; } public function destroy(int $id) { return VoucherModel::findOrFail($id)->delete(); } public function getAll(): Collection { return VoucherModel::all(); } public function getById(int $id): VoucherModel { return VoucherModel::findOrFail($id); } public function getByCode(string $code) { return VoucherModel::where('code', '=', $code)->get()->first(); } public function parseForList(Collection $vouchers): Collection { return $vouchers->map(function ($item, $key) { $voucher = app(\stdClass::class); $voucher->id = $item->id; $voucher->code = $item->code; $voucher->discount_percentage = $item->discount_percentage; $voucher->used = ($item->used === 1) ? true : false; return $voucher; }); } }<file_sep><?php use Illuminate\Database\Seeder; use App\Repositories\User as UserRepo; class UsersTableSeeder extends Seeder { /** * Run the database seeds. * * @return void */ public function run() { $adminUserData = [ 'name' => 'admin', 'email' => '<EMAIL>', 'password' => bcrypt('<PASSWORD>'), 'is_admin' => 1 ]; $userRepo = app(UserRepo::class); $userRepo->store($adminUserData); } } <file_sep><?php namespace App\Http\Controllers; use Illuminate\Http\Request; use App\Http\Requests\Plan\StorePlan as StorePlanRequest; use App\Http\Requests\Plan\UpdatePlan as UpdatePlanRequest; use App\Repositories\Plan as PlanRepo; use App\Repositories\Feature as FeatureRepo; use App\Repositories\OperatingSystem as OperatingSystemRepo; use App\Models\Plan as PlanModel; class PlanController extends Controller { /** * Display a listing of the resource. 
* * @return \Illuminate\Http\Response */ public function index(Request $request, PlanRepo $planRepo) { $data = []; $plans = $planRepo->getAll(); $data['plans'] = $planRepo->parseForList($plans)->toArray($request); return view('pages.admin.plan.index', $data); } /** * Show the form for creating a new resource. * * @return \Illuminate\Http\Response */ public function create(Request $request, OperatingSystemRepo $osRepo, FeatureRepo $featureRepo) { $data = []; $data['operating_systems'] = $osRepo->getAll(); $data['features'] = $featureRepo->getAll(); return view('pages.admin.plan.create', $data); } /** * Store a newly created resource in storage. * * @param \Illuminate\Http\Request $request * @return \Illuminate\Http\Response */ public function store(StorePlanRequest $request, PlanRepo $planRepo) { $data = $request->input('plan'); $featureIds = $request->input('feature_ids'); $planRepo->store($data, $featureIds); return redirect()->route('admin.plan.index') ->with('status-success', 'Plan added succesfully'); } /** * Display the specified resource. * * @param int $id * @return \Illuminate\Http\Response */ public function show($id) { // } /** * Show the form for editing the specified resource. * * @param int $id * @return \Illuminate\Http\Response */ public function edit(PlanModel $plan, PlanRepo $planRepo, OperatingSystemRepo $osRepo, FeatureRepo $featureRepo) { $data = []; $data['plan'] = $plan; $data['operating_systems'] = $osRepo->getAll(); $data['features'] = $featureRepo->getAll(); return view('pages.admin.plan.edit', $data); } /** * Update the specified resource in storage. 
* * @param \Illuminate\Http\Request $request * @param int $id * @return \Illuminate\Http\Response */ public function update(UpdatePlanRequest $request, PlanModel $plan, PlanRepo $planRepo) { $data = $request->input('plan'); $featureIds = $request->input('feature_ids'); $planRepo->update($data, $plan->id, $featureIds); return redirect()->route('admin.plan.index') ->with('status-success', 'Plan updated succesfully'); } /** * Remove the specified resource from storage. * * @param int $id * @return \Illuminate\Http\Response */ public function destroy(PlanModel $plan, PlanRepo $planRepo) { $planRepo->destroy($plan->id); return redirect()->route('admin.plan.index') ->with('status-success', 'Plan deleted succesfully'); } public function planList(Request $request) { $data = []; $data['applyVoucherUrl'] = route('voucher.apply'); $data['getPlanListUrl'] = route('get-plan-list'); return view('pages.pricing_plans', $data); } public function getPlanList(Request $request, PlanRepo $planRepo) { $data = []; $plans = $planRepo->getByOs(); $data['plans'] = $planRepo->parseForList($plans)->toArray($request); return $data; } } <file_sep><?php use Illuminate\Database\Seeder; use App\Repositories\Feature as FeatureRepo; class FeatureTableSeeder extends Seeder { /** * Run the database seeds. 
* * @return void */ public function run() { $featureRepo = app(FeatureRepo::class); $features = [ [ 'name' => 'Hide Your IP', 'price' => 50 ], [ 'name' => 'WiFi Protection', 'price' => 51 ], [ 'name' => 'No Logs Policy', 'price' => 52 ], [ 'name' => 'Secure Transactions and Conversations', 'price' => 53 ], [ 'name' => 'Access Restricted Content', 'price' => 54 ], [ 'name' => 'Block Ads', 'price' => 55 ], [ 'name' => 'Block Malicious Content', 'price' => 56 ], [ 'name' => 'Block Online Tracking', 'price' => 57 ] ]; foreach ($features as $key => $feature) { $featureRepo->store($feature); } } } <file_sep><?php namespace App\Helpers; class Detection { /** * https://gist.github.com/philipptempel/4322656 * Get the user's operating system * * @param string $userAgent The user's user agent * * @return string Returns the user's operating system as human readable string, * if it cannot be determined 'n/a' is returned. */ public function getOS($userAgent) { // Create list of operating systems with operating system name as array key $oses = array ( 'iPhone' => '(iPhone)', 'Windows 3.11' => 'Win16', 'Windows 95' => '(Windows 95)|(Win95)|(Windows_95)', 'Windows 98' => '(Windows 98)|(Win98)', 'Windows 2000' => '(Windows NT 5.0)|(Windows 2000)', 'Windows XP' => '(Windows NT 5.1)|(Windows XP)', 'Windows 2003' => '(Windows NT 5.2)', 'Windows Vista' => '(Windows NT 6.0)|(Windows Vista)', 'Windows 7' => '(Windows NT 6.1)|(Windows 7)', 'Windows NT 4.0' => '(Windows NT 4.0)|(WinNT4.0)|(WinNT)|(Windows NT)', 'Windows ME' => 'Windows ME', 'Open BSD' => 'OpenBSD', 'Sun OS' => 'SunOS', 'Linux' => '(Linux)|(X11)', 'Safari' => '(Safari)', 'Mac OS' => '(Mac_PowerPC)|(Macintosh)', 'QNX' => 'QNX', 'BeOS' => 'BeOS', 'OS/2' => 'OS/2', 'Search Bot' => '(nuhk)|(Googlebot)|(Yammybot)|(Openbot)|(Slurp/cat)|(msnbot)|(ia_archiver)' ); // Loop through $oses array foreach($oses as $os => $preg_pattern) { // Use regular expressions to check operating system type if ( preg_match('@' . $preg_pattern . 
'@', $userAgent) ) { // Operating system was matched so return $oses key return $os; } } // Cannot find operating system so return Unknown return 'n/a'; } /** * https://www.codespeedy.com/simple-php-code-to-detect-mobile-device/ */ public function isMobileDevice($userAgent): bool { $devicesString = "/(android|avantgo|blackberry|bolt|boost|cricket|docomo|fone|hiptop|mini|mobi|palm|phone|pie|tablet|up\.browser|up\.link|webos|wos)/i"; return preg_match($devicesString, $userAgent); } }<file_sep><?php namespace App\Http\Controllers; use Illuminate\Http\Request; use App\Http\Requests\Voucher\StoreVoucher as StoreVoucherRequest; use App\Http\Requests\Voucher\UpdateVoucher as UpdateVoucherRequest; use App\Http\Requests\Voucher\ApplyVoucher as ApplyVoucherRequest; use App\Repositories\Voucher as VoucherRepo; use App\Repositories\OperatingSystem as OperatingSystemRepo; use App\Models\Voucher as VoucherModel; class VoucherController extends Controller { /** * Display a listing of the resource. * * @return \Illuminate\Http\Response */ public function index(Request $request, VoucherRepo $voucherRepo) { $data = []; $vouchers = $voucherRepo->getAll(); $data['vouchers'] = $voucherRepo->parseForList($vouchers); return view('pages.admin.voucher.index', $data); } /** * Show the form for creating a new resource. * * @return \Illuminate\Http\Response */ public function create(Request $request) { return view('pages.admin.voucher.create'); } /** * Store a newly created resource in storage. * * @param \Illuminate\Http\Request $request * @return \Illuminate\Http\Response */ public function store(StoreVoucherRequest $request, VoucherRepo $voucherRepo) { $data = $request->input('voucher'); $voucherRepo->store($data); return redirect()->route('admin.voucher.index') ->with('status-success', 'Voucher added succesfully'); } /** * Display the specified resource. 
* * @param int $id * @return \Illuminate\Http\Response */ public function show($id) { // } /** * Show the form for editing the specified resource. * * @param int $id * @return \Illuminate\Http\Response */ public function edit(VoucherModel $voucher, VoucherRepo $voucherRepo) { $data = []; $data['voucher'] = $voucher; return view('pages.admin.voucher.edit', $data); } /** * Update the specified resource in storage. * * @param \Illuminate\Http\Request $request * @param int $id * @return \Illuminate\Http\Response */ public function update(UpdateVoucherRequest $request, VoucherModel $voucher, VoucherRepo $voucherRepo) { $data = $request->input('voucher'); $voucherRepo->update($data, $voucher->id); return redirect()->route('admin.voucher.index') ->with('status-success', 'Voucher updated succesfully'); } /** * Remove the specified resource from storage. * * @param int $id * @return \Illuminate\Http\Response */ public function destroy(VoucherModel $voucher, VoucherRepo $voucherRepo) { $voucherRepo->destroy($voucher->id); return redirect()->route('admin.voucher.index') ->with('status-success', 'Voucher deleted succesfully'); } /** * Check if vocher is available * * @param int $id * @return \Illuminate\Http\Response */ public function applyVoucher(ApplyVoucherRequest $request, VoucherRepo $voucherRepo) { $code = $request->input('code'); $voucher = $voucherRepo->getByCode($code); $data['discount_percentage'] = $voucher->discount_percentage; return $data; } } <file_sep>import $ from 'jquery'; import Vue from 'vue'; const vm = new Vue({ el: '#pricing-plans', data: pricingPlansPageData, methods: { refreshPlanList: function(discountPercentage){ this.$refs.planList.applyDiscountToPlans(discountPercentage); } }, components: { 'plans-list' : require('./plans/plans-list.vue'), 'apply-voucher-form': require('./voucher/apply-voucher-form.vue'), } });<file_sep><?php /* |-------------------------------------------------------------------------- | Web Routes 
|-------------------------------------------------------------------------- | | Here is where you can register web routes for your application. These | routes are loaded by the RouteServiceProvider within a group which | contains the "web" middleware group. Now create something great! | */ Route::get( '/', [ 'as' => 'index', 'uses' => 'HomeController@index', ] ); Route::get( '/home', [ 'as' => 'home', 'uses' => 'HomeController@index', ] ); Route::get( '/plan-list', [ 'as' => 'plan-list', 'uses' => 'PlanController@planList', ] ); Route::post( '/get-plan-list', [ 'as' => 'get-plan-list', 'uses' => 'PlanController@getPlanList', ] ); Route::get( '/set-cookie-values/source/{source}/campaign/{campaign}/voucher_code/{voucher_code?}', [ 'as' => 'home.set-cookie-values', 'uses' => 'HomeController@setCookieValues', ] ); Route::post( '/voucher/apply', [ 'as' => 'voucher.apply', 'uses' => 'VoucherController@applyVoucher', ] ); Route::group(['prefix' => 'admin', 'middleware' => ['is_admin_user']], function () { Route::get( '/', [ 'as' => 'admin.index', 'uses' => 'AdminController@index', ] ); //Operating Systems Routes Route::get( '/os', [ 'as' => 'admin.os.index', 'uses' => 'OperatigSystemController@index' ] ); Route::get( '/os/create', [ 'as' => 'admin.os.create', 'uses' => 'OperatigSystemController@create', ] ); Route::post( '/os/store', [ 'as' => 'admin.os.store', 'uses' => 'OperatigSystemController@store', ] ); Route::get( '/os/edit/{os}', [ 'as' => 'admin.os.edit', 'uses' => 'OperatigSystemController@edit', ] ); Route::post( '/os/update/{os}', [ 'as' => 'admin.os.update', 'uses' => 'OperatigSystemController@update', ] ); Route::get( '/os/destroy/{os}', [ 'as' => 'admin.os.destroy', 'uses' => 'OperatigSystemController@destroy', ] ); //Features Routes Route::get( '/feature', [ 'as' => 'admin.feature.index', 'uses' => 'FeatureController@index', ] ); Route::get( '/feature/create', [ 'as' => 'admin.feature.create', 'uses' => 'FeatureController@create', ] ); Route::post( 
'/feature/store', [ 'as' => 'admin.feature.store', 'uses' => 'FeatureController@store', ] ); Route::get( '/feature/edit/{feature}', [ 'as' => 'admin.feature.edit', 'uses' => 'FeatureController@edit', ] ); Route::post( '/feature/update/{feature}', [ 'as' => 'admin.feature.update', 'uses' => 'FeatureController@update', ] ); Route::get( '/feature/destroy/{feature}', [ 'as' => 'admin.feature.destroy', 'uses' => 'FeatureController@destroy', ] ); //Voucher Routes Route::get( '/voucher', [ 'as' => 'admin.voucher.index', 'uses' => 'VoucherController@index', ] ); Route::get( '/voucher/create', [ 'as' => 'admin.voucher.create', 'uses' => 'VoucherController@create', ] ); Route::post( '/voucher/store', [ 'as' => 'admin.voucher.store', 'uses' => 'VoucherController@store', ] ); Route::get( '/voucher/edit/{voucher}', [ 'as' => 'admin.voucher.edit', 'uses' => 'VoucherController@edit', ] ); Route::post( '/voucher/update/{voucher}', [ 'as' => 'admin.voucher.update', 'uses' => 'VoucherController@update', ] ); Route::get( '/voucher/destroy/{voucher}', [ 'as' => 'admin.voucher.destroy', 'uses' => 'VoucherController@destroy', ] ); //Plans Routes Route::get( '/plan', [ 'as' => 'admin.plan.index', 'uses' => 'PlanController@index', ] ); Route::get( '/plan/create', [ 'as' => 'admin.plan.create', 'uses' => 'PlanController@create', ] ); Route::post( '/plan/store', [ 'as' => 'admin.plan.store', 'uses' => 'PlanController@store', ] ); Route::get( '/plan/edit/{plan}', [ 'as' => 'admin.plan.edit', 'uses' => 'PlanController@edit', ] ); Route::post( '/plan/update/{plan}', [ 'as' => 'admin.plan.update', 'uses' => 'PlanController@update', ] ); Route::get( '/plan/destroy/{plan}', [ 'as' => 'admin.plan.destroy', 'uses' => 'PlanController@destroy', ] ); }); Auth::routes();
4228574628a0597c360e4181359f474b2b51c9b0
[ "Markdown", "JavaScript", "PHP" ]
26
Markdown
alex-alg/vpnstore
0687e9c5a34aefc495ca45df5dffba15da8fcd30
c0722a053ab49979f73a75b38926f963bd4782a5
refs/heads/master
<file_sep># CS153 ####Member 1: <NAME> ####Member 2: <NAME> ------------------------------ Covers the principles and practice of operating system design. Includes concurrency, memory management, file systems, protection, security, command languages, scheduling, and system performance. Lab 1: Fun with Processes Lab 2: Memory Management <file_sep>#Lab 2: Memory Management ##### Handed out Monday, October 30th, 2016 ##### Walkthrough due November 17th, 2016 ##### Due Thuesday December 1st, 2016 ###Objectives There are two components to this assignment: ``` - Understand virtual memory (Part 1) - Make Simple modifications to the memory layout (Part 2 and Part 3) ``` ### Part 1: Memory translation system call ####Overview The goal of this part is to learn about the data structures used to manage virtual memory. #### Details Add system call v2p (int virtual, int physical) v2p takes a virtual address and returns a phydicaal address. v2p should return an error if the virtual address is not valid (e.g., out of range) or not mapped. ###Part 2: Null pointer dereference #### Overview In this part, you will be changing xv6 to support a feature available in virtually every modern OS: raising an exception when your program dereferences a null pointer. Sound simple? Well, there are a few tricky details. #### Details In xv6, the VM system uses a simple two-level page table. If you do not remember the details, read [Section 20.3 of OS 3 easy Steps](http://pages.cs.wisc.edu/~remzi/OSTEP/vm-smalltables.pdf). However, you may find the description in Chapter 1 of the xv6 manual sufficient (and more relevant to the assignment). As it currently is structured, user code is loaded into the very first part of the address space. Thus, if you dereference a null pointer, you will not see an exception (as you might expect); rather, you will see whatever code is the first bit of code in the program that is running. Try it and see! 
Thus, the first thing you might do is create a program that dereferences a null pointer. It is simple! See if you can do it. Then run it on Linux as well as xv6, to see the difference. You job here will be to figure out how xv6 sets up a page table. Thus, once again, this project is mostly understanding the code, and not writing very much. Look at how exec() works to better understand how address spaces get filled with code and in general initialized. that will get you most of the way. You should also look at fork(), in particular the part where the address space of the child is created by copying the address space of the parent. What needs to change in there/ The rest of your task will be completed by looking through the code to figure out where there are checks or assumptions made about the address space. Think about what happens when you pass a parameter into the kernel (using a system call), for example; if passing a pointer, the kernel needs to be very careful with it, to ensure you haven't passed it a bad pointer. How does it do this now? Does this code need to change in order to work in your new version of xv6? One last hint: you will have to look at the xv6 makefile as well. In there user programs are compiled so as to set their entry point (where the first instruction is) to 0. If you change xv6 to make the first page invalid, clearly the entry point will have to be somewhere else(e.g., the next page, or 0x1000). thus, something in the makefile will need to change to reflect as well. You should be able to demonstrate what happens when user code tries to access a null pointer. If you do this part correctly, xv6 should trap and kill the process without too much trouble on your part. 
###Part 3: Stack Rearrangement The xv6 address space is currently set up like this: ``` code stack (fixed-sized, one page) heap (grows towards the high-end of the address space) ``` In this part of the xv6 project, you will rearrange the address space to look more like Linux: ``` code heap(grows towards the high-end of the address space) ...(gap) stack (at end of address space; grows backwards) ``` this will take a little work on your part. First, you will have to figure out where xv6 allocates and initializes the user stack; then, you will have to figure out how to change that to use a page at the high-end of the xv6 user address space, instead of one between the code and heap. Some tricky parts: one thing you will have to be very careful with is how xv6 currently tracks the size of a process's address space (currently with the sz field in the proc struct). there are a number of places in the code where this is used (e.g., to check whether an argument passed into the kernel is valid; to copy the address space. We recommend keeping this field to track the size of the code and heap, but doing some other accounting to track the stack, and changing all relevant code (i.e., that used to deal with sz) to now work with your new accounting. You should also be wary of growing your heap and overwriting your stack. In fact, you should always leave an unallocated (invalid) page between teh stack and heap. The high end of the xv6 user address space is 640KB (see the USERTOP value defined in the xv6 code). Thus your stack page shoudl live at 636KB-640KB. One final part of this project, which is challenging: automaticall growing the stack backwards when needed. Doing so would require you to see if a fault occured on the page above the stack and tehn, instead of killing the offending process, allocating a new page, mapping it into the address space, and continuing to run. Getting this to work will make you into a kernel boss, and also get you those last 10% of credit. 
Bonus (5%): Write code to try and get the stack to grow into the heap. Were you able to? If not explain why. ##The Code You may also find the following readings about xv6 useful: [xv6 book](https://pdos.csail.mit.edu/6.828/2011/xv6/book-rev6.pdf). **Particularly useful for this project:** Chapter 1 + anything else about fork() and exec(), as well as virtual memory. ## Submission(s) Like Lab1, there are two submissions, a walkthrough/design document and the final submission. The walkthrough tentatively consists of the following questions (may be subject to modifications until November 7th). Where an outline of an implementation is requested, you have to list all the major items clearly: ``` - read chapter 2 in the xv6 book. Briefly explain the operation of kvmalloc() and mappages() and Figure 2-2. - What is the size of the virtual address space used in xv6? - How does a virtual address get translated to a physical address? - show where in the code we can figure out the location of the stack. 
``` <file_sep>#include "types.h" #include "x86.h" #include "defs.h" #include "date.h" #include "param.h" #include "memlayout.h" #include "mmu.h" #include "proc.h" int sys_fork(void) { return fork(); } int sys_exit(void) { exit(); return 0; // not reached } int sys_wait(void) { return wait(); } int sys_kill(void) { int pid; if(argint(0, &pid) < 0) return -1; return kill(pid); } int sys_getpid(void) { return proc->pid; } int sys_sbrk(void) { int addr, n; if(argint(0, &n) < 0) return -1; addr = proc->sz; if(growproc(n) < 0) return -1; return addr; } int sys_sleep(void) { int n; uint ticks0; if(argint(0, &n) < 0) return -1; acquire(&tickslock); ticks0 = ticks; while(ticks - ticks0 < n){ if(proc->killed){ release(&tickslock); return -1; } sleep(&ticks, &tickslock); } release(&tickslock); return 0; } int sys_uptime(void) { uint xticks; acquire(&tickslock); xticks = ticks; release(&tickslock); return xticks; } int sys_v2p(void) { int *physical, *virtual; if((argptr(0, (char**) &physical, sizeof(physical)) < 0) || (argptr(0, (char**) &virtual, sizeof(virtual)) < 0)) return 0; return v2p(virtual, physical); }
4aa43801c3395a41fa5a794846bd9b460dc22bc2
[ "Markdown", "C" ]
3
Markdown
preethask/CS153
71be898a579899b530af159719cb8c6c2bb8958c
82f966f3e691104584c862dc9303dad8038a48f1
refs/heads/master
<repo_name>shpringr/assignment2<file_sep>/src/main/java/bgu/spl/a2/sim/Warehouse.java package bgu.spl.a2.sim; import bgu.spl.a2.sim.tools.Tool; import bgu.spl.a2.sim.conf.ManufactoringPlan; import bgu.spl.a2.Deferred; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; /** * A class representing the warehouse in your simulation * * Note for implementors: you may add methods and synchronize any of the * existing methods in this class *BUT* you must be able to explain why the * synchronization is needed. In addition, the methods you add to this class can * only be private!!! * */ public class Warehouse { private Map<Tool, Integer> toolsAndQuantities; private List<ManufactoringPlan> plans; private final Object lock = new Object(); private Map<String, List<Deferred<Tool>>> toolsToWaitingDeffereds; /** * Constructor */ public Warehouse() { toolsAndQuantities = new ConcurrentHashMap<>(); plans = new ArrayList<>(); toolsToWaitingDeffereds = new HashMap<>(); } /** * Tool acquisition procedure * Note that this procedure is non-blocking and should return immediately * * * @param type - string describing the required tool * @return a deferred promise for the requested tool */ public Deferred<Tool> acquireTool(String type) { synchronized (lock) { Deferred<Tool> toolDeferred = new Deferred<>(); for (Tool tool : toolsAndQuantities.keySet()) { if (tool.getType().equals(type)) { reduceToolFromInventory(tool); toolDeferred.resolve(tool); } } if (!toolDeferred.isResolved()) { putDeferredInMap(type, toolDeferred); } return toolDeferred; } } private void putDeferredInMap(String type, Deferred<Tool> toolDeferred) { if (toolsToWaitingDeffereds.containsKey(type)) toolsToWaitingDeffereds.get(type).add(toolDeferred); else { List<Deferred<Tool>> deferredTools = new ArrayList<>(); deferredTools.add(toolDeferred); toolsToWaitingDeffereds.put(type, deferredTools); } } private void 
reduceToolFromInventory(Tool tool) { if (toolsAndQuantities.get(tool).equals(0)) toolsAndQuantities.remove(tool); else toolsAndQuantities.put(tool, toolsAndQuantities.get(tool) - 1); } /** * Tool return procedure - releases a tool which becomes available in the warehouse upon completion. * * @param tool - The tool to be returned */ public void releaseTool(Tool tool) { synchronized (lock) { addToolToInventory(tool); resolveWaiting(tool); } } private void resolveWaiting(Tool tool) { if (toolsToWaitingDeffereds.containsKey(tool.getType())) { Deferred<Tool> deferredToResolve = toolsToWaitingDeffereds.get(tool.getType()).remove(0); if (toolsToWaitingDeffereds.get(tool.getType()).isEmpty()) toolsToWaitingDeffereds.remove(tool.getType()); deferredToResolve.resolve(tool); } } private void addToolToInventory(Tool tool) { if (!toolsAndQuantities.containsKey(tool)) addTool(tool, 1); else addTool(tool, toolsAndQuantities.get(tool) + 1); } /** * Getter for ManufactoringPlans * * @param product - a string with the product name for which a ManufactoringPlan is desired * @return A ManufactoringPlan for product */ public ManufactoringPlan getPlan(String product) { for (ManufactoringPlan plan : plans) { if (plan.getProductName().equals(product)) return plan; } return null; } /** * Store a ManufactoringPlan in the warehouse for later retrieval * * @param plan - a ManufactoringPlan to be stored */ public void addPlan(ManufactoringPlan plan) { plans.add(plan); } /** * Store a qty Amount of tools of type tool in the warehouse for later retrieval * * @param tool - type of tool to be stored * @param qty - amount of tools of type tool to be stored */ public void addTool(Tool tool, int qty) { toolsAndQuantities.put(tool, qty); } }<file_sep>/src/main/java/bgu/spl/a2/sim/tools/RandomSumPliers.java package bgu.spl.a2.sim.tools; import bgu.spl.a2.sim.Product; import java.util.Random; public class RandomSumPliers implements Tool { public String getType() { return "rs-pliers"; } public long 
useOn(Product p) { long value=0; for(Product part : p.getParts()){ value+=Math.abs(randomP(part.getFinalId())); } return value; } private long randomP(long p) { Random rnd = new Random(p); int count = (int) (p % 10000); long sum = 0; for (int i=0; i<count; i++){ long tmp = rnd.nextInt(); sum += tmp; } return sum; } } <file_sep>/src/main/java/bgu/spl/a2/sim/tools/NextPrimeHammer.java package bgu.spl.a2.sim.tools; import bgu.spl.a2.sim.Product; public class NextPrimeHammer implements Tool { public String getType() { return "np-hammer"; } public long useOn(Product p) { long value=0; for(Product part : p.getParts()){ value+=Math.abs(nextPrime(part.getFinalId())); } return value; } private long nextPrime(long next) { boolean isPrime = false; long start = 2; while (!isPrime) { next += 1; long m = (int) Math.ceil(Math.sqrt(next)); isPrime = true; for (long i = start; i <= m ; i++) { if (next % i == 0) { isPrime = false; break; } } } return next; } } <file_sep>/src/main/java/bgu/spl/a2/Processor.java package bgu.spl.a2; import java.util.concurrent.ConcurrentLinkedDeque; /** * this class represents a single work stealing processor, it is * {@link Runnable} so it is suitable to be executed by threads. * <p> * Note for implementors: you may add methods and synchronize any of the * existing methods in this class *BUT* you must be able to explain why the * synchronization is needed. 
In addition, the methods you add can only be * private, protected or package protected - in other words, no new public * methods */ public class Processor implements Runnable { private final int id; private final WorkStealingThreadPool pool; private final Object lockInc = new Object(); /** * constructor for this class * <p> * IMPORTANT: * 1) this method is package protected, i.e., only classes inside * the same package can access it - you should *not* change it to * public/private/protected * <p> * 2) you may not add other constructors to this class * nor you allowed to add any other parameter to this constructor - changing * this may cause automatic tests to fail.. * * @param id - the processor id (every processor need to have its own unique * id inside its thread pool) * @param pool - the thread pool which owns this processor */ /*package*/Processor(int id, WorkStealingThreadPool pool) { this.id = id; this.pool = pool; } @Override public void run() { try { ConcurrentLinkedDeque<Task> tasks; while (true) { int versionBefore = pool.getVm().getVersion(); tasks = pool.getQueue(id); if (!tasks.isEmpty()) { Task t = tasks.pollFirst(); if (t != null) t.handle(this); } else { if (!tryStealTasks()) { pool.getVm().await(versionBefore); } } } } catch (InterruptedException ignored){ } } private boolean tryStealTasks() throws InterruptedException { int nextToSteal = (id + 1) % pool.getProcessors().size(); boolean isFound = false; while (!isFound && nextToSteal != id) { ConcurrentLinkedDeque<Task> victimQueue = pool.getQueue(nextToSteal); if (victimQueue.size() > 1 ) { isFound = true; for (int i = 0; i < victimQueue.size() / 2 && victimQueue.size() > 0; i++) { Task task = victimQueue.pollLast(); if (task != null) { addTaskToQueue(task); } } } else { nextToSteal = (nextToSteal + 1) % pool.getProcessors().size(); } } return isFound; } void addTaskToQueue(Task task) { synchronized (lockInc) { pool.getQueue(id).addFirst(task); pool.getVm().inc(); } } }
c64d4f5d5014669f56a0db403beed0357e7afd36
[ "Java" ]
4
Java
shpringr/assignment2
5263f26fe88b8e6e0e3cc8988387e5f9bfe9d985
eba920ee3d248327df3f8dae88ba830830fc9953
refs/heads/master
<file_sep>include ':app' rootProject.name='Markertest' <file_sep>package com.jinasoft.markertest; import androidx.annotation.NonNull; import androidx.annotation.UiThread; import androidx.appcompat.app.AppCompatActivity; import androidx.constraintlayout.solver.widgets.Rectangle; import androidx.fragment.app.FragmentActivity; import androidx.fragment.app.FragmentManager; import android.annotation.SuppressLint; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.AsyncTask; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.util.Log; import android.view.View; import com.naver.maps.geometry.LatLng; import com.naver.maps.geometry.LatLngBounds; import com.naver.maps.map.CameraPosition; import com.naver.maps.map.CameraUpdate; import com.naver.maps.map.MapFragment; import com.naver.maps.map.NaverMap; import com.naver.maps.map.OnMapReadyCallback; import com.naver.maps.map.overlay.GroundOverlay; import com.naver.maps.map.overlay.Marker; import com.naver.maps.map.overlay.OverlayImage; import com.naver.maps.map.overlay.PolygonOverlay; import java.util.ArrayList; import java.util.List; import java.util.Vector; import java.util.concurrent.Executor; import java.util.concurrent.Executors; public class MainActivity extends FragmentActivity implements OnMapReadyCallback { Rectangle rect; LatLngBounds bounds; NaverMap naverMapback; LatLng MarkerPosition; Bitmap markerBubble; Marker marker; final List<Marker> markers = new ArrayList<>(); private Handler mHandler = new Handler(); @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); FragmentManager fm = getSupportFragmentManager(); MapFragment mapFragment = (MapFragment)fm.findFragmentById(R.id.map); if (mapFragment == null) { mapFragment = MapFragment.newInstance(); fm.beginTransaction().add(R.id.map, mapFragment).commit(); } mapFragment.getMapAsync(this); } @UiThread 
@Override public void onMapReady(@NonNull final NaverMap naverMap) { bounds = naverMap.getContentBounds(); final GroundOverlay ground = new GroundOverlay(); naverMapback = naverMap; // 카메라 초기 위치 설정 LatLng initialPosition = new LatLng(37.566288, 127.977980); CameraUpdate cameraUpdate = CameraUpdate.scrollTo(initialPosition); // naverMap.moveCamera(CameraUpdate.fitBounds(bounds)); // 마커들 위치 정의 (대충 1km 간격 동서남북 방향으로 만개씩, 총 4만개) // 카메라 이동 되면 호출 되는 이벤트 naverMap.addOnCameraIdleListener(new NaverMap.OnCameraIdleListener() { @Override public void onCameraIdle() { bounds = naverMap.getContentBounds(); freeActiveMarkers(); // loop(); backMarker task = new backMarker(); task.execute(); // markerBubble = BitmapFactory.decodeResource(getResources(), R.drawable.bluemaker); // backMarker task = new backMarker(); // task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR); // new Thread(new Runnable() { // // // @Override // public void run() { // mHandler.post(new Runnable() { // @Override // public void run() { // for(int i=0; i<markers.size();i++) { // Marker marker = markers.get(i); // marker.setMap(naverMapback); // } // } // }); // // // } // }).start(); } }); } public void loop(){ new Thread(new Runnable() { @Override public void run() { try { for(int i = 0; i <1000 ; i++) { MarkerPosition = new LatLng(37.566288 + 0.001 * i, 126.977980 + 0.001 * i); marker = new Marker(); if (bounds.contains(MarkerPosition)) { // markerBubble = Bitmap.createScaledBitmap(markerBubble, 100, 100, true); marker.setPosition(new LatLng(37.566288 + 0.001 * i, 126.977980 + 0.001 * i)); // marker.setIcon(OverlayImage.fromBitmap(markerBubble)); activeMarkers.add(marker); } // else { // marker.setMap(null); // } } } catch (Exception e) { e.printStackTrace(); } mHandler.post(new Runnable() { @Override public void run() { for (int i = 0; i < activeMarkers.size(); i++) { Marker marker = activeMarkers.get(i); marker.setMap(naverMapback); } } }); } }).start(); } class backMarker extends AsyncTask<String, 
Void,String>{ @Override protected void onPreExecute() { super.onPreExecute(); } @SuppressLint("WrongThread") @Override protected String doInBackground(String... strings) { for(int i = 0; i <1000 ; i++) { MarkerPosition = new LatLng(37.566288 + 0.001 * i, 126.977980 + 0.001 * i); marker = new Marker(); if (bounds.contains(MarkerPosition)) { // markerBubble = Bitmap.createScaledBitmap(markerBubble, 100, 100, true); marker.setPosition(new LatLng(37.566288 + 0.001 * i, 126.977980 + 0.001 * i)); // marker.setIcon(OverlayImage.fromBitmap(markerBubble)); activeMarkers.add(marker); } // else { // marker.setMap(null); // } } // marker.setMap(null); return null; } @Override protected void onPostExecute(String result) { super.onPostExecute(result); // marker.setMap(naverMapback); for (int i = 0; i < activeMarkers.size(); i++) { Marker marker = activeMarkers.get(i); marker.setMap(naverMapback); } } } // 마커 정보 저장시킬 변수들 선언 private Vector<LatLng> markersPosition; private Vector<Marker> activeMarkers; // 현재 카메라가 보고있는 위치 public LatLng getCurrentPosition(NaverMap naverMap) { CameraPosition cameraPosition = naverMap.getCameraPosition(); return new LatLng(cameraPosition.target.latitude, cameraPosition.target.longitude); } // 지도상에 표시되고있는 마커들 지도에서 삭제 private void freeActiveMarkers() { if (activeMarkers == null) { activeMarkers = new Vector<Marker>(); return; } for (Marker activeMarker: activeMarkers) { activeMarker.setMap(null); } activeMarkers = new Vector<Marker>(); } }
a44b0dceab848dc4cf4540c01fff7ffd9f6a8f24
[ "Java", "Gradle" ]
2
Gradle
ffgoo/Markertest
fe6f69a409ba83b2b86e195e2be505abf8f95a7e
59ea153bf074743ae5a84dc66a3a2609a5f1d7f5
refs/heads/master
<file_sep>package com.yctu.bbs.service; import com.yctu.bbs.model.User; import com.yctu.bbs.util.Serverutil; import okhttp3.Callback; import okhttp3.FormBody; import okhttp3.Request; import okhttp3.RequestBody; /** * Created by qigang on 2017/1/14. */ public class CRegister { public void register(User user, Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("user.account", user.getAccount()) .add("user.password", user.getPassword()) .add("user.username", user.getUsername()) .add("user.aword", user.getAword()) .build(); Request request = new Request.Builder() .url(Serverutil.url+"register") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } } <file_sep>package com.yctu.bbs.modelutil; public class Blogz { private String account; private String username; private int id; private String title; private String content; private String date; private int statue; private String headpic; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getContent() { return content; } public void setContent(String content) { this.content = content; } public String getDate() { return date; } public void setDate(String date) { this.date = date; } public int getStatue() { return statue; } public void setStatue(int statue) { this.statue = statue; } public String getAccount() { return account; } public void setAccount(String account) { this.account = account; } public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getHeadpic() { return headpic; } public void setHeadpic(String headpic) { this.headpic = headpic; } @Override public String toString() { return "Blogz [account=" + account + ", username=" + username + ", id=" + id + ", title=" + title + ", content=" + content + ", date=" + date + ", statue=" + statue 
+ ", headpic=" + headpic + "]"; } } <file_sep>package com.yctu.bbs.utilactivity; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import com.yctu.bbs.R; public class BBXXActivity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_bbxx); setTitle("版本信息"); } } <file_sep>package com.yctu.bbs.activity; import android.graphics.Color; import android.os.Handler; import android.os.Message; import android.support.design.widget.FloatingActionButton; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.View; import android.widget.EditText; import com.yctu.bbs.R; import com.yctu.bbs.model.Blog; import com.yctu.bbs.model.Reply; import com.yctu.bbs.service.CSendMessage; import com.yctu.bbs.util.MyApplication; import java.io.IOException; import jp.wasabeef.richeditor.RichEditor; import okhttp3.Call; import okhttp3.Callback; import okhttp3.Response; public class SendReplyActivity extends AppCompatActivity { private RichEditor replycontent; private FloatingActionButton sendbt; private String id; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_sendreply); setTitle("回复"); id = getIntent().getStringExtra("id"); initview(); } public void initview(){ replycontent = (RichEditor) findViewById(R.id.replycontent); replycontent.setEditorHeight(200); replycontent.setEditorFontSize(22); replycontent.setEditorFontColor(Color.red(000000)); replycontent.setEditorBackgroundColor(Color.WHITE); replycontent.setPadding(10, 10, 10, 10); replycontent.setPlaceholder("Insert text here..."); sendbt = (FloatingActionButton) findViewById(R.id.sendreply); sendbt.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Reply reply = new Reply(); reply.setAccount(((MyApplication) getApplication()).getAccount()); 
reply.setBlogid(id); reply.setContent(replycontent.getHtml()); CSendMessage sendMessage = new CSendMessage(); sendMessage.sendreply(reply,callback); } }); } private Callback callback = new Callback() { @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); nethandler.sendEmptyMessage(0); } @Override public void onResponse(Call call, Response response) throws IOException { nethandler.sendEmptyMessage(1); } }; Handler nethandler = new Handler(){ @Override public void handleMessage(Message msg) { onBackPressed(); } }; } <file_sep>package com.yctu.bbs.fragment; import android.app.Fragment; import android.content.Intent; import android.content.SharedPreferences; import android.os.*; import android.support.annotation.Nullable; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import android.widget.Toast; import com.google.gson.Gson; import com.squareup.picasso.Picasso; import com.yctu.bbs.R; import com.yctu.bbs.activity.BlogsListActivity; import com.yctu.bbs.activity.PerInfoActivity; import com.yctu.bbs.model.User; import com.yctu.bbs.service.CUser; import com.yctu.bbs.util.MyApplication; import com.yctu.bbs.util.Serverutil; import com.yctu.bbs.utilactivity.BBXXActivity; import com.yctu.bbs.utilactivity.KYXKActivity; import com.yctu.bbs.utilactivity.YHXYActivity; import java.io.IOException; import okhttp3.Call; import okhttp3.Callback; import okhttp3.Response; import static android.content.Context.MODE_PRIVATE; import static java.lang.System.in; import static java.lang.System.load; /** * Created by qigang on 2017/1/14. 
*/ public class Myhome extends Fragment { private LinearLayout linearLayout; private ImageView imageView; private TextView textView,myblog,mycollect; private TextView kyxk,bbxx,myhome_logout,yhxy; @Nullable @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = inflater.inflate(R.layout.myhone_fragment, container, false); linearLayout = (LinearLayout)view.findViewById(R.id.myhome_head); linearLayout.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(getActivity(), PerInfoActivity.class); startActivity(intent); } }); imageView = (ImageView)view.findViewById(R.id.myhome_headpic); textView = (TextView)view.findViewById(R.id.myhome_headname); myblog = (TextView)view.findViewById(R.id.myblog); mycollect = (TextView)view.findViewById(R.id.mycollect); kyxk = (TextView)view.findViewById(R.id.kyxk); kyxk.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(getActivity(), KYXKActivity.class); startActivity(intent); } }); bbxx = (TextView)view.findViewById(R.id.bbxx); bbxx.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(getActivity(), BBXXActivity.class); startActivity(intent); } }); yhxy = (TextView)view.findViewById(R.id.yhxy); yhxy.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(getActivity(), YHXYActivity.class); startActivity(intent); } }); myhome_logout = (TextView)view.findViewById(R.id.myhome_logout); myhome_logout.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { SharedPreferences preferences = getActivity().getSharedPreferences("qibbs",MODE_PRIVATE); SharedPreferences.Editor editor = preferences.edit(); editor.putString("account","1"); editor.commit(); getActivity().onBackPressed(); } }); 
MyApplication myApplication = (MyApplication)getActivity().getApplication(); CUser cUser = new CUser(); cUser.GetUser(myApplication.getAccount(),callback); myblog.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(getActivity(), BlogsListActivity.class); intent.putExtra("account",((MyApplication)getActivity().getApplication()).getAccount()); startActivity(intent); } }); mycollect.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(getActivity(), BlogsListActivity.class); intent.putExtra("type","collect"); intent.putExtra("account",((MyApplication)getActivity().getApplication()).getAccount()); startActivity(intent); } }); return view; } private Callback callback = new Callback() { @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); handler.sendEmptyMessage(0); } @Override public void onResponse(Call call, Response response) throws IOException { android.os.Message message = new android.os.Message(); message.obj = response.body().string().toString().trim(); message.what = 1; handler.sendMessage(message); } }; Handler handler = new Handler(){ @Override public void handleMessage(android.os.Message msg) { if(msg.what == 1){ Gson gson = new Gson(); User user = gson.fromJson(msg.obj.toString(), User.class); textView.setText(user.getUsername()); Picasso.with(getActivity()).load(Serverutil.url + "headpic/" + user.getHeadpic() + ".jpg").into(imageView); } } }; } <file_sep>package com.yctu.bbs.recycleutil; import android.content.Context; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.LinearLayout; import com.squareup.picasso.Picasso; import com.yctu.bbs.R; import com.yctu.bbs.activity.PerInfoActivity; import com.yctu.bbs.modelutil.Blogx; import com.yctu.bbs.util.OnRecyclerViewListener; import 
com.yctu.bbs.util.Serverutil; import java.util.ArrayList; /** * Created by qigang on 2017/1/16. */ public class BlogAdapter extends RecyclerView.Adapter{ private Context context; private ArrayList<Blogx> list; private OnRecyclerViewListener onRecyclerViewListener; public BlogAdapter(ArrayList<Blogx> list,Context context) { this.list = list; this.context = context; } @Override public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) { View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.recycler_blog_item, null); LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT); lp.setMargins(31,32,32,1); view.setLayoutParams(lp); return new BlogHolder(view,onRecyclerViewListener); } @Override public void onBindViewHolder(RecyclerView.ViewHolder viewHolder, int position) { BlogHolder holder = (BlogHolder) viewHolder; holder.position = position; Blogx blogx = list.get(position); holder.blog_username.setText(blogx.getUsername()); holder.blog_title.setText(blogx.getTitle()); holder.blog_date.setText(blogx.getDate()); Picasso.with(context).load(Serverutil.url + "headpic/" + blogx.getHeadpic() + ".jpg").into(holder.blog_headpic); } @Override public int getItemCount() { return list.size(); } public void setOnItemClickListener(OnRecyclerViewListener onRecyclerViewListener){ this.onRecyclerViewListener = onRecyclerViewListener; } } <file_sep>package com.yctu.bbs.activity; import android.os.Handler; import android.os.Message; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.View; import android.widget.Button; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import com.google.gson.Gson; import com.squareup.picasso.Picasso; import com.yctu.bbs.R; import com.yctu.bbs.model.User; import com.yctu.bbs.service.CUser; import com.yctu.bbs.util.MyApplication; import 
com.yctu.bbs.util.Serverutil; import java.io.IOException; import okhttp3.Call; import okhttp3.Callback; import okhttp3.Response; public class PerShowActivity extends AppCompatActivity { private TextView account,username,aword; private String focusedid; private ImageView headpic; private String accountString; private Button bt_pershow; private CUser cUser = new CUser(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_per_show); setTitle("个人信息展示"); accountString = getIntent().getStringExtra("account"); initview(); cUser.GetUser(accountString,callback); } public void initview(){ account = (TextView)findViewById(R.id.account); username = (TextView)findViewById(R.id.username); aword = (TextView)findViewById(R.id.aword); headpic = (ImageView)findViewById(R.id.headpic); bt_pershow = (Button)findViewById(R.id.bt_pershow); } private Callback callback = new Callback() { @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); handler.sendEmptyMessage(0); } @Override public void onResponse(Call call, Response response) throws IOException { Message message = new Message(); message.obj = response.body().string().toString().trim(); message.what = 1; handler.sendMessage(message); } }; Handler handler = new Handler(){ @Override public void handleMessage(Message msg) { if(msg.what == 1){ Gson gson = new Gson(); User user = gson.fromJson(msg.obj.toString(), User.class); focusedid = user.getId()+""; account.setText("账号:"+user.getAccount()); username.setText("昵称:"+user.getUsername()); aword.setText(user.getAword()); Picasso.with(PerShowActivity.this).load(Serverutil.url + "headpic/" + user.getHeadpic() + ".jpg").into(headpic); } } }; public void foucs(View v){ cUser.SetFocus(((MyApplication)getApplication()).getAccount(),focusedid,callbackx); } private Callback callbackx = new Callback() { @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); 
Toast.makeText(PerShowActivity.this,"error",Toast.LENGTH_LONG).show(); handlerx.sendEmptyMessage(0); } @Override public void onResponse(Call call, Response response) throws IOException { Message message = new Message(); message.obj = response.body().string().toString().trim(); message.what = 1; handlerx.sendMessage(message); } }; Handler handlerx = new Handler(){ @Override public void handleMessage(Message msg) { if(msg.what == 1){ if(msg.obj.toString().equals("exited")){ Toast.makeText(PerShowActivity.this,"以关注",Toast.LENGTH_LONG).show(); }else if(msg.obj.toString().equals("success")){ bt_pershow.setEnabled(false); Toast.makeText(PerShowActivity.this,"关注成功",Toast.LENGTH_LONG).show(); } } } }; } <file_sep>package com.yctu.bbs.activity; import android.content.Intent; import android.os.Handler; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.view.View; import android.widget.Toast; import com.yctu.bbs.R; import com.yctu.bbs.modelutil.Blogx; import com.yctu.bbs.recycleutil.BlogAdapter; import com.yctu.bbs.service.CBlog; import com.yctu.bbs.util.OnRecyclerViewListener; import java.io.IOException; import java.util.ArrayList; import okhttp3.Call; import okhttp3.Callback; import okhttp3.Response; public class BlogsListActivity extends AppCompatActivity { private String account; private String type; private RecyclerView recyclerView; private ArrayList infolist = null; private CBlog cBlog = new CBlog(); @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_blogs_list); account = getIntent().getStringExtra("account"); type = getIntent().getStringExtra("type"); setTitle("帖子展示"); recyclerView = (RecyclerView)findViewById(R.id.bloglist_rlv); RecyclerView.LayoutManager layoutManager = new LinearLayoutManager(BlogsListActivity.this); 
recyclerView.setLayoutManager(layoutManager); if("collect".equals(type)){ cBlog.GetCollectBlogByAccount(account,callback); }else{ cBlog.GetAllBlogByAccount(account,callback); } } Handler handler = new Handler(){ @Override public void handleMessage(android.os.Message msg) { super.handleMessage(msg); if(msg.what == 0){ Toast.makeText(BlogsListActivity.this,"网络异常",Toast.LENGTH_LONG).show(); return; } if(msg.obj.toString().equals("[]")){ Toast.makeText(BlogsListActivity.this,"没有数据",Toast.LENGTH_LONG).show(); return; } infolist = cBlog.GetBlogxList(msg.obj.toString()); BlogAdapter adapter = new BlogAdapter(infolist,BlogsListActivity.this); //调用infoadapter来完成adapter的设置 adapter.setOnItemClickListener(new OnRecyclerViewListener() { @Override public void onItemClick(View v, int position) { Intent intent = new Intent(BlogsListActivity.this, BlogDetailActivity.class); intent.putExtra("id",((Blogx)infolist.get(position)).getId()); startActivity(intent); } }); recyclerView.setAdapter(adapter); } }; private Callback callback = new Callback() { @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); handler.sendEmptyMessage(0); } @Override public void onResponse(Call call, Response response) throws IOException { android.os.Message message = new android.os.Message(); message.what = 1; message.obj = response.body().string().toString().trim(); handler.sendMessage(message); } }; } <file_sep>package com.yctu.bbs.service; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonParser; import com.yctu.bbs.model.Reply; import com.yctu.bbs.model.User; import com.yctu.bbs.util.Serverutil; import java.util.ArrayList; import okhttp3.Callback; import okhttp3.FormBody; import okhttp3.Request; import okhttp3.RequestBody; /** * Created by qigang on 2017/1/16. 
*/ public class CUser { public void GetFocusUser(String account, Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("account", account) .build(); Request request = new Request.Builder() .url(Serverutil.url+"getfocususer") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public ArrayList<User> GetUserList(String obj){ ArrayList<User> array = new ArrayList<User>(); Gson gson = new Gson(); JsonParser parser = new JsonParser(); JsonArray Jarray = parser.parse(obj).getAsJsonArray(); for(JsonElement object : Jarray ){ User cse = gson.fromJson( object , User.class); array.add(cse); } //Collections.reverse(array); return array; } public void SetFocus(String userid,String focusedid, Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("userid", userid) .add("focusedid", focusedid) .build(); Request request = new Request.Builder() .url(Serverutil.url+"setfocus") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void GetUser(String account, Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("account", account) .build(); Request request = new Request.Builder() .url(Serverutil.url+"getUserByAccount") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void updateu(String username,String account,Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("username", username) .add("type","1") .add("account",account) .build(); Request request = new Request.Builder() .url(Serverutil.url+"resetUser") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void updatea(String aword,String account,Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("aword", aword) .add("type","2") .add("account",account) .build(); Request request = new Request.Builder() .url(Serverutil.url+"resetUser") 
.post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } } <file_sep>package com.yctu.bbs.activity; import android.app.AlertDialog; import android.content.DialogInterface; import android.graphics.Color; import android.graphics.Interpolator; import android.os.Handler; import android.os.Message; import android.support.design.widget.FloatingActionButton; import android.support.v7.app.AppCompatActivity; import android.os.Bundle; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; import com.yctu.bbs.R; import com.yctu.bbs.model.Blog; import com.yctu.bbs.service.CSendMessage; import com.yctu.bbs.util.MyApplication; import java.io.IOException; import jp.wasabeef.richeditor.RichEditor; import okhttp3.Call; import okhttp3.Callback; import okhttp3.Response; public class SendMess extends AppCompatActivity { private RichEditor mEditor; private EditText title; private String resString; private String resUrl; private FloatingActionButton sendbt; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_sendmess); setTitle("信息编写"); initview(); } public void initview(){ title = (EditText)findViewById(R.id.send_title) ; mEditor = (RichEditor) findViewById(R.id.editor); mEditor.setEditorHeight(200); mEditor.setEditorFontSize(22); mEditor.setEditorFontColor(Color.red(000000)); mEditor.setEditorBackgroundColor(Color.WHITE); mEditor.setPadding(10, 10, 10, 10); mEditor.setPlaceholder("正文部分..."); sendbt = (FloatingActionButton) findViewById(R.id.sendnow); sendbt.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { Blog blog = new Blog(); blog.setTitle(title.getText().toString().trim()); blog.setContent(mEditor.getHtml()); CSendMessage sendMessage = new CSendMessage(); 
sendMessage.sendmess(blog,((MyApplication) getApplication()).getAccount(),callback); } }); } private Callback callback = new Callback() { @Override public void onFailure(Call call, IOException e) { e.printStackTrace(); nethandler.sendEmptyMessage(0); } @Override public void onResponse(Call call, Response response) throws IOException { nethandler.sendEmptyMessage(1); } }; Handler nethandler = new Handler(){ @Override public void handleMessage(Message msg) { onBackPressed(); } }; @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.sendmessmeun, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); if(id == R.id.alertpic){ SetPicUrl(); return true; }else if(id == R.id.setbold){ mEditor.setBold(); return true; }else if(id == R.id.setitalic){ mEditor.setItalic(); return true; } return super.onOptionsItemSelected(item); } public void SetPicUrl(){ final String[] res = {null}; final View view = LayoutInflater.from(SendMess.this).inflate(R.layout.alert_setpicurl, null);//这里必须是final的 final EditText edit=(EditText)view.findViewById(R.id.editText);//获得输入框对象 new AlertDialog.Builder(SendMess.this) .setTitle("插入图片的网络地址")//提示框标题 .setView(view) .setPositiveButton("确定",//提示框的两个按钮 new android.content.DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { Message message = new Message(); message.obj = edit.getText().toString().trim(); handler.sendMessage(message); } }).setNegativeButton("取消", null).create().show(); } Handler handler = new Handler(){ @Override public void handleMessage(Message msg) { mEditor.insertImage(msg.obj.toString(),"image"); } }; } <file_sep>package com.yctu.bbs.model; /** * Created by qigang on 2017/1/15. 
*/ public class Blog { private int id; private String title; private String content; private User user; private String date; private int statue; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getTitle() { return title; } public void setTitle(String title) { this.title = title; } public String getContent() { return content; } public void setContent(String content) { this.content = content; } public User getUser() { return user; } public void setUser(User user) { this.user = user; } public String getDate() { return date; } public void setDate(String date) { this.date = date; } public int getStatue() { return statue; } public void setStatue(int statue) { this.statue = statue; } @Override public String toString() { return "Blog [id=" + id + ", title=" + title + ", content=" + content + ", user=" + user + ", date=" + date + ", statue=" + statue + "]"; } } <file_sep>package com.yctu.bbs.service; import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonParser; import com.yctu.bbs.model.Reply; import com.yctu.bbs.modelutil.Blogx; import com.yctu.bbs.util.Serverutil; import java.util.ArrayList; import java.util.Collections; import okhttp3.Callback; import okhttp3.FormBody; import okhttp3.Request; import okhttp3.RequestBody; /** * Created by qigang on 2017/1/16. 
*/ public class CBlog { public void GetAllBlogByAccount(String account, Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("account",account) .build(); Request request = new Request.Builder() .url(Serverutil.url+"getblogsbyaccount") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void GetCollectBlogByAccount(String account, Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("account",account) .build(); Request request = new Request.Builder() .url(Serverutil.url+"getcollect") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void report(String blogid,String account,Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("blogid",blogid) .add("account",account) .build(); Request request = new Request.Builder() .url(Serverutil.url+"report") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void coolect(String blogid,String account,Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("blogid",blogid) .add("account",account) .build(); Request request = new Request.Builder() .url(Serverutil.url+"collect") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void GetAllBlog( Callback callback){ RequestBody requestBody = new FormBody.Builder() .build(); Request request = new Request.Builder() .url(Serverutil.url+"getAllBlog") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void GetBlogById( String id,Callback callback){ RequestBody requestBody = new FormBody.Builder() .add("id",id) .build(); Request request = new Request.Builder() .url(Serverutil.url+"getblogbyid") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public void GetreplyById(String id,Callback callback){ RequestBody 
requestBody = new FormBody.Builder() .add("id",id) .build(); Request request = new Request.Builder() .url(Serverutil.url+"getReply") .post(requestBody) .build(); Serverutil.GetOkHttpClient().newCall(request).enqueue(callback); } public ArrayList<Reply> GetReplyList(String obj){ //用来解析请假信息 ArrayList<Reply> array = new ArrayList<Reply>(); Gson gson = new Gson(); JsonParser parser = new JsonParser(); JsonArray Jarray = parser.parse(obj).getAsJsonArray(); for(JsonElement object : Jarray ){ Reply cse = gson.fromJson( object , Reply.class); array.add(cse); } //Collections.reverse(array); return array; } public ArrayList<Blogx> GetBlogxList(String obj){ //用来解析请假信息 ArrayList<Blogx> array = new ArrayList<Blogx>(); Gson gson = new Gson(); JsonParser parser = new JsonParser(); JsonArray Jarray = parser.parse(obj).getAsJsonArray(); for(JsonElement object : Jarray ){ Blogx cse = gson.fromJson( object , Blogx.class); array.add(cse); } //Collections.reverse(array); return array; } } <file_sep>package com.yctu.bbs.model; /** * Created by qigang on 2017/1/14. 
*/ public class User { private int id; private String account; private String password; private String username; private String headpic; private String aword; private int statue; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getAccount() { return account; } public void setAccount(String account) { this.account = account; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public String getUsername() { return username; } public void setUsername(String username) { this.username = username; } public String getHeadpic() { return headpic; } public void setHeadpic(String headpic) { this.headpic = headpic; } public String getAword() { return aword; } public void setAword(String aword) { this.aword = aword; } public int getStatue() { return statue; } public void setStatue(int statue) { this.statue = statue; } @Override public String toString() { return "User [id=" + id + ", account=" + account + ", password=" + password + ", username=" + username + ", headpic=" + headpic + ", aword=" + aword + ", statue=" + statue + "]"; } }
5454e85df69d6411329fbc9e77ae9016d46ce7bd
[ "Java" ]
13
Java
QiGangs/BBS
d3acb3d19bf718db1d0433f0f22cb13e701297b9
f8614dfd687941d27fc6a016118d64fa9c059a51
refs/heads/master
<repo_name>Chimaytric/ParisWifiFinder<file_sep>/comp/root.component.js function RootController(rootFactory, $mdDialog, $mdSidenav){ console.log('Root component'); var vm = this; vm.records = []; vm.markers = []; vm.initMap = function() { var mapCenter = {lat: 48.8611171, lng: 2.3347824} vm.map = new google.maps.Map(document.getElementById('map'), { center: mapCenter, zoom: 13 }); }; vm.initMap(); vm.clearMarkers = function(callback){ vm.map.setCenter(new google.maps.LatLng(48.8611171, 2.3347824)); vm.map.setZoom(13); vm.markers.forEach(function(marker){ marker.setMap(null); }); vm.markers = []; callback(); } vm.placeMarker = function(hotspot){ $mdSidenav('left').toggle(); vm.clearMarkers(function(){ var marker = new google.maps.Marker({ position: hotspot.location, title: hotspot.name, animation: google.maps.Animation.DROP, icon: 'assets/images/wifiSpot.png', infoWindow: new google.maps.InfoWindow({ content: "<b>"+hotspot.name+"</b><br><em>"+hotspot.address+"</em>" }) }); marker.addListener('click', function() { marker.infoWindow.open(vm.map, marker); }); marker.setMap(vm.map); vm.markers.push(marker); }); } vm.showMenu = function($mdMenu, ev) { $mdMenu.open(ev); }; vm.openSidenav = function() { $mdSidenav('left').toggle(); }; vm.getAroundMe = function(){ vm.clearMarkers(function(){ if (navigator.geolocation) { navigator.geolocation.getCurrentPosition(function(position){ var position = {lat: position.coords.latitude, lng: position.coords.longitude}; vm.map.setCenter(new google.maps.LatLng(position.lat, position.lng)); vm.map.setZoom(15); var userLocationSpot = new google.maps.Marker({ position: position, map: vm.map, icon: 'assets/images/userLocation.png', title: 'You are here !' 
}); vm.markers.push(userLocationSpot); vm.records.forEach(function(district){ district.forEach(function(spot){ if(google.maps.geometry.spherical.computeDistanceBetween(new google.maps.LatLng(spot.location.lat, spot.location.lng), new google.maps.LatLng(position.lat, position.lng)) <= 1000){ var marker = new google.maps.Marker({ position: spot.location, title: spot.name, map: vm.map, animation: google.maps.Animation.DROP, icon: 'assets/images/wifiSpot.png', infoWindow: new google.maps.InfoWindow({ content: "<b>"+spot.name+"</b><br><em>"+spot.address+"</em>" }) }); marker.addListener('click', function() { marker.infoWindow.open(vm.map, marker); }); vm.markers.push(marker); } }); }); }); } }); } rootFactory.getNbResults().then(function(hits){ rootFactory.getAllResults(hits.nhits).then(function(data){ var unsorted = []; data.records.forEach(function(spot){ var temp = { name: spot.fields.nom_site.substring(0, 1)+spot.fields.nom_site.substring(1, spot.fields.nom_site.length).toLowerCase(), address: spot.fields.adresse.toLowerCase()+" "+spot.fields.arrondissement, district: spot.fields.arrondissement, location: {lat: spot.fields.geo_point_2d[0], lng: spot.fields.geo_point_2d[1]} }; var district = temp.district.substring(3, 5); if(district.substring(0, 1) === "0") district = district.substring(1, 2); district = district - 1; if(!(district in vm.records)){ vm.records[district] = [temp]; } else vm.records[district].push(temp); }); console.log(vm.records); }); }); } RootController.$inject = ['rootFactory', '$mdDialog', '$mdSidenav']; angular.module('characterSheetmanager.rootComponent', []).component('rootComponent', { templateUrl: 'comp/root.component.html', controller: RootController, controllerAs: "rootCtrl", bindings: {} }).factory('rootFactory', function($http){ return { getNbResults: function(){ var url = "https://opendata.paris.fr/api/records/1.0/search/?dataset=liste_des_sites_des_hotspots_paris_wifi&rows=0"; return $http({ method: 'GET', url: url 
}).then(function(response){ return response.data; }).catch(function(response){ console.log("[Error] [getNbResults] . "+response.status+" : "+response.statusText); }); }, getAllResults: function(total){ var url = "https://opendata.paris.fr/api/records/1.0/search/?dataset=liste_des_sites_des_hotspots_paris_wifi&rows="+total; return $http({ method: 'GET', url: url }).then(function(response){ return response.data; }).catch(function(response){ console.log("[Error] [getNbResults] . "+response.status+" : "+response.statusText); }); } } });<file_sep>/app.js 'use strict' angular.module('characterSheetmanager', [ 'ngMaterial', 'characterSheetmanager.rootComponent' ]);<file_sep>/README.md # ParisWifiFinder ## Description This project is a Proof of Concept of a functionnal app using [Angular Material](https://material.angularjs.org). It uses an [API](https://opendata.paris.fr/page/home/) provided by Paris city hall to locate open wireless networks provided by the city public services (museums, parks etc.) Check the demo right [Here](https://chimaytric.github.io/ParisWifiFinder/) ! **WARNING :** This app is for test and demo purpose only, I cannot be held responsible of it's accuracy and/or correctness ## Work in progress - [x] General interface - [x] Wireless spots list display - [x] Wireless spots map display - [x] Find networks around my position - [ ] Search function - [ ] Options
7973eec68fe7268192f055c9e13445ea87468d36
[ "JavaScript", "Markdown" ]
3
JavaScript
Chimaytric/ParisWifiFinder
f018bfb8d64fc51fb0395e5c4115a361a602416d
a59178067a9857cb8d4155414844b4ea609a2fff
refs/heads/master
<repo_name>Vitamal/vokss<file_sep>/atelier/models/client.py from django.db import models from django.urls import reverse from django.utils.translation import gettext_lazy as _ from atelier.models import Atelier from atelier.models.abstract_base import AbstractBaseModel class Client(AbstractBaseModel): first_name = models.CharField( max_length=30, verbose_name=_('first Name') ) last_name = models.CharField( max_length=30, verbose_name=_('second Name') ) tel_number = models.CharField( max_length=30, blank=True, verbose_name=_('tel. number') ) place = models.CharField( max_length=30, verbose_name=_('place') ) atelier = models.ForeignKey( Atelier, on_delete=models.CASCADE, verbose_name=_('atelier'), ) def __str__(self): """ to display an object in the Django admin site and as the value inserted into a template when it displays an object """ return '{} {}'.format(self.first_name, self.last_name) class Meta: ordering = ['first_name'] def get_absolute_url(self): """ Returns the url to access a particular client instance. 
""" return reverse('atelier:client_detail', args=[str(self.id)]) <file_sep>/atelier/tests/tests_views/test_profile_view.py import htmls from django.contrib.auth.models import User from model_mommy import mommy from atelier.models import Profile from django.urls import reverse_lazy from atelier.tests.tests_views.setup_premixin import SetUpPreMixin class ProfileDetaileViewTests(SetUpPreMixin): def test_profile_detail_view_not_logged_in(self): item = mommy.make('atelier.Profile') response = self.client.get(reverse_lazy('atelier:profile_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 404) def test_profile_detail_view_user(self): self.client.login(username='user', password='<PASSWORD>') item = mommy.make('atelier.Profile', atelier=self.user.profile.atelier) response = self.client.get(reverse_lazy('atelier:profile_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 404) def test_profile_detail_view_tailor_not_this_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') item = mommy.make('atelier.Profile') response = self.client.get(reverse_lazy('atelier:profile_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 404) def test_profile_detail_view_tailor(self): ''' test by the help of htmls module. 
(see https://github.com/espenak/htmls/) ''' user_2 = mommy.make('User') self.client.login(username='tailor', password='<PASSWORD>') kwargs = { 'id': 1, 'user': user_2, 'is_tailor': False, 'atelier': self.tailor.profile.atelier, } instance = mommy.make('atelier.Profile', **kwargs) response = self.client.get('/en/atelier/profile/{}/'.format(instance.id)) self.assertEqual(response.status_code, 200) selector = htmls.S(response.content) user = selector.one('.user').alltext_normalized atel = selector.one('.atelier').alltext_normalized is_tailor = selector.one('.is_tailor').alltext_normalized self.assertEqual(user, user_2.username) self.assertEqual(atel, self.tailor.profile.atelier.name) self.assertEqual(is_tailor, 'simple profile') self.assertTemplateUsed(response, 'atelier/profile_detail.html') def test_profile_detail_view_superuser(self): ''' test by the help of htmls module. (see https://github.com/espenak/htmls/) ''' user_2 = mommy.make('User') self.client.login(username='superuser', password='<PASSWORD>') kwargs = { 'id': 1, 'user': user_2, 'is_tailor': False, 'atelier': self.superuser.profile.atelier, } instance = mommy.make('atelier.Profile', **kwargs) response = self.client.get('/en/atelier/profile/{}/'.format(instance.id)) selector = htmls.S(response.content) user = selector.one('.user').alltext_normalized atel = selector.one('.atelier').alltext_normalized is_tailor = selector.one('.is_tailor').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(user, user_2.username) self.assertEqual(atel, self.tailor.profile.atelier.name) self.assertEqual(is_tailor, 'simple profile') self.assertTemplateUsed(response, 'atelier/profile_detail.html') class ProfileCreateViewTests(SetUpPreMixin): def test_profile_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:profile_form')) self.assertEqual(response.status_code, 404) def test_client_create_view_user(self): self.client.login(username='user', password='<PASSWORD>') 
response = self.client.post(reverse_lazy('atelier:profile_form')) self.assertEqual(response.status_code, 404) def test_profile_create_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:profile_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') def test_profile_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:profile_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') class ProfileEditViewTests(SetUpPreMixin): def test_profile_edit_view_not_logged_in(self): instance = mommy.make('atelier.Profile') response = self.client.post(reverse_lazy('atelier:profile_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_profile_edit_view_user(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.Profile', atelier=self.user.profile.atelier) response = self.client.post(reverse_lazy('atelier:profile_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_profile_edit_view_tailor_not_in_atelier(self): instance = mommy.make('atelier.Profile') tailor_user = User.objects.create_user('tuser', '<EMAIL>', '<PASSWORD>') tailor_profile = mommy.make('atelier.Profile', user=tailor_user, is_tailor=True) self.client.login(username='tuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:profile_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_profile_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Profile', atelier=self.tailor.profile.atelier) response = self.client.post( reverse_lazy('atelier:profile_update_form', kwargs={'pk': instance.id}), 
data={ 'email': '<EMAIL>', 'is_tailor': False, }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/profile/') instance.refresh_from_db() self.assertEqual(instance.user.email, '<EMAIL>') self.assertEqual(instance.is_tailor, False) def test_profile_edit_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.Profile') response = self.client.post( reverse_lazy('atelier:profile_update_form', kwargs={'pk': instance.id}), data={ 'email': '<EMAIL>', 'is_tailor': False, }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/profile/') instance.refresh_from_db() self.assertEqual(instance.user.email, '<EMAIL>') self.assertEqual(instance.is_tailor, False) class ProfileDeleteViewTests(SetUpPreMixin): def test_profile_delete_view_no_logged_in(self): instance = mommy.make('atelier.Profile') response = self.client.get('/en/atelier/profile/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_profile_delete_view_tailor_not_in_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Profile') response = self.client.get('/en/atelier/profile/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_profile_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Profile', atelier=self.user.profile.atelier) self.assertEqual(Profile.objects.count(), 4) # + 3 profile instances from SetUpPreMixin response = self.client.post('/en/atelier/profile/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/profile/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/profile/') self.assertEqual(Profile.objects.count(), 3) def test_profile_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = 
mommy.make('atelier.Profile') self.assertEqual(Profile.objects.count(), 4) # + 3 profile instances from SetUpPreMixin response = self.client.post('/en/atelier/profile/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/profile/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/profile/') self.assertEqual(Profile.objects.count(), 3) class ProfileListViewTests(SetUpPreMixin): def test_profile_list_view_no_logged_in(self): response = self.client.get('/en/atelier/profile/') self.assertEqual(response.status_code, 404) def test_profile_list_view_user(self): self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Profile', atelier=self.user.profile.atelier) response = self.client.get('/en/atelier/profile/') self.assertEqual(response.status_code, 404) def test_profile_list_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') mommy.make('atelier.Profile', atelier=self.tailor.profile.atelier, _quantity=2) # this atelier profiles mommy.make('atelier.Profile', _quantity=10) # not this atelier profiles (tailor has not see) response = self.client.get('/en/atelier/profile/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/profile_list.html') self.assertEqual(len(response.context['object_list']), 5) # + 3 profile instances from SetUpPreMixin def test_profile_list_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') mommy.make('atelier.Profile', atelier=self.superuser.profile.atelier, _quantity=3) response = self.client.get('/en/atelier/profile/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/profile_list.html') self.assertEqual(len(response.context['object_list']), 6) # + 3 profile instances from SetUpPreMixin def test_profile_pagination_is_ten(self): self.client.login(username='superuser', password='<PASSWORD>') # Create an instances more than 10 for 
pagination tests (13 instances) mommy.make('atelier.Profile', _quantity=13) resp = self.client.get(reverse_lazy('atelier:profile_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 10) def test_profile_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='superuser', password='<PASSWORD>') mommy.make('atelier.Profile', _quantity=13) resp = self.client.get(reverse_lazy('atelier:profile_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 6) # + 3 profile instances from SetUpPreMixin <file_sep>/atelier/tests/tests_views/test_allowance_discount_view.py import htmls from model_mommy import mommy from atelier.models import AllowanceDiscount from django.urls import reverse_lazy from atelier.tests.tests_views.setup_premixin import SetUpPreMixin class AllowanceDiscountDetailViewTests(SetUpPreMixin): def test_allowance_discount_detail_view_not_logged_in(self): item = mommy.make('atelier.AllowanceDiscount') response = self.client.get(reverse_lazy('atelier:allowance_discount_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 302) self.assertTrue(response.url.startswith('/accounts/login/')) def test_allowance_discount_detail_view(self): self.client.login(username='user', password='<PASSWORD>') kwargs = { 'id': 1, 'name': 'Name', 'coefficient': 1, 'label': 'Label', } instance = mommy.make('atelier.AllowanceDiscount', **kwargs) response = self.client.get('/en/atelier/allowance_discount/{}/'.format(instance.id)) # get response in way one selector = htmls.S(response.content) type = 
selector.one('.type').alltext_normalized coefficient = selector.one('.coefficient').alltext_normalized name = selector.one('.name').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(type, 'Label') self.assertEqual(coefficient, '1.00') self.assertEqual(name, 'Name') self.assertTemplateUsed(response, 'atelier/allowance_discount_detail.html') class AllowanceDiscountCreateViewTests(SetUpPreMixin): def test_allowance_discount_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:allowance_discount_form')) self.assertEqual(response.status_code, 404) def test_allowance_discount_create_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:allowance_discount_form')) self.assertEqual(response.status_code, 404) def test_allowance_discount_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:allowance_discount_form')) # get response in way two self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') class AllowanceDiscountEditViewTests(SetUpPreMixin): def test_allowance_discount_edit_view_not_logged_in(self): instance = mommy.make('atelier.AllowanceDiscount') response = self.client.post(reverse_lazy('atelier:allowance_discount_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_allowance_discount_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.AllowanceDiscount') response = self.client.post(reverse_lazy('atelier:allowance_discount_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_allowance_discount_edit_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.AllowanceDiscount', name='Name') 
response = self.client.post( reverse_lazy('atelier:allowance_discount_update_form', kwargs={'pk': instance.id}), data={ 'name': 'SomeName', 'coefficient': 1, 'label': 'SomeLabel', }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/allowance_discount/{}/'.format(instance.id)) instance.refresh_from_db() print(instance.coefficient) self.assertEqual(AllowanceDiscount.objects.count(), 1) self.assertEqual(AllowanceDiscount.objects.get(id=instance.id), instance) self.assertEqual(AllowanceDiscount.objects.get(id=instance.id).name, 'SomeName') self.assertEqual(AllowanceDiscount.objects.get(id=instance.id).coefficient, 1.00) self.assertEqual(AllowanceDiscount.objects.get(id=instance.id).label, 'SomeLabel') self.assertEqual(AllowanceDiscount.objects.get(id=instance.id).last_updated_by, self.superuser) class AllowanceDiscountDeleteViewTests(SetUpPreMixin): def test_allowance_discount_delete_view_no_logged_in(self): instance = mommy.make('atelier.AllowanceDiscount') response = self.client.get('/en/atelier/allowance_discount/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_allowance_discount_delete_view_user(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.AllowanceDiscount') response = self.client.get('/en/atelier/allowance_discount/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_allowance_discount_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.AllowanceDiscount') response = self.client.get('/en/atelier/allowance_discount/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_allowance_discount_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.AllowanceDiscount') self.assertEqual(AllowanceDiscount.objects.count(), 1) response = 
self.client.post('/en/atelier/allowance_discount/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/allowance_discount/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/allowance_discount/') self.assertEqual(AllowanceDiscount.objects.count(), 0) class AllowanceDiscountListViewTests(SetUpPreMixin): def test_allowance_discount_list_view_no_logged_in(self): response = self.client.get('/en/atelier/allowance_discount/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_allowance_discount_list_view_user(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/allowance_discount/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/allowance_discount_list.html') def test_allowance_discount_list_pagination_is_ten(self): self.client.login(username='user', password='<PASSWORD>') self.allowance_discount = mommy.make('atelier.AllowanceDiscount', _quantity=13) # Create an instances more than 10 for pagination tests (13 instances) resp = self.client.get(reverse_lazy('atelier:allowance_discount_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertTrue(len(resp.context['object_list']) == 10) def test_allowance_discount_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='user', password='<PASSWORD>') self.allowance_discount = mommy.make('atelier.AllowanceDiscount', _quantity=13) resp = self.client.get(reverse_lazy('atelier:allowance_discount_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) 
self.assertEqual(len(resp.context['object_list']), 3) <file_sep>/atelier/tests/tests_views/test_minimal_style_view.py import htmls from model_mommy import mommy from atelier.models import MinimalStyle from django.urls import reverse_lazy from atelier.tests.tests_views.setup_premixin import SetUpPreMixin class MinimalStyleViewTests(SetUpPreMixin): def test_minimal_style_detail_view_not_logged_in(self): item = mommy.make('atelier.MinimalStyle') response = self.client.get(reverse_lazy('atelier:minimal_style_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 302) self.assertTrue(response.url.startswith('/accounts/login/')) def test_minimal_style_detail_view_user(self): self.client.login(username='user', password='<PASSWORD>') kwargs = { 'id': 1, 'name': 'MSName', 'group': 'Group1', } instance = mommy.make('atelier.MinimalStyle', **kwargs) response = self.client.get('/en/atelier/minimal_style/{}/'.format(instance.id)) # get response in way one selector = htmls.S(response.content) name = selector.one('.name').alltext_normalized group = selector.one('.group').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(group, 'Group1') self.assertEqual(name, 'MSName') self.assertTemplateUsed(response, 'atelier/minimal_style_detail.html') class MinimalStyleCreateViewTests(SetUpPreMixin): def test_minimal_style_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:minimal_style_form')) self.assertEqual(response.status_code, 404) def test_minimal_style_create_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:minimal_style_form')) self.assertEqual(response.status_code, 404) def test_minimal_style_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = 
self.client.post(reverse_lazy('atelier:minimal_style_form')) # get response in way two self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') class MinimalStyleEditViewTests(SetUpPreMixin): def test_minimal_style_edit_view_not_logged_in(self): instance = mommy.make('atelier.MinimalStyle') response = self.client.post(reverse_lazy('atelier:minimal_style_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_minimal_style_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.MinimalStyle') response = self.client.post(reverse_lazy('atelier:minimal_style_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_minimal_style_edit_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.MinimalStyle', name='Name', group='GR0') response = self.client.post( reverse_lazy('atelier:minimal_style_update_form', kwargs={'pk': instance.id}), data={ 'name': 'MSName', 'group': 'Group1', }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/minimal_style/{}/'.format(instance.id)) instance.refresh_from_db() self.assertEqual(MinimalStyle.objects.count(), 1) self.assertEqual(MinimalStyle.objects.get(id=instance.id), instance) self.assertEqual(MinimalStyle.objects.get(id=instance.id).name, 'MSName') self.assertEqual(MinimalStyle.objects.get(id=instance.id).group, 'Group1') self.assertEqual(MinimalStyle.objects.get(id=instance.id).last_updated_by, self.superuser) class MinimalStyleDeleteViewTests(SetUpPreMixin): def test_minimal_style_delete_view_no_logged_in(self): instance = mommy.make('atelier.MinimalStyle') response = self.client.get('/en/atelier/minimal_style/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_minimal_style_delete_view_user(self): 
self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.MinimalStyle') response = self.client.get('/en/atelier/minimal_style/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_minimal_style_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.MinimalStyle') response = self.client.get('/en/atelier/minimal_style/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_minimal_style_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.MinimalStyle') self.assertEqual(MinimalStyle.objects.count(), 1) response = self.client.post('/en/atelier/minimal_style/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/minimal_style/') self.assertEqual(MinimalStyle.objects.count(), 0) class MinimalStyleListViewTests(SetUpPreMixin): def test_minimal_style_list_view_no_logged_in(self): response = self.client.get('/en/atelier/minimal_style/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_minimal_style_list_view_user(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/minimal_style/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/minimal_style_list.html') def test_minimal_style_list_pagination_is_ten(self): self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.MinimalStyle', _quantity=13) # Create an instances more than 10 for pagination tests (13 instances) resp = self.client.get(reverse_lazy('atelier:minimal_style_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) 
self.assertTrue(resp.context['is_paginated']) self.assertTrue(len(resp.context['object_list']) == 10) def test_minimal_style_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.MinimalStyle', _quantity=13) resp = self.client.get(reverse_lazy('atelier:minimal_style_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 3) <file_sep>/atelier/migrations/0001_initial.py # Generated by Django 2.2.3 on 2019-11-25 18:40 import datetime from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='AllowanceDiscount', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=255, verbose_name='name')), ('coefficient', models.DecimalField(decimal_places=2, max_digits=5, verbose_name='coefficient')), ('label', models.CharField(max_length=255, verbose_name='group')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Atelier', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', 
models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=150, verbose_name='name')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Client', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('first_name', models.CharField(max_length=30, verbose_name='first Name')), ('last_name', models.CharField(max_length=30, verbose_name='second Name')), ('tel_number', models.CharField(blank=True, max_length=30, verbose_name='tel. 
number')), ('place', models.CharField(max_length=30, verbose_name='place')), ('atelier', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='atelier.Atelier', verbose_name='atelier')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['first_name'], }, ), migrations.CreateModel( name='ComplicationElement', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=264, verbose_name='name')), ('base_price', models.DecimalField(decimal_places=2, max_digits=5, verbose_name='base price')), ('complexity', models.DecimalField(decimal_places=2, default=1, max_digits=3, verbose_name='complexity')), ('group', models.CharField(default='4', max_length=255, verbose_name='group name')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['group'], }, ), migrations.CreateModel( name='Fabric', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=264, verbose_name='name')), ('group', models.CharField(choices=[('GR0', 'Group 0'), ('GR1', 'Group I'), ('GR2', 'Group II'), ('GR3', 'Group 
III'), ('GR4', 'Group IV')], default='GR2', max_length=3, verbose_name='group')), ('complexity_factor', models.DecimalField(decimal_places=2, default=1, max_digits=5, verbose_name='complexity factor')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['group'], }, ), migrations.CreateModel( name='MinimalStyle', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('name', models.TextField(max_length=264, verbose_name='name')), ('group', models.CharField(max_length=264, verbose_name='product group')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['group'], }, ), migrations.CreateModel( name='Tailor', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('atelier', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='atelier.Atelier', verbose_name='atelier')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, 
related_name='+', to=settings.AUTH_USER_MODEL)), ('name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='name')), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Product', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('name', models.CharField(max_length=264, verbose_name='name')), ('base_price', models.DecimalField(decimal_places=2, max_digits=10, verbose_name='base price')), ('atelier', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='atelier.Atelier', verbose_name='Atelier')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('minimal_style', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='atelier.MinimalStyle', verbose_name='minimal style')), ], options={ 'ordering': ['name'], }, ), migrations.CreateModel( name='Order', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_datetime', models.DateTimeField(auto_now_add=True)), ('last_updated_datetime', models.DateTimeField(auto_now=True)), ('processing_category', models.CharField(choices=[('1', 'Processing category 1'), ('2', 'Processing category 2')], default='2', max_length=1, verbose_name='processing category')), ('order_date', models.DateField(default=datetime.date.today, verbose_name='order date')), ('deadline', models.DateField(blank=True, default=datetime.date(2019, 12, 9), null=True, verbose_name='deadline')), ('allowance_discount', models.ManyToManyField(blank=True, 
to='atelier.AllowanceDiscount', verbose_name='allowance/discount')), ('atelier', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='atelier.Atelier', verbose_name='atelier')), ('client', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='atelier.Client', verbose_name='client')), ('complication_elements', models.ManyToManyField(blank=True, to='atelier.ComplicationElement', verbose_name='complication elements')), ('created_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('fabric', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='atelier.Fabric', verbose_name='fabric')), ('last_updated_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='atelier.Product', verbose_name='product')), ('tailor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='tailor')), ], options={ 'ordering': ['order_date'], }, ), ] <file_sep>/atelier/tests/tests_forms/tests_forms.py import datetime from django.contrib.auth.models import User from django.test import TestCase from model_mommy import mommy from atelier.forms import ProfileRegisterForm, ClientForm, ProfileChangeForm, AllowanceDiscountForm, AtelierForm, \ ComplicationElementForm, FabricForm, MinimalStyleForm, OrderForm, ProductForm from atelier.models import Profile, Client class TestProfileRegisterForm(TestCase): def test_valid_form(self): data = {'username': 'user', 'is_tailor': True, 'email': '<EMAIL>', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>', } form = ProfileRegisterForm(data=data) print(form.error_messages) self.assertTrue(form.is_valid()) def test_invalid_form_user_field(self): user = mommy.make(User, username='user', email='<EMAIL>', 
password='<PASSWORD>') data = {'username': 'user', # A user with that username already exists. 'is_tailor': True, 'email': '<EMAIL>', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>', } form = ProfileRegisterForm(data=data) print(form.errors) self.assertFalse(form.is_valid()) self.assertEqual(form.errors, {'username': ['A user with that username already exists.']}) def test_invalid_form_email_field(self): user = mommy.make(User, username='user', email='<EMAIL>', password='<PASSWORD>') data = {'username': 'newuser', # A user with that username already exists. 'is_tailor': True, 'email': '<EMAIL>', 'password1': '<PASSWORD>', 'password2': '<PASSWORD>', } form = ProfileRegisterForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['email'], ['Email Already Exists']) class TestProfileChangeForm(TestCase): def test_valid_form(self): user = mommy.make(User, username='user', email='<EMAIL>', password='<PASSWORD>') profile = mommy.make(Profile, user=user, is_tailor=False) form = ProfileChangeForm({ 'is_tailor': True, 'email': '<EMAIL>', }, instance=profile) self.assertTrue(form.is_valid()) class TestClientForm(TestCase): def test_valid_client_form(self): data = {'first_name': 'John', 'last_name': 'name', 'tel_number': '123456', 'place': 'place', } form = ClientForm(data=data) self.assertTrue(form.is_valid()) def test_invalid_client_form(self): client = mommy.make('atelier.Client', first_name='name', last_name='last', tel_number=123456) data = {'first_name': client.first_name, 'last_name': client.last_name, 'tel_number': client.tel_number, 'place': '', } form = ClientForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['place'], ['This field is required.']) class TestAllowanceDiscountForm(TestCase): def test_valid_form(self): data = {'name': 'John', 'coefficient': 1, 'label': '123456'} form = AllowanceDiscountForm(data=data) self.assertTrue(form.is_valid()) def test_invalid_form(self): data = {'name': 'John', 
'coefficient': 'coefficient', 'label': '123456'} form = AllowanceDiscountForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['coefficient'], ['Enter a number.']) class TestAtelierForm(TestCase): def test_valid_form(self): data = {'name': 'MyAtelier', } form = AtelierForm(data=data) self.assertTrue(form.is_valid()) def test_invalid_form(self): data = {'name': '', } form = AtelierForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['name'], ['This field is required.']) class TestComplicationElementForm(TestCase): def test_valid_form(self): data = { 'name': 'John', 'base_price': 1, 'complexity': 2, 'group': '2' } form = ComplicationElementForm(data=data) self.assertTrue(form.is_valid()) def test_invalid_form(self): data = { 'name': 'John', 'base_price': 'A', 'complexity': 2, 'group': '2' } form = ComplicationElementForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['base_price'], ['Enter a number.']) class TestFabricForm(TestCase): def test_valid_form(self): data = { 'name': 'John', 'complexity_factor': 1, 'group': 'GR2' } form = FabricForm(data=data) self.assertTrue(form.is_valid()) def test_invalid_form(self): data = { 'name': 'John', 'complexity_factor': 1, 'group': '2' } form = FabricForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['group'], ['Select a valid choice. 
2 is not one of the available choices.']) class TestMinimalStyleForm(TestCase): def test_valid_form(self): data = { 'name': 'John', 'group': 'GR2' } form = MinimalStyleForm(data=data) self.assertTrue(form.is_valid()) def test_invalid_form(self): data = { 'name': 'John', 'group': '' } form = MinimalStyleForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['group'], ['This field is required.']) class TestOrderForm(TestCase): def test_valid_form(self): client = mommy.make('atelier.Client') product = mommy.make('atelier.Product') fabric = mommy.make('atelier.Fabric') complication_elements = mommy.make('atelier.ComplicationElement') allowance_discount = mommy.make('atelier.AllowanceDiscount') performer = mommy.make('User') now = datetime.datetime.now().date() data = { 'client': client.pk, 'product': product.pk, 'fabric': fabric.pk, 'processing_category': '1', 'complication_elements': [complication_elements.pk], 'allowance_discount': [allowance_discount.pk], 'performer': performer.pk, 'order_date': now, 'deadline': now + datetime.timedelta(weeks=2), 'is_closed': 'False', } form = OrderForm(data=data) self.assertTrue(form.is_valid()) def test_invalid_form(self): client = mommy.make('atelier.Client') product = mommy.make('atelier.Product') fabric = mommy.make('atelier.Fabric') complication_elements = mommy.make('atelier.ComplicationElement') allowance_discount = mommy.make('atelier.AllowanceDiscount') performer = mommy.make('User') now = datetime.datetime.now().date() data = { 'client': '', 'product': product.pk, 'fabric': fabric.pk, 'processing_category': '1', 'complication_elements': [complication_elements.pk], 'allowance_discount': [allowance_discount.pk], 'performer': performer.pk, 'order_date': now, 'deadline': now + datetime.timedelta(weeks=2), 'is_closed': 'False', } form = OrderForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['client'], ['This field is required.']) class TestProductForm(TestCase): def 
test_valid_form(self): m_s = mommy.make('atelier.MinimalStyle') data = { 'name': 'John', 'minimal_style': m_s.pk, 'base_price': 100, } form = ProductForm(data=data) self.assertTrue(form.is_valid()) def test_invalid_form(self): m_s = mommy.make('atelier.MinimalStyle') data = { 'name': 'John', 'minimal_style': m_s.pk, 'base_price': '', } form = ProductForm(data=data) self.assertFalse(form.is_valid()) self.assertEqual(form.errors['base_price'], ['This field is required.']) <file_sep>/atelier/forms/client_form.py from django import forms from atelier.models import Client class ClientForm(forms.ModelForm): class Meta: model = Client fields = ['first_name', 'last_name', 'tel_number', 'place'] <file_sep>/atelier/forms/complication_element_form.py from django import forms from atelier.models import ComplicationElement class ComplicationElementForm(forms.ModelForm): class Meta: model = ComplicationElement fields = ['name', 'base_price', 'complexity', 'group'] <file_sep>/atelier/tests/tests_views/test_complication_element_view.py import htmls from model_mommy import mommy from atelier.models import ComplicationElement from django.urls import reverse_lazy from atelier.tests.tests_views.setup_premixin import SetUpPreMixin class ComplicationElementDetailViewTests(SetUpPreMixin): def test_complication_element_detail_view_not_logged_in(self): item = mommy.make('atelier.ComplicationElement') response = self.client.get(reverse_lazy('atelier:complication_element_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 302) self.assertTrue(response.url.startswith('/accounts/login/')) def test_complication_element_detail_view_user(self): self.client.login(username='user', password='<PASSWORD>') kwargs = { 'id': 1, 'name': 'Name', 'complexity': 2, 'base_price': 100, 'group': 'GR1', } instance = mommy.make('atelier.ComplicationElement', **kwargs) response = self.client.get( 
'/en/atelier/complication_element/{}/'.format(instance.id)) # get response in way one selector = htmls.S(response.content) name = selector.one('.name').alltext_normalized complexity = selector.one('.complexity').alltext_normalized group = selector.one('.group').alltext_normalized base_price = selector.one('.base_price').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(complexity, '2.00') self.assertEqual(group, 'GR1') self.assertEqual(base_price, '100.00') self.assertEqual(name, 'Name') self.assertTemplateUsed(response, 'atelier/complication_element_detail.html') class ComplicationElementCreateViewTests(SetUpPreMixin): def test_complication_element_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:complication_element_form')) self.assertEqual(response.status_code, 404) def test_complication_element_create_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:complication_element_form')) self.assertEqual(response.status_code, 404) def test_complication_element_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:complication_element_form')) # get response in way two self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') class ComplicationElementEditViewTests(SetUpPreMixin): def test_complication_element_edit_view_not_logged_in(self): instance = mommy.make('atelier.ComplicationElement') response = self.client.post( reverse_lazy('atelier:complication_element_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_complication_element_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.ComplicationElement') response = self.client.post( reverse_lazy('atelier:complication_element_update_form', 
kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_complication_element_edit_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.ComplicationElement', name='Name') response = self.client.post( reverse_lazy('atelier:complication_element_update_form', kwargs={'pk': instance.id}), data={ 'name': 'SomeName', 'complexity': 2, 'base_price': 100, 'group': 'GR1', 'last_updated_by': 'user', }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/complication_element/{}/'.format(instance.id)) instance.refresh_from_db() self.assertEqual(ComplicationElement.objects.count(), 1) self.assertEqual(ComplicationElement.objects.get(id=instance.id), instance) self.assertEqual(ComplicationElement.objects.get(id=instance.id).name, 'SomeName') class ComplicationElementDeleteViewTests(SetUpPreMixin): def test_complication_element_delete_view_no_logged_in(self): instance = mommy.make('atelier.ComplicationElement') response = self.client.get('/en/atelier/complication_element/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_complication_element_delete_view_user(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.ComplicationElement') response = self.client.get('/en/atelier/complication_element/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_complication_element_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.ComplicationElement') response = self.client.get('/en/atelier/complication_element/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_complication_element_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.ComplicationElement') 
self.assertEqual(ComplicationElement.objects.count(), 1) response = self.client.post('/en/atelier/complication_element/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/complication_element/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/complication_element/') self.assertEqual(ComplicationElement.objects.count(), 0) class ComplicationElementListViewTests(SetUpPreMixin): def test_complication_element_list_view_no_logged_in(self): response = self.client.get('/en/atelier/complication_element/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_complication_element_list_view_user(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/complication_element/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/complication_element_list.html') def test_complication_element_list_pagination_is_ten(self): self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.ComplicationElement', _quantity=13) # Create an instances more than 10 for pagination tests (13 instances) resp = self.client.get(reverse_lazy('atelier:complication_element_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertTrue(len(resp.context['object_list']) == 10) def test_complication_element_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.ComplicationElement', _quantity=13) resp = self.client.get(reverse_lazy('atelier:complication_element_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) 
self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 3) <file_sep>/atelier/forms/fabric_form.py from django import forms from atelier.models import Fabric class FabricForm(forms.ModelForm): class Meta: model = Fabric fields = ['name', 'group', 'complexity_factor'] <file_sep>/atelier/tests/tests_views/test_fabric_view.py import htmls from model_mommy import mommy from atelier.models import Fabric from django.urls import reverse_lazy from atelier.tests.tests_views.setup_premixin import SetUpPreMixin class FabricDetailViewTests(SetUpPreMixin): def test_fabric_detail_view_not_logged_in(self): item = mommy.make('atelier.Fabric') response = self.client.get(reverse_lazy('atelier:fabric_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 302) self.assertTrue(response.url.startswith('/accounts/login/')) def test_fabric_detail_view_user(self): self.client.login(username='user', password='<PASSWORD>') kwargs = { 'id': 1, 'name': 'Name', 'complexity_factor': 2, 'group': 'GR1', # choice field } instance = mommy.make('atelier.Fabric', **kwargs) response = self.client.get('/en/atelier/fabric/{}/'.format(instance.id)) # get response in way one selector = htmls.S(response.content) name = selector.one('.name').alltext_normalized factor = selector.one('.factor').alltext_normalized group = selector.one('.group').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(factor, '2.00') self.assertEqual(group, 'GR1') self.assertEqual(name, 'Name') self.assertTemplateUsed(response, 'atelier/fabric_detail.html') class FabricCreateViewTests(SetUpPreMixin): def test_fabric_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:fabric_form')) self.assertEqual(response.status_code, 404) def test_fabric_create_view_tailor(self): self.client.login(username='tailor', 
password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:fabric_form')) self.assertEqual(response.status_code, 404) def test_fabric_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:fabric_form')) # get response in way two self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') class FabricEditViewTests(SetUpPreMixin): def test_fabric_edit_view_not_logged_in(self): instance = mommy.make('atelier.Fabric') response = self.client.post(reverse_lazy('atelier:fabric_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_fabric_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Fabric') response = self.client.post(reverse_lazy('atelier:fabric_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_fabric_edit_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.Fabric', name='Name', complexity_factor=1, group='GR0') response = self.client.post( reverse_lazy('atelier:fabric_update_form', kwargs={'pk': instance.id}), data={ 'name': 'OtherName', 'complexity_factor': 2, 'group': 'GR1', # choice field }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/fabric/{}/'.format(instance.id)) instance.refresh_from_db() self.assertEqual(Fabric.objects.count(), 1) self.assertEqual(Fabric.objects.get(id=instance.id), instance) self.assertEqual(Fabric.objects.get(id=instance.id).name, 'OtherName') self.assertEqual(Fabric.objects.get(id=instance.id).complexity_factor, 2.00) self.assertEqual(Fabric.objects.get(id=instance.id).group, 'GR1') self.assertEqual(Fabric.objects.get(id=instance.id).last_updated_by, self.superuser) class FabricDeleteViewTests(SetUpPreMixin): def 
test_fabric_delete_view_no_logged_in(self): instance = mommy.make('atelier.Fabric') response = self.client.get('/en/atelier/fabric/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_fabric_delete_view_user(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.Fabric') response = self.client.get('/en/atelier/fabric/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_fabric_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Fabric') response = self.client.get('/en/atelier/fabric/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_fabric_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.Fabric') self.assertEqual(Fabric.objects.count(), 1) response = self.client.post('/en/atelier/fabric/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/fabric/') self.assertEqual(Fabric.objects.count(), 0) class FabricListViewTests(SetUpPreMixin): def test_fabric_list_view_no_logged_in(self): response = self.client.get('/en/atelier/fabric/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_fabric_list_view_user(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/fabric/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/fabric_list.html') def test_fabric_list_pagination_is_ten(self): self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Fabric', _quantity=13) # Create an instances more than 10 for pagination tests (13 instances) resp = 
self.client.get(reverse_lazy('atelier:fabric_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertTrue(len(resp.context['object_list']) == 10) def test_fabric_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Fabric', _quantity=13) resp = self.client.get(reverse_lazy('atelier:fabric_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 3) <file_sep>/atelier/models/fabric.py from django.db import models from django.urls import reverse from django.utils.translation import gettext_lazy as _ from atelier.models import AbstractBaseModel class Fabric(AbstractBaseModel): GROUP0 = 'GR0' GROUP1 = 'GR1' GROUP2 = 'GR2' GROUP3 = 'GR3' GROUP4 = 'GR4' FABRIC_GROUPS = [ (GROUP0, _('Group 0')), (GROUP1, _('Group I')), (GROUP2, _('Group II')), (GROUP3, _('Group III')), (GROUP4, _('Group IV')), ] name = models.CharField( max_length=264, verbose_name=_('name') ) group = models.CharField( max_length=3, choices=FABRIC_GROUPS, default=GROUP2, verbose_name=_('group') ) complexity_factor = models.DecimalField( default=1, max_digits=5, decimal_places=2, verbose_name=_('complexity factor') ) def __str__(self): """ to display an object in the Django admin site and as the value inserted into a template when it displays an object """ return self.name class Meta: ordering = ['group'] def get_absolute_url(self): """ Returns the url to access a particular client instance. 
""" return reverse('atelier:fabric_detail', args=[str(self.id)]) <file_sep>/atelier/views/profile_view.py from django.contrib.auth.models import User from django.http import HttpResponseRedirect from django.views.generic import FormView from atelier.models import Profile from django.views import generic from atelier.forms import ProfileRegisterForm, ProfileChangeForm from django.urls import reverse_lazy from atelier.views.base_view import AtelierFilterObjectsPreMixin, BaseListView, TailorPermissionPreMixin, \ BaseDetailView, BaseDeleteView, BaseUpdateView class ProfileDetailView(TailorPermissionPreMixin, AtelierFilterObjectsPreMixin, BaseDetailView): model = Profile fields = '__all__' class ProfileListView(AtelierFilterObjectsPreMixin, TailorPermissionPreMixin, BaseListView): model = Profile class ProfileCreateView(TailorPermissionPreMixin, FormView): template_name = 'atelier/create_form.html' form_class = ProfileRegisterForm def get_initial(self): """ Returns the initial data to use for atelier form field. """ initial = super().get_initial() initial['atelier'] = self.request.user.profile.atelier return initial def get_success_url(self): return reverse_lazy('atelier:profile_list') def form_valid(self, form): # The default implementation for form_valid() simply redirects to the success_url. 
user = User.objects.create( email=form.cleaned_data['email'], username=form.cleaned_data['username'], ) user.set_password(form.cleaned_data['<PASSWORD>']) user.save() Profile.objects.create( user=user, atelier=self.request.user.profile.atelier, is_tailor=form.cleaned_data['is_tailor'], created_by=self.request.user, last_updated_by=self.request.user, ) return super().form_valid(form) class ProfileChangeView(AtelierFilterObjectsPreMixin, TailorPermissionPreMixin, BaseUpdateView): model = Profile template_name = 'atelier/create_form.html' form_class = ProfileChangeForm def get_success_url(self): return reverse_lazy('atelier:profile_list') def get_profile_object(self): profile_id = self.kwargs.get('pk') return Profile.objects.get(id=profile_id) def get_initial(self): data = { 'email': self.get_profile_object().user.email, 'is_tailor': self.get_profile_object().is_tailor } return data def form_valid(self, form): # The default implementation for form_valid() simply redirects to the success_url. profile = self.get_profile_object() profile.is_tailor = form.cleaned_data['is_tailor'] profile.user.email = form.cleaned_data['email'] profile.last_updated_by = self.request.user profile.full_clean() profile.save() profile.user.save() return super().form_valid(form) class ProfileDeleteView(TailorPermissionPreMixin, AtelierFilterObjectsPreMixin, BaseDeleteView): model = Profile success_url = reverse_lazy('atelier:profile_list') template_name = 'atelier/delete_form.html' def get_user_object(self): profile_id = self.kwargs.get('pk') profile = Profile.objects.get(pk=profile_id) return profile.user def delete(self, request, *args, **kwargs): """ Overriding the delete() method to delete User instances, and according Profile instance will be deleted too. 
""" self.object = self.get_user_object() success_url = self.get_success_url() self.object.delete() return HttpResponseRedirect(success_url) def get_success_url(self): return reverse_lazy('atelier:profile_list') <file_sep>/atelier/views/product_view.py from atelier.models import Product from atelier.forms import ProductForm from django.urls import reverse_lazy from atelier.views.base_view import AtelierFilterObjectsPreMixin, BaseListView, TailorPermissionPreMixin, \ BaseDetailView, BaseDeleteView, BaseUpdateView, BaseCreateView class ProductDetailView(AtelierFilterObjectsPreMixin, BaseDetailView): model = Product fields = '__all__' class ProductListView(AtelierFilterObjectsPreMixin, BaseListView): model = Product class ProductCreateView(TailorPermissionPreMixin, BaseCreateView): model = Product form_class = ProductForm template_name = 'atelier/create_form.html' class ProductUpdateView(TailorPermissionPreMixin, AtelierFilterObjectsPreMixin, BaseUpdateView): model = Product form_class = ProductForm template_name = 'atelier/create_form.html' class ProductDeleteView(TailorPermissionPreMixin, AtelierFilterObjectsPreMixin, BaseDeleteView): model = Product success_url = reverse_lazy('atelier:product_list') template_name = 'atelier/delete_form.html' <file_sep>/README.rst ============= VOKSS Atelier ============= Atelier is a simple, responsive application suitable for ateliers, tailors, design studios or small sewing business . It uses the best tailor practices, and on top of that, it’s fast, simple, and easy to use. ----------- ############################################################ Getting started with the Django app and/or the documentation ############################################################ ************************ Install the requirements ************************ Install the following: #. Python #. PIP_ #. VirtualEnv_ #. virtualenvwrapper_ #. 
gettext for Django translations

***********************
Install in a virtualenv
***********************
Create a virtualenv using Python 3 (an isolated Python environment)::

    $ mkvirtualenv -p /usr/local/bin/python3 atelier

Install the development requirements::

    $ pip install -r requirements/develop.txt

.. _enable-virtualenv:

.. note:: Whenever you start a new shell where you need to use the virtualenv
    we created with ``mkvirtualenv`` above, you have to run::

        $ workon atelier

*******************************************
Create or recreate the development database
*******************************************
Run::

    $ ievv recreate_devdb

**************************
Running development server
**************************
Run::

    $ ievv devrun

You can adjust what this command actually runs in the
``IEVVTASKS_DEVRUN_RUNNABLES`` setting (in ``develop_settings.py``).

****************************************
Add new data to the development database
****************************************
Always recreate the database (see the section above) before you add new data
to the development database. Furthermore, let the other developers know that
you are doing this. This avoids conflicts in the SQL dump (which is really
hard to merge correctly).

To create a new dump of the development database from your local development
database, use::

    $ ievv dump_db_as_sql

This modifies ``mimirdb/project/develop/dumps/default.sql``, which you should
commit and push.

*****************************************************
Creating private backups of your development database
*****************************************************
See the backup and restore chapter of the django_dbdev docs
(http://django-dbdev.readthedocs.io). This is especially useful when you are
developing data migrations or working with a combination of a production
database clone and a development database.

*************
Running tests
*************
To run the tests, we need to use a different settings file. 
We tell Django to do this using the ``DJANGOENV`` environent variable:: $ DJANGOENV=test python manage.py test ************** Build the docs ************** :ref:`Enable the virtualenv <enable-virtualenv>`, and run:: $ ievv docs -b -o ``-o`` opens the docs in your default browser. If you do not use ``-o`` the command will print the location of the index.html file. .. _PIP: https://pip.pypa.io .. _VirtualEnv: https://virtualenv.pypa.io .. _virtualenvwrapper: http://virtualenvwrapper.readthedocs.org/ <file_sep>/atelier/models/complication_element.py from django.db import models from django.urls import reverse from django.utils.translation import gettext_lazy as _ from atelier.models import AbstractBaseModel class ComplicationElement(AbstractBaseModel): name = models.CharField( max_length=264, verbose_name=_('name') ) base_price = models.DecimalField( max_digits=5, decimal_places=2, verbose_name=_('base price') ) complexity = models.DecimalField( default=1, max_digits=3, decimal_places=2, verbose_name=_('complexity') ) group = models.CharField( default='4', max_length=255, verbose_name=_("group name") ) def __str__(self): """ to display an object in the Django admin site and as the value inserted into a template when it displays an object """ return '{} {}'.format(self.group, self.name) class Meta: ordering = ['group'] def get_absolute_url(self): """ Returns the url to access a particular client instance. 
""" return reverse('atelier:complication_element_detail', args=[str(self.id)]) <file_sep>/atelier/tests/tests_pages.py import htmls from django.contrib.auth.models import User from django.test import TestCase from model_mommy import mommy from atelier.models import Profile from django.urls import reverse from atelier.tests.tests_views.setup_premixin import SetUpPreMixin # def _indent_string(string): # try: # return '\n'.join([' {}'.format(line) for line in string.split('\n')]) # except: # return string class LoginTestCase(SetUpPreMixin): def testLogin(self): self.client.login(username='john', password='<PASSWORD>') response = self.client.get(reverse('login')) self.assertEqual(response.status_code, 200) class PagesTest(SetUpPreMixin): # def prettyformat_response_content(self, response): # warnings = [] # output = None # if hasattr(response, 'render'): # try: # response.render() # except Exception as e: # warnings.append('[cradmin TestCaseMixin warning] response.render() failed with: {}'.format(e)) # else: # try: # output = '[cradmin TestCaseMixin info]: Prettyformatted response.content:\n{}'.format( # _indent_string(htmls.S(response.content).prettify()) # ) # except: # pass # if output is None: # try: # content = response.content.decode('utf-8') # except UnicodeError: # content = response.content # if content: # output = '[cradmin TestCaseMixin info]: response.content:\n{}'.format( # _indent_string(content)) # else: # output = '[cradmin TestCaseMixin info]: response.content is empty.' 
# return output, warnings def test_index_page_not_logged_in(self): response = self.client.get('/en/atelier/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_index_page_logged_in(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/') self.assertEqual(response.status_code, 200) def test_client_page_not_logged_in(self): response = self.client.get('/en/atelier/client/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_client_page_user(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/client/') self.assertEqual(response.status_code, 200) def test_product_page_not_logged_in(self): response = self.client.get('/en/atelier/product/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_product_page_user(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/product/') self.assertEqual(response.status_code, 200) def test_order_page_not_logged_in(self): response = self.client.get('/en/atelier/order/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_order_page(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/order/') self.assertEqual(response.status_code, 200) def test_allowance_discount_page_not_logged_in(self): response = 
self.client.get('/en/atelier/allowance_discount/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_allowance_discount_page(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/allowance_discount/') self.assertEqual(response.status_code, 200) def test_complication_element_page_not_logged_in(self): response = self.client.get('/en/atelier/complication_element/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_complication_element_page(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/complication_element/') self.assertEqual(response.status_code, 200) def test_fabric_page_not_logged_in(self): response = self.client.get('/en/atelier/fabric/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_fabric_page(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/fabric/') self.assertEqual(response.status_code, 200) def test_minimal_style_page_not_logged_in(self): response = self.client.get('/en/atelier/minimal_style/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_minimal_style_page(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/minimal_style/') self.assertEqual(response.status_code, 200) def 
test_profile_page_not_logged_in(self): response = self.client.get('/en/atelier/profile/') self.assertEqual(response.status_code, 404) def test_profile_page_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/profile/') self.assertEqual(response.status_code, 404) def test_profile_page_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.get('/en/atelier/profile/') self.assertEqual(response.status_code, 200) def test_profile_page_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.get('/en/atelier/profile/') self.assertEqual(response.status_code, 200) def test_atelier_page_not_logged_in(self): response = self.client.get('/en/atelier/atelier/') self.assertEqual(response.status_code, 404) def test_atelier_page_user(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/atelier/') self.assertEqual(response.status_code, 404) def test_atelier_page_not_superuser(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.get('/en/atelier/atelier/') self.assertEqual(response.status_code, 404) def test_atelier_page_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.get('/en/atelier/atelier/') self.assertEqual(response.status_code, 200) def test_index_page_2(self): """ tests with using htmls module (see the docs here: https://github.com/espenak/htmls) """ self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/') selector = htmls.S(response.content) selector.list('h2')[0].prettyprint() # print <h2>...</h2> first tag in terminal self.assertEqual(selector.one('h2').text_normalized, 'Welcome to Atelier application!') def test_index_page_3(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/') 
selector = htmls.S(response.content) el = selector.one('.fa-home') print(el.alltext_normalized) self.assertEqual(len(selector.list('li')), 20) # there is simple test for practice to use htmls def test_index_page_4(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.get('/en/atelier/') selector = htmls.S(response.content) self.assertEqual(selector.one('title').alltext_normalized, "Atelier") <file_sep>/atelier/views/__init__.py from .index import * from .client_view import * from .product_view import * from .order_view import * from .minimal_style_view import * from .fabric_view import * from .complication_element_view import * from .allowance_discount_view import * from .atelier_view import * from .profile_view import * from .base_view import * <file_sep>/atelier/tests/tests_views/setup_premixin.py from django.test import TestCase from django.contrib.auth.models import User from model_mommy import mommy from atelier.models import AllowanceDiscount, Profile class SetUpPreMixin(TestCase): @classmethod def setUpTestData(cls): """ Create an instances for tests. The setUpTestData() allows the creation of initial data at the class level, once for the whole TestCase. 
This technique allows for faster tests as compared to using setUp() """ cls.atelier = mommy.make('Atelier') cls.user = User.objects.create_user('user', '<EMAIL>', '<PASSWORD>') cls.tailor = User.objects.create_user('tailor', '<EMAIL>', '<PASSWORD>') cls.superuser = User.objects.create_superuser('superuser', '<EMAIL>', '<PASSWORD>') cls.superuser_profile = mommy.make(Profile, user=cls.superuser, atelier=cls.atelier) cls.user_profile = mommy.make('Profile', user=cls.user, atelier=cls.atelier) cls.tailor_profile = mommy.make(Profile, user=cls.tailor, atelier=cls.atelier, is_tailor=True) <file_sep>/atelier/tests/tests_views/test_order_view.py import htmls from django.contrib.auth.models import User from model_mommy import mommy from atelier.models import Order from django.urls import reverse_lazy from atelier.tests.tests_views.setup_premixin import SetUpPreMixin import datetime class OrderDetailViewTests(SetUpPreMixin): def test_client_detail_view_not_logged_in(self): item = mommy.make('atelier.Order') response = self.client.get(reverse_lazy('atelier:order_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 302) self.assertTrue(response.url.startswith('/accounts/login/')) def test_order_detail_view_tailor_not_in_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') item = mommy.make('atelier.Order') response = self.client.get(reverse_lazy('atelier:order_detail', kwargs={'pk': item.pk, })) self.assertEqual(response.status_code, 404) def test_order_detail_view_user(self): ''' test by the help of htmls module. 
(see https://github.com/espenak/htmls/) ''' self.client.login(username='user', password='<PASSWORD>') client = mommy.make('atelier.Client') product = mommy.make('atelier.Product') fabric = mommy.make('atelier.Fabric') complication_elements = mommy.make('atelier.ComplicationElement', _quantity=2) allowance_discount = mommy.make('atelier.AllowanceDiscount', _quantity=3) kwargs = { 'id': 1, 'client': client, 'product': product, 'fabric': fabric, 'processing_category': '1', 'complication_elements': complication_elements, 'allowance_discount': allowance_discount, 'order_date': datetime.date.today, 'performer': self.user, 'deadline': datetime.datetime.now() + datetime.timedelta(weeks=2), 'atelier': self.user_profile.atelier, } instance = mommy.make('atelier.Order', **kwargs) response = self.client.get('/en/atelier/order/{}/'.format(instance.id)) self.assertEqual(response.status_code, 200) formated_deadline = (datetime.datetime.now() + datetime.timedelta(weeks=2)).strftime('%b. %-d, %Y') formated_date = datetime.datetime.now().strftime('%A %d %B %Y') selector = htmls.S(response.content) s_client = selector.one('.client').alltext_normalized s_product = selector.one('.product').alltext_normalized s_fabric = selector.one('.fabric').alltext_normalized s_fabric_group = selector.one('.fabric_group').alltext_normalized s_complication_elements = selector.count('.elements') s_processing_category = selector.one('.category').alltext_normalized s_allowance_discount = selector.count('.discount') s_order_date = selector.one('.date').alltext_normalized s_performer = selector.one('.performer').alltext_normalized s_deadline = selector.one('.deadline').alltext_normalized s_closed = selector.one('.closed').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(s_client, client.first_name + ' ' + client.last_name) self.assertEqual(s_product, product.name) self.assertEqual(s_fabric, fabric.name) self.assertEqual(s_fabric_group, fabric.group) 
self.assertEqual(s_processing_category, '1') self.assertEqual(s_complication_elements, 2) self.assertEqual(s_allowance_discount, 3) self.assertEqual(s_order_date, formated_date) self.assertEqual(s_performer, self.user.username) self.assertEqual(s_closed, 'No') self.assertEqual(s_deadline, formated_deadline) self.assertTemplateUsed(response, 'atelier/order_detail.html') class OrderCreateViewTests(SetUpPreMixin): def test_order_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:order_form')) self.assertEqual(response.status_code, 404) def test_order_create_view_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:order_form')) self.assertEqual(response.status_code, 404) def test_order_create_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:order_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/order_form.html') def test_order_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:order_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/order_form.html') def test_order_create_view_fields_filtering(self): # the tailor can choose instanses of his own atelier only (for ForeignKey fields) mommy.make('atelier.Client', _quantity=13) mommy.make('atelier.Product', _quantity=13) mommy.make('atelier.Profile', _quantity=13) user = User.objects.create_user('user1', '<EMAIL>', '<PASSWORD>') profile = mommy.make('atelier.Profile', user=user, is_tailor=True) mommy.make('atelier.Client', atelier=profile.atelier, _quantity=2) mommy.make('atelier.Product', atelier=profile.atelier, _quantity=4) mommy.make('atelier.Profile', atelier=profile.atelier, _quantity=6) self.client.login(username='user1', password='<PASSWORD>') response = 
self.client.post(reverse_lazy('atelier:order_form')) self.assertEqual(response.context_data['form'].fields['client'].queryset.count(), 2) self.assertEqual(response.context_data['form'].fields['product'].queryset.count(), 4) self.assertEqual(response.context_data['form'].fields['performer'].queryset.count(), 7) # +1 user yourself class OrderEditViewTests(SetUpPreMixin): def test_order_edit_view_not_logged_in(self): instance = mommy.make('atelier.Order') response = self.client.post(reverse_lazy('atelier:order_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_order_edit_view_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.Order', atelier=self.user.profile.atelier) response = self.client.post(reverse_lazy('atelier:order_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_order_edit_view_tailor_not_in_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Order') response = self.client.post(reverse_lazy('atelier:order_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_order_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') order = mommy.make('atelier.Order', atelier=self.tailor.profile.atelier) my_client = mommy.make('atelier.Client') product = mommy.make('atelier.Product') fabric = mommy.make('atelier.Fabric') complication_elements = mommy.make('atelier.ComplicationElement', _quantity=2) c_e = [] for i in complication_elements: c_e.append(i.id) allowance_discount = mommy.make('atelier.AllowanceDiscount', _quantity=3) a_d = [] for j in allowance_discount: a_d.append(j.id) now = datetime.datetime.now().date() response = self.client.post( reverse_lazy('atelier:order_update_form', kwargs={'pk': order.id}), data={ 'client': my_client.id, 'product': product.id, 'fabric': fabric.id, 
'processing_category': 1, 'complication_elements': c_e, 'allowance_discount': a_d, 'order_date': now, 'performer': self.user.id, 'deadline': now + datetime.timedelta(weeks=2), 'is_closed': False, }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/order/{}/'.format(order.id)) order.refresh_from_db() self.assertEqual(order.client, my_client) self.assertEqual(order.product, product) self.assertEqual(order.fabric, fabric) self.assertEqual(order.processing_category, '1') self.assertTrue(order.complication_elements) self.assertTrue(order.allowance_discount) self.assertEqual(order.order_date, now) self.assertEqual(order.performer, self.user) self.assertEqual(order.deadline, now + datetime.timedelta(weeks=2)) self.assertEqual(order.is_closed, False) self.assertEqual(Order.objects.get(id=order.id).last_updated_by, self.tailor) def test_order_edit_view_fields_filtering(self): # the tailor can choose instanses of his own atelier only (for ForeignKey fields) mommy.make('atelier.Client', _quantity=3) mommy.make('atelier.Product', _quantity=3) mommy.make('atelier.Profile', _quantity=3) user1 = User.objects.create_user('user1', '<EMAIL>', '<PASSWORD>') profile1 = mommy.make('atelier.Profile', user=user1, is_tailor=True) order = mommy.make('atelier.Order', atelier=profile1.atelier) mommy.make('atelier.Client', atelier=profile1.atelier, _quantity=2) mommy.make('atelier.Product', atelier=profile1.atelier, _quantity=4) mommy.make('atelier.Profile', atelier=profile1.atelier, _quantity=6) self.client.login(username='user1', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:order_update_form', kwargs={'pk': order.pk})) self.assertEqual(response.context_data['form'].fields['client'].queryset.count(), 2) self.assertEqual(response.context_data['form'].fields['product'].queryset.count(), 4) self.assertEqual(response.context_data['form'].fields['performer'].queryset.count(), 7) # +1 user yourself class 
OrderDeleteViewTests(SetUpPreMixin): def test_order_delete_view_no_logged_in(self): instance = mommy.make('atelier.Order') response = self.client.get('/en/atelier/order/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_order_delete_view_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.Order', atelier=self.user.profile.atelier) response = self.client.get('/en/atelier/order/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_order_delete_view_tailor_not_in_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Order') response = self.client.get('/en/atelier/order/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_order_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Order', atelier=self.tailor.profile.atelier) self.assertEqual(Order.objects.count(), 1) response = self.client.post('/en/atelier/order/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/order/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/order/') self.assertEqual(Order.objects.count(), 0) def test_order_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.Order', atelier=self.atelier) self.assertEqual(Order.objects.count(), 1) response = self.client.post('/en/atelier/order/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/order/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/order/') self.assertEqual(Order.objects.count(), 0) class OrderListViewTests(SetUpPreMixin): def test_order_list_view_no_logged_in(self): response = self.client.get('/en/atelier/order/') self.assertEqual(response.status_code, 302) # Manually check 
redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_order_list_view_user(self): ''' test: user can see orders of his atelier only ''' self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Order', atelier=self.user.profile.atelier, _quantity=2) mommy.make('atelier.Order', _quantity=4) response = self.client.get('/en/atelier/order/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/order_list.html') self.assertEqual(len(response.context['object_list']), 2) def test_order_list_pagination_is_ten(self): self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Order', atelier=self.user.profile.atelier, _quantity=13) # Create an instances more than 10 for pagination tests (13 instances) resp = self.client.get(reverse_lazy('atelier:order_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 10) def test_order_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Order', atelier=self.user.profile.atelier, _quantity=13) resp = self.client.get(reverse_lazy('atelier:order_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 3) <file_sep>/atelier/forms/order_form.py from django import forms from django.forms import CheckboxSelectMultiple from atelier.models import Order, ComplicationElement class OrderForm(forms.ModelForm): complication_elements = forms.CheckboxSelectMultiple() class Meta: model = Order fields = ['client', 'product', 'fabric', 'processing_category', 'complication_elements', 
'allowance_discount', 'performer', 'order_date', 'deadline', 'is_closed'] widgets = { 'complication_elements': CheckboxSelectMultiple(), 'allowance_discount': CheckboxSelectMultiple() } <file_sep>/atelier/tests/tests_models/tests_models.py from django.contrib.auth.models import User from django.test import TestCase from model_mommy import mommy from atelier.models import Client, AllowanceDiscount, ComplicationElement, Fabric, MinimalStyle, Product, Order, \ Atelier, Profile class ClientModelTest(TestCase): """ Class to test the model Client """ def setUp(self): """ Set up all the tests """ self.atelier = mommy.make(Atelier) self.client = mommy.make(Client, atelier=self.atelier) def test_first_name_label(self): field_label = self.client._meta.get_field('first_name').verbose_name self.assertEquals(field_label, 'first Name') def test_last_name_label(self): field_label = self.client._meta.get_field('last_name').verbose_name self.assertEquals(field_label, 'second Name') def test_place_label(self): field_label = self.client._meta.get_field('place').verbose_name self.assertEquals(field_label, 'place') def test_atelier_label(self): field_label = self.client._meta.get_field('atelier').verbose_name self.assertEquals(field_label, 'atelier') def test_tel_number_label(self): field_label = self.client._meta.get_field('tel_number').verbose_name self.assertEquals(field_label, 'tel. 
number') def test_first_name_max_length(self): max_length = self.client._meta.get_field('first_name').max_length self.assertEquals(max_length, 30) def test_last_name_max_length(self): max_length = self.client._meta.get_field('last_name').max_length self.assertEquals(max_length, 30) def test_tel_number_max_length(self): max_length = self.client._meta.get_field('tel_number').max_length self.assertEquals(max_length, 30) def test_place_max_length(self): max_length = self.client._meta.get_field('place').max_length self.assertEquals(max_length, 30) def test_object_name_is_first_name_last_name(self): expected_object_name = '%s %s' % (self.client.first_name, self.client.last_name) self.assertEquals(expected_object_name, str(self.client)) def test_get_absolute_url(self): # This will also fail if the urlconf is not defined. id = self.client.id self.assertEquals(self.client.get_absolute_url(), '/en/atelier/client/{}/'.format(id)) class AllowanceDiscountTestModel(TestCase): """ Class to test the model AllowanceDiscount by the help of Model Mommy """ def setUp(self): """ Set up all the tests """ self.allowance_discount = mommy.make(AllowanceDiscount) def test_instance(self): self.assertTrue(isinstance(self.allowance_discount, AllowanceDiscount)) self.assertEqual(self.allowance_discount.__str__(), self.allowance_discount.name) def test_fields_verbous_name(self): field_name = self.allowance_discount._meta.get_field('name').verbose_name field_coefficient = self.allowance_discount._meta.get_field('coefficient').verbose_name field_label = self.allowance_discount._meta.get_field('label').verbose_name self.assertEquals(field_name, 'name') self.assertEquals(field_coefficient, 'coefficient') self.assertEquals(field_label, 'group') def test_field_arguments(self): max_length_name = self.allowance_discount._meta.get_field('name').max_length max_length_label = self.allowance_discount._meta.get_field('label').max_length max_digits_coefficient = 
self.allowance_discount._meta.get_field('coefficient').max_digits decimal_places_coefficient = self.allowance_discount._meta.get_field('coefficient').decimal_places self.assertEquals(max_length_name, 255) self.assertEquals(max_length_label, 255) self.assertEquals(max_digits_coefficient, 5) self.assertEquals(decimal_places_coefficient, 2) def test_get_absolute_url(self): # This will also fail if the urlconf is not defined. id = self.allowance_discount.id self.assertEquals(self.allowance_discount.get_absolute_url(), '/en/atelier/allowance_discount/{}/'.format(id)) class FabricTestModel(TestCase): """ Class to test the model Fabric (using model mommy Recipe) """ def setUp(self): """ Load the recipe 'fabric' from 'atelier/mommy_recipes.py' and create the instances""" self.fabric_one = mommy.make_recipe('atelier.fabric') # create recip for fabric_wool instance self.fabric_wool = mommy.make('atelier.Fabric', name='Wool', group='GR1', complexity_factor=2) def test_instance(self): """True if create instances""" self.assertTrue(isinstance(self.fabric_one, Fabric)) # True if create instance (another instance) self.assertIsInstance(self.fabric_wool, Fabric) def test_str_(self): """models _str_ checking""" self.assertEqual(self.fabric_one.__str__(), self.fabric_one.name) # another instance _str_ checking self.assertEqual(self.fabric_wool.__str__(), self.fabric_wool.name) def test_fields_verbous_name(self): field_name = self.fabric_one._meta.get_field('name').verbose_name field_group = self.fabric_one._meta.get_field('group').verbose_name field_complexity_factor = self.fabric_one._meta.get_field('complexity_factor').verbose_name self.assertEquals(field_name, 'name') self.assertEquals(field_group, 'group') self.assertEquals(field_complexity_factor, 'complexity factor') def test_field_arguments(self): max_length_name = self.fabric_one._meta.get_field('name').max_length max_length_group = self.fabric_one._meta.get_field('group').max_length max_digits_complexity_factor = 
self.fabric_one._meta.get_field('complexity_factor').max_digits decimal_places_complexity_factor = self.fabric_one._meta.get_field('complexity_factor').decimal_places default_complexity_factor = self.fabric_one._meta.get_field('complexity_factor').default self.assertEquals(max_length_name, 264) self.assertEquals(max_length_group, 3) self.assertEquals(max_digits_complexity_factor, 5) self.assertEquals(decimal_places_complexity_factor, 2) self.assertEquals(default_complexity_factor, 1) def test_get_absolute_url(self): # This will also fail if the urlconf is not defined. id = self.fabric_one.id self.assertEquals(self.fabric_one.get_absolute_url(), '/en/atelier/fabric/{}/'.format(id)) class ComplicationElementTestModel(TestCase): """ Class to test the model ComplicationElement """ def setUp(self): """ Set up all the tests """ self.complication_element = mommy.make(ComplicationElement) def test_instance(self): self.assertTrue(isinstance(self.complication_element, ComplicationElement)) def test_str_(self): """models _str_ checking""" self.assertEqual(self.complication_element.__str__(), '{} {}'.format(self.complication_element.group, self.complication_element.name)) def test_fields_verbouse_name(self): field_name = self.complication_element._meta.get_field('name').verbose_name field_group = self.complication_element._meta.get_field('group').verbose_name field_base_price = self.complication_element._meta.get_field('base_price').verbose_name field_complexity = self.complication_element._meta.get_field('complexity').verbose_name self.assertEquals(field_name, 'name') self.assertEquals(field_group, 'group name') self.assertEquals(field_complexity, 'complexity') self.assertEquals(field_base_price, 'base price') def test_field_arguments(self): max_length_name = self.complication_element._meta.get_field('name').max_length max_length_group = self.complication_element._meta.get_field('group').max_length default_group = self.complication_element._meta.get_field('group').default 
max_digits_complexity = self.complication_element._meta.get_field('complexity').max_digits default_complexity = self.complication_element._meta.get_field('complexity').default decimal_places_complexity = self.complication_element._meta.get_field('complexity').decimal_places max_digits_base_price = self.complication_element._meta.get_field('base_price').max_digits decimal_places_base_price = self.complication_element._meta.get_field('base_price').decimal_places self.assertEquals(max_length_name, 264) self.assertEquals(max_length_group, 255) self.assertEquals(default_group, '4') self.assertEquals(max_digits_complexity, 3) self.assertEquals(default_complexity, 1) self.assertEquals(decimal_places_complexity, 2) self.assertEquals(max_digits_base_price, 5) self.assertEquals(decimal_places_base_price, 2) def test_get_absolute_url(self): # This will also fail if the urlconf is not defined. id = self.complication_element.id self.assertEquals(self.complication_element.get_absolute_url(), '/en/atelier/complication_element/{}/'.format(id)) class MinimalStyleTestModel(TestCase): """ Class to test the model MinimalStyle """ def setUp(self): """ Set up all the tests """ self.minimal_style = mommy.make(MinimalStyle) def test_instance(self): self.assertTrue(isinstance(self.minimal_style, MinimalStyle)) def test_str_(self): """models _str_ checking""" self.assertEqual(self.minimal_style.__str__(), self.minimal_style.name) def test_fields_verbouse_name(self): field_name = self.minimal_style._meta.get_field('name').verbose_name field_group = self.minimal_style._meta.get_field('group').verbose_name self.assertEquals(field_name, 'name') self.assertEquals(field_group, 'product group') def test_field_arguments(self): max_length_name = self.minimal_style._meta.get_field('name').max_length max_length_group = self.minimal_style._meta.get_field('group').max_length self.assertEquals(max_length_name, 264) self.assertEquals(max_length_group, 264) def test_get_absolute_url(self): # This will also 
fail if the urlconf is not defined. id = self.minimal_style.id self.assertEquals(self.minimal_style.get_absolute_url(), '/en/atelier/minimal_style/{}/'.format(id)) class ProductTestModel(TestCase): """ Class to test the model Product """ def setUp(self): """ Set up all the tests """ self.minimal_style = mommy.make('atelier.MinimalStyle') self.atelier = mommy.make(('atelier.Atelier')) self.product = mommy.make(Product, minimal_style=self.minimal_style, atelier=self.atelier) def test_instance(self): self.assertTrue(isinstance(self.product, Product)) def test_relative_foreign_key(self): """ if Product model foreign key related with MinimalStyle model :return:true """ self.assertEquals(self.product.minimal_style, self.minimal_style) self.assertEquals(self.product.atelier, self.atelier) def test_str_(self): """models _str_ checking""" self.assertEqual(self.product.__str__(), self.product.name) def test_fields_verbose_name(self): field_name = self.product._meta.get_field('name').verbose_name field_minimal_style = self.product._meta.get_field('minimal_style').verbose_name field_base_price = self.product._meta.get_field('base_price').verbose_name field_atelier = self.product._meta.get_field('atelier').verbose_name self.assertEquals(field_name, 'name') self.assertEquals(field_minimal_style, 'minimal style') self.assertEquals(field_base_price, 'base price') self.assertEquals(field_atelier, 'Atelier') def test_field_arguments(self): max_length_name = self.product._meta.get_field('name').max_length max_digits_base_price = self.product._meta.get_field('base_price').max_digits decimal_places_base_price = self.product._meta.get_field('base_price').decimal_places self.assertEquals(max_length_name, 264) self.assertEquals(max_digits_base_price, 10) self.assertEquals(decimal_places_base_price, 2) def test_get_absolute_url(self): # This will also fail if the urlconf is not defined. 
id = self.product.id self.assertEquals(self.product.get_absolute_url(), '/en/atelier/product/{}/'.format(id)) class OrderTestModel(TestCase): """ Class to test the model Order """ def setUp(self): """ Set up all the tests """ complication_element1 = mommy.make('atelier.ComplicationElement', base_price=10, complexity=2, name='Element1') complication_element2 = mommy.make('atelier.ComplicationElement', base_price=5, complexity=1, name='Element2') client = mommy.make('atelier.Client') product = mommy.make('atelier.Product', base_price=100) fabric = mommy.make('atelier.Fabric', complexity_factor=2) allowance_discount_1 = mommy.make('atelier.AllowanceDiscount', coefficient=1) allowance_discount_2 = mommy.make('atelier.AllowanceDiscount', coefficient=2) atelier = mommy.make('atelier.Atelier') user = mommy.make('User') self.order = mommy.make('atelier.Order', complication_elements=[complication_element1, complication_element2], atelier=atelier, client=client, product=product, fabric=fabric, allowance_discount=[allowance_discount_1, allowance_discount_2], performer=user) def test_instance(self): self.assertTrue(isinstance(self.order, Order)) def test_str_(self): """models _str_ checking""" self.assertEqual(self.order.__str__(), ('{} {}'.format(self.order.client, self.order.order_date))) def test_get_absolute_url(self): # This will also fail if the urlconf is not defined. 
id = self.order.id self.assertEquals(self.order.get_absolute_url(), '/en/atelier/order/{}/'.format(id)) def test_fields_verbose_name(self): field_processing_category = self.order._meta.get_field('processing_category').verbose_name field_order_date = self.order._meta.get_field('order_date').verbose_name field_deadline = self.order._meta.get_field('deadline').verbose_name field_is_closed = self.order._meta.get_field('is_closed').verbose_name field_client = self.order._meta.get_field('client').verbose_name field_product = self.order._meta.get_field('product').verbose_name field_fabric = self.order._meta.get_field('fabric').verbose_name field_allowance_discount = self.order._meta.get_field('allowance_discount').verbose_name field_performer = self.order._meta.get_field('performer').verbose_name field_atelier = self.order._meta.get_field('atelier').verbose_name field_complication_elements = self.order._meta.get_field('complication_elements').verbose_name self.assertEquals(field_processing_category, 'processing category') self.assertEquals(field_order_date, 'order date') self.assertEquals(field_deadline, 'deadline') self.assertEquals(field_is_closed, 'closed') self.assertEquals(field_client, 'client') self.assertEquals(field_product, 'product') self.assertEquals(field_complication_elements, 'complication elements') self.assertEquals(field_fabric, 'fabric') self.assertEquals(field_allowance_discount, 'allowance/discount') self.assertEquals(field_performer, 'performer') self.assertEquals(field_atelier, 'atelier') def test_order_price(self): self.order.processing_category = 3 self.assertEqual(self.order.order_price, '1000.00') class AtelierTestModel(TestCase): """ Class to test the model Atelier """ def setUp(self): """ Set up all the tests """ self.atelier = mommy.make('atelier.Atelier') def test_instance(self): self.assertTrue(isinstance(self.atelier, Atelier)) def test_str_(self): """models _str_ checking""" self.assertEqual(self.atelier.__str__(), self.atelier.name) def 
test_get_absolute_url(self): # This will also fail if the urlconf is not defined. id = self.atelier.id self.assertEquals(self.atelier.get_absolute_url(), '/en/atelier/atelier/{}/'.format(id)) def test_fields_verbose_name(self): field_name = self.atelier._meta.get_field('name').verbose_name self.assertEquals(field_name, 'name') def test_field_arguments(self): max_length_name = self.atelier._meta.get_field('name').max_length self.assertEquals(max_length_name, 150) class ProfileTestModel(TestCase): """ Class to test the model Profile """ def setUp(self): """ Set up all the tests """ user = mommy.make('User') self.profile = mommy.make('atelier.Profile', user=user) def test_instance(self): self.assertTrue(isinstance(self.profile, Profile)) def test_str_(self): """models _str_ checking""" self.assertEqual(self.profile.__str__(), self.profile.user.username) def test_get_absolute_url(self): # This will also fail if the urlconf is not defined. id = self.profile.id self.assertEquals(self.profile.get_absolute_url(), '/en/atelier/profile/{}/'.format(id)) def test_fields_verbose_name(self): field_user = self.profile._meta.get_field('user').verbose_name field_atelier = self.profile._meta.get_field('atelier').verbose_name field_is_tailor = self.profile._meta.get_field('is_tailor').verbose_name self.assertEquals(field_user, 'user') self.assertEquals(field_atelier, 'atelier') self.assertEquals(field_is_tailor, 'tailor') def test_field_arguments(self): help_text_is_tailor = self.profile._meta.get_field('is_tailor').help_text self.assertEquals(help_text_is_tailor, 'User can be a tailor to have administrator access within his atelier') <file_sep>/atelier/admin.py from django.contrib import admin from .models import Product, Order, Client, AllowanceDiscount, ComplicationElement, Fabric, MinimalStyle, Atelier, Profile admin.site.register(Atelier) admin.site.register(Profile) admin.site.register(Fabric) admin.site.register(AllowanceDiscount) admin.site.register(ComplicationElement) 
admin.site.register(Product)
admin.site.register(MinimalStyle)


# Define the admin class
class OrderInline(admin.TabularInline):
    # addition admin.class to show orders for select client
    model = Order
    extra = 0
    fields = ('product', 'order_date')


class ClientAdmin(admin.ModelAdmin):
    # Columns shown on the client change-list, including the audit fields.
    list_display = ('first_name', 'last_name', 'tel_number', 'place', 'atelier', 'created_datetime',
                    'last_updated_datetime', 'created_by', 'last_updated_by')
    inlines = [OrderInline]


# Register the admin class with the associated model
admin.site.register(Client, ClientAdmin)


# Register the Admin classes for Order using the decorator
@admin.register(Order)
class OrderAdmin(admin.ModelAdmin):
    # to display all fields for orders
    list_display = ('client', 'product', 'atelier', 'fabric', 'processing_category', 'performer',
                    'display_allowance_discount', 'display_complication_elements', 'order_date', 'deadline',
                    'is_closed')
    # add the filters
    list_filter = ('atelier', 'client', 'product', 'fabric', 'order_date', 'performer', 'is_closed')
    # division of fields into groups
    fieldsets = (
        (None, {
            'fields': ('atelier', 'client', 'product', 'fabric', 'order_date', 'performer', 'is_closed',
                       'created_by', 'last_updated_by',)
        }),
        ('Addition', {
            'fields': ('deadline', 'complication_elements', 'allowance_discount')
        }),
    )
<file_sep>/atelier/migrations/0002_auto_20200130_1709.py
# Generated by Django 2.2.9 on 2020-01-30 17:09
# NOTE: auto-generated migration -- avoid hand-editing the operations below.

import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('atelier', '0001_initial'),
    ]

    operations = [
        # New per-user Profile model (replaces the Tailor model deleted below).
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_datetime', models.DateTimeField(auto_now_add=True, null=True)),
                ('last_updated_datetime', models.DateTimeField(auto_now=True, null=True)),
                ('is_tailor', models.BooleanField(blank=True, default=False, help_text='User can be a tailor to have administrator access within his atelier', verbose_name='tailor')),
            ],
            options={
                'ordering': ['user'],
            },
        ),
        migrations.RemoveField(
            model_name='order',
            name='tailor',
        ),
        migrations.AddField(
            model_name='order',
            name='is_closed',
            field=models.BooleanField(blank=True, default=False, verbose_name='closed'),
        ),
        migrations.AddField(
            model_name='order',
            name='performer',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='performer'),
        ),
        migrations.AlterField(
            model_name='allowancediscount',
            name='created_datetime',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='allowancediscount',
            name='last_updated_datetime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='atelier',
            name='created_datetime',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='atelier',
            name='last_updated_datetime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='client',
            name='created_datetime',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='client',
            name='last_updated_datetime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='complicationelement',
            name='created_datetime',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='complicationelement',
            name='last_updated_datetime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='fabric',
            name='created_datetime',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='fabric',
            name='last_updated_datetime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='minimalstyle',
            name='created_datetime',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='minimalstyle',
            name='last_updated_datetime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='order',
            name='created_datetime',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='order',
            name='deadline',
            # The baked-in date below is the value Order.return_date_time() produced at
            # makemigrations time -- evidence that the model passed a *call result*, not
            # the callable, as the field default (see atelier/models/order.py).
            field=models.DateField(blank=True, default=datetime.date(2020, 2, 13), null=True, verbose_name='deadline'),
        ),
        migrations.AlterField(
            model_name='order',
            name='last_updated_datetime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.AlterField(
            model_name='product',
            name='created_datetime',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='product',
            name='last_updated_datetime',
            field=models.DateTimeField(auto_now=True, null=True),
        ),
        migrations.DeleteModel(
            name='Tailor',
        ),
        migrations.AddField(
            model_name='profile',
            name='atelier',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='atelier.Atelier', verbose_name='atelier'),
        ),
        migrations.AddField(
            model_name='profile',
            name='created_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='profile',
            name='last_updated_by',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='profile',
            name='user',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, verbose_name='user'),
        ),
    ]
<file_sep>/atelier/tests/tests_views/test_client_view.py
import htmls
from model_mommy import mommy

from atelier.models import Client
from django.urls import reverse_lazy
from atelier.tests.tests_views.setup_premixin import SetUpPreMixin


class
ClientDetailViewTests(SetUpPreMixin): def test_client_detail_view_not_logged_in(self): item = mommy.make('atelier.Client') response = self.client.get(reverse_lazy('atelier:client_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 302) self.assertTrue(response.url.startswith('/accounts/login/')) def test_client_detail_view_tailor_not_in_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') item = mommy.make('atelier.Client') response = self.client.get(reverse_lazy('atelier:client_detail', kwargs={'pk': item.pk, })) self.assertEqual(response.status_code, 404) def test_client_detail_view_user(self): ''' test by the help of htmls module. (see https://github.com/espenak/htmls/) ''' self.client.login(username='user', password='<PASSWORD>') kwargs = { 'id': 1, 'first_name': 'Ivan', 'last_name': 'Sakh', 'tel_number': 123456, 'place': 'Kyiv', 'atelier': self.user_profile.atelier, } instance = mommy.make('atelier.Client', **kwargs) response = self.client.get('/en/atelier/client/{}/'.format(instance.id)) selector = htmls.S(response.content) first = selector.one('.first').alltext_normalized last = selector.one('.last').alltext_normalized tel = selector.one('.tel').alltext_normalized pl = selector.one('.pl').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(first, 'Ivan') self.assertEqual(last, 'Sakh') self.assertEqual(tel, '123456') self.assertEqual(pl, 'Kyiv') self.assertTemplateUsed(response, 'atelier/client_detail.html') class ClientCreateViewTests(SetUpPreMixin): def test_client_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:client_form')) self.assertEqual(response.status_code, 404) def test_client_create_view_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:client_form')) 
self.assertEqual(response.status_code, 404) def test_client_create_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:client_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') def test_client_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:client_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') class ClientEditViewTests(SetUpPreMixin): def test_client_edit_view_not_logged_in(self): instance = mommy.make('atelier.Client') response = self.client.post(reverse_lazy('atelier:client_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_client_edit_view_not_tailor(self): self.client.login(username='user', password='supassword') instance = mommy.make('atelier.Client', atelier=self.user.profile.atelier) response = self.client.post(reverse_lazy('atelier:client_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_client_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Client', atelier=self.tailor.profile.atelier) response = self.client.post( reverse_lazy('atelier:client_update_form', kwargs={'pk': instance.id}), data={ 'first_name': 'Sashko', 'last_name': 'Fritz', 'tel_number': '123456', 'place': 'Morshyn', }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/client/{}/'.format(instance.id)) instance.refresh_from_db() self.assertEqual(instance.first_name, 'Sashko') self.assertEqual(instance.tel_number, '123456') self.assertEqual(instance.last_name, 'Fritz') self.assertEqual(instance.place, 'Morshyn') self.assertEqual(Client.objects.get(id=instance.id).last_updated_by, self.tailor) def 
test_client_edit_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.Client', atelier=self.atelier) response = self.client.post( reverse_lazy('atelier:client_update_form', kwargs={'pk': instance.id}), data={ 'first_name': 'Ivan', 'last_name': 'Sakh', 'tel_number': 123456, 'place': 'Kyiv', }) print(response['Location']) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/client/{}/'.format(instance.id)) instance.refresh_from_db() self.assertEqual(instance.first_name, 'Ivan') self.assertEqual(instance.last_name, 'Sakh') self.assertEqual(instance.tel_number, '123456') self.assertEqual(instance.place, 'Kyiv') self.assertEqual(instance.last_updated_by, self.superuser) class ClientDeleteViewTests(SetUpPreMixin): def test_client_delete_view_no_logged_in(self): instance = mommy.make('atelier.Client') response = self.client.get('/en/atelier/client/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_client_delete_view_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.Client', atelier=self.user.profile.atelier) response = self.client.get('/en/atelier/client/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_client_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Client', atelier=self.tailor.profile.atelier) self.assertEqual(Client.objects.count(), 1) response = self.client.post('/en/atelier/client/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/client/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/client/') self.assertEqual(Client.objects.count(), 0) def test_client_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.Client', 
atelier=self.atelier) self.assertEqual(Client.objects.count(), 1) response = self.client.post('/en/atelier/client/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/client/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/client/') self.assertEqual(Client.objects.count(), 0) class ClientListViewTests(SetUpPreMixin): def test_client_list_view_no_logged_in(self): response = self.client.get('/en/atelier/client/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_client_list_view_user(self): self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Client', atelier=self.user.profile.atelier, _quantity=4) mommy.make('atelier.Client', _quantity=6) response = self.client.get('/en/atelier/client/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/client_list.html') self.assertEqual(len(response.context['object_list']), 4) def test_client_list_pagination_is_ten(self): self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Client', atelier=self.user.profile.atelier, _quantity=13) # Create an instances more than 10 for pagination tests (13 instances) resp = self.client.get(reverse_lazy('atelier:client_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 10) def test_client_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Client', atelier=self.user.profile.atelier, _quantity=13) resp = self.client.get(reverse_lazy('atelier:client_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' 
in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 3) <file_sep>/atelier/tests/tests_views/test_product_view.py import htmls from model_mommy import mommy from atelier.models import Product, MinimalStyle from django.urls import reverse_lazy from atelier.tests.tests_views.setup_premixin import SetUpPreMixin class ProductDetailViewTests(SetUpPreMixin): def test_client_detail_view_not_logged_in(self): item = mommy.make('atelier.Product') response = self.client.get(reverse_lazy('atelier:product_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 302) self.assertTrue(response.url.startswith('/accounts/login/')) def test_product_detail_view_user_not_in_atelier(self): self.client.login(username='user', password='<PASSWORD>') item = mommy.make('atelier.Product') response = self.client.get(reverse_lazy('atelier:product_detail', kwargs={'pk': item.pk, })) self.assertEqual(response.status_code, 404) def test_product_detail_view_tailor_not_in_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') item = mommy.make('atelier.Product') response = self.client.get(reverse_lazy('atelier:product_detail', kwargs={'pk': item.pk, })) self.assertEqual(response.status_code, 404) def test_product_detail_view_user(self): ''' test by the help of htmls module. 
(see https://github.com/espenak/htmls/) ''' minimal_style = mommy.make(MinimalStyle) self.client.login(username='user', password='<PASSWORD>') kwargs = { 'id': 1, 'name': 'Product', 'minimal_style': minimal_style, 'base_price': 100, 'atelier': self.user_profile.atelier, } instance = mommy.make('atelier.Product', **kwargs) response = self.client.get('/en/atelier/product/{}/'.format(instance.id)) self.assertEqual(response.status_code, 200) selector = htmls.S(response.content) name = selector.one('.name').alltext_normalized minimal = selector.one('.minimal_style').alltext_normalized base_price = selector.one('.base_price').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(name, 'Product') self.assertEqual(minimal, minimal_style.group) self.assertEqual(base_price, '100.00 ₴') self.assertTemplateUsed(response, 'atelier/product_detail.html') class ProductCreateViewTests(SetUpPreMixin): def test_product_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:product_form')) self.assertEqual(response.status_code, 404) def test_product_create_view_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:product_form')) self.assertEqual(response.status_code, 404) def test_product_create_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:product_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') def test_product_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:product_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') class ProductEditViewTests(SetUpPreMixin): def test_product_edit_view_not_logged_in(self): instance = mommy.make('atelier.Product') response = 
self.client.post(reverse_lazy('atelier:product_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_product_edit_view_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.Product', atelier=self.user.profile.atelier) response = self.client.post(reverse_lazy('atelier:product_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_product_edit_view_tailor_not_in_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Product') response = self.client.post(reverse_lazy('atelier:product_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_product_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') minimal_style = mommy.make(MinimalStyle) instance = mommy.make('atelier.Product', atelier=self.tailor.profile.atelier) response = self.client.post( reverse_lazy('atelier:product_update_form', kwargs={'pk': instance.id}), data={ 'name': 'Product', 'minimal_style': minimal_style.id, 'base_price': 100, }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/product/{}/'.format(instance.id)) instance.refresh_from_db() self.assertEqual(instance.name, 'Product') self.assertEqual(instance.minimal_style, minimal_style) self.assertEqual(instance.base_price, 100) self.assertEqual(Product.objects.get(id=instance.id).last_updated_by, self.tailor) def test_product_edit_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') minimal_style = mommy.make(MinimalStyle) instance = mommy.make('atelier.Product', atelier=self.tailor.profile.atelier) response = self.client.post( reverse_lazy('atelier:product_update_form', kwargs={'pk': instance.id}), data={ 'name': 'Product', 'minimal_style': minimal_style.id, 'base_price': 100, }) self.assertEqual(response.status_code, 302) 
self.assertRedirects(response, '/en/atelier/product/{}/'.format(instance.id)) instance.refresh_from_db() self.assertEqual(instance.name, 'Product') self.assertEqual(instance.minimal_style, minimal_style) self.assertEqual(instance.base_price, 100) self.assertEqual(Product.objects.get(id=instance.id).last_updated_by, self.superuser) class ProductDeleteViewTests(SetUpPreMixin): def test_product_delete_view_no_logged_in(self): instance = mommy.make('atelier.Product') response = self.client.get('/en/atelier/product/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_product_delete_view_not_tailor(self): self.client.login(username='user', password='<PASSWORD>') instance = mommy.make('atelier.Product', atelier=self.user.profile.atelier) response = self.client.get('/en/atelier/product/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_product_delete_view_tailor_not_in_atelier(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Product') response = self.client.get('/en/atelier/product/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_product_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Product', atelier=self.tailor.profile.atelier) self.assertEqual(Product.objects.count(), 1) response = self.client.post('/en/atelier/product/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/product/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/product/') self.assertEqual(Product.objects.count(), 0) def test_product_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') print(self.superuser.is_staff, self.superuser.is_superuser) instance = mommy.make('atelier.Product') self.assertEqual(Product.objects.count(), 1) response = 
self.client.post('/en/atelier/product/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/product/') self.assertRedirects(response, '/en/atelier/product/') self.assertEqual(Product.objects.count(), 0) class ProductListViewTests(SetUpPreMixin): def test_product_list_view_no_logged_in(self): response = self.client.get('/en/atelier/product/') self.assertEqual(response.status_code, 302) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertTrue(response.url.startswith('/accounts/login/')) def test_product_list_view_user(self): ''' test: user can see product list of his atelier only ''' self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Product', atelier=self.user.profile.atelier, _quantity=3) mommy.make('atelier.Product', _quantity=7) response = self.client.get('/en/atelier/product/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/product_list.html') self.assertEqual(len(response.context['object_list']), 3) def test_product_list_view_superuser(self): ''' test: superuser can see all products in all ateliers ''' self.client.login(username='superuser', password='<PASSWORD>') mommy.make('atelier.Product', _quantity=7) response = self.client.get('/en/atelier/product/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/product_list.html') self.assertEqual(len(response.context['object_list']), 7) def test_product_list_pagination_is_ten(self): self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Product', atelier=self.user.profile.atelier, _quantity=13) # Create an instances more than 10 for pagination tests (13 instances) resp = self.client.get(reverse_lazy('atelier:product_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) 
self.assertEqual(len(resp.context['object_list']), 10) def test_product_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='user', password='<PASSWORD>') mommy.make('atelier.Product', atelier=self.user.profile.atelier, _quantity=13) resp = self.client.get(reverse_lazy('atelier:product_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 3) <file_sep>/atelier/models/__init__.py from .abstract_base import AbstractBaseModel from .atelier import Atelier from .fabric import Fabric from .product import Product from .client import Client from .order import Order from .complication_element import ComplicationElement from .allowance_discount import AllowanceDiscount from .minimal_style import MinimalStyle from .profile import Profile <file_sep>/atelier/tests/tests_views/test_atelier_view.py import htmls from model_mommy import mommy from atelier.models import Atelier from django.urls import reverse_lazy from atelier.tests.tests_views.setup_premixin import SetUpPreMixin class AtelierDetailViewTests(SetUpPreMixin): def test_atelier_detail_view_not_logged_in(self): item = mommy.make('atelier.Atelier') response = self.client.get(reverse_lazy('atelier:atelier_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 404) def test_atelier_detail_view_user(self): self.client.login(username='user', password='<PASSWORD>') item = mommy.make('atelier.Atelier') response = self.client.get(reverse_lazy('atelier:atelier_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 404) def test_atelier_detail_view_tailor(self): 
self.client.login(username='tailor', password='<PASSWORD>') item = mommy.make('atelier.Atelier') response = self.client.get(reverse_lazy('atelier:atelier_detail', kwargs={'pk': item.pk, })) # Manually check redirect (Can't use assertRedirect, because the redirect URL is unpredictable) self.assertEqual(response.status_code, 404) def test_atelier_detail_view_superuser(self): ''' test by the help of htmls module. (see https://github.com/espenak/htmls/) ''' self.client.login(username='superuser', password='<PASSWORD>') kwargs = { 'id': 1, 'name': 'super_atelier', } instance = mommy.make('atelier.Atelier', **kwargs) response = self.client.get('/en/atelier/atelier/{}/'.format(instance.id)) selector = htmls.S(response.content) name = selector.one('.name').alltext_normalized self.assertEqual(response.status_code, 200) self.assertEqual(name, 'Ateliers: super_atelier') self.assertTemplateUsed(response, 'atelier/atelier_detail.html') class AtelierCreateViewTests(SetUpPreMixin): def test_atelier_create_view_not_logged_in(self): response = self.client.post(reverse_lazy('atelier:atelier_form')) self.assertEqual(response.status_code, 404) def test_client_create_view_user(self): self.client.login(username='user', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:atelier_form')) self.assertEqual(response.status_code, 404) def test_atelier_create_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:atelier_form')) self.assertEqual(response.status_code, 404) def test_atelier_create_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') response = self.client.post(reverse_lazy('atelier:atelier_form')) self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/create_form.html') class AtelierEditViewTests(SetUpPreMixin): def test_atelier_edit_view_not_logged_in(self): instance = mommy.make('atelier.Atelier') response = 
self.client.post(reverse_lazy('atelier:atelier_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_atelier_edit_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Atelier') response = self.client.post(reverse_lazy('atelier:atelier_update_form', kwargs={'pk': instance.id})) self.assertEqual(response.status_code, 404) def test_atelier_edit_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.Atelier') response = self.client.post( reverse_lazy('atelier:atelier_update_form', kwargs={'pk': instance.id}), data={ 'name': 'Superatelier', }) self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/atelier/{}/'.format(instance.id)) instance.refresh_from_db() self.assertEqual(instance.name, 'Superatelier') class AtelierDeleteViewTests(SetUpPreMixin): def test_atelier_delete_view_no_logged_in(self): instance = mommy.make('atelier.Atelier') response = self.client.get('/en/atelier/atelier/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_atelier_delete_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') instance = mommy.make('atelier.Atelier') response = self.client.get('/en/atelier/atelier/{}/delete/'.format(instance.id)) self.assertEqual(response.status_code, 404) def test_atelier_delete_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') instance = mommy.make('atelier.Atelier') self.assertEqual(Atelier.objects.count(), 2) # + 1 instance from SetUpPreMixin response = self.client.post('/en/atelier/atelier/{}/delete/'.format(instance.id)) self.assertRedirects(response, '/en/atelier/atelier/') self.assertEqual(response.status_code, 302) self.assertRedirects(response, '/en/atelier/atelier/') self.assertEqual(Atelier.objects.count(), 1) class AtelierListViewTests(SetUpPreMixin): def 
test_atelier_list_view_no_logged_in(self): response = self.client.get('/en/atelier/atelier/') self.assertEqual(response.status_code, 404) def test_atelier_list_view_tailor(self): self.client.login(username='tailor', password='<PASSWORD>') response = self.client.get('/en/atelier/atelier/') self.assertEqual(response.status_code, 404) def test_atelier_list_view_superuser(self): self.client.login(username='superuser', password='<PASSWORD>') mommy.make('atelier.Atelier') response = self.client.get('/en/atelier/atelier/') self.assertEqual(response.status_code, 200) self.assertTemplateUsed(response, 'atelier/atelier_list.html') self.assertEqual(len(response.context['object_list']), 2) # + 1 instance from SetUpPreMixin def test_atelier_pagination_is_ten(self): self.client.login(username='superuser', password='<PASSWORD>') # Create an instances more than 10 for pagination tests (13 instances) mommy.make('atelier.Atelier', _quantity=13) resp = self.client.get(reverse_lazy('atelier:atelier_list')) self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 10) def test_atelier_list_all_elements(self): # get second page and confirm it has (exactly) remaining 3 items self.client.login(username='superuser', password='<PASSWORD>') mommy.make('atelier.Atelier', _quantity=13) resp = self.client.get(reverse_lazy('atelier:atelier_list') + '?page=2') self.assertEqual(resp.status_code, 200) self.assertTrue('is_paginated' in resp.context) self.assertTrue(resp.context['is_paginated']) self.assertEqual(len(resp.context['object_list']), 4) # + 1 instance from SetUpPreMixin <file_sep>/atelier/views/minimal_style_view.py from atelier.models import MinimalStyle from atelier.forms import MinimalStyleForm from django.urls import reverse_lazy from atelier.views.base_view import BaseDetailView, BaseListView, \ SuperuserPermissionPreMixin, BaseCreateView, BaseUpdateView, 
class Order(AbstractBaseModel):
    """A client's order: a product sewn from a fabric, with optional
    complication elements and allowances/discounts that affect the price.
    """

    def return_date_time():
        # Default deadline: two weeks from "today". Kept as a plain function
        # so it can be passed (uncalled) as the field default below.
        # NOTE(review): consider moving this to module level so Django's
        # migration serializer can reference it unambiguously.
        return datetime.date.today() + datetime.timedelta(weeks=2)

    CATEGORY1 = '1'
    CATEGORY2 = '2'
    PROCESSING_CATEGORY = [
        (CATEGORY1, 'Processing category 1'),
        (CATEGORY2, 'Processing category 2'),
    ]

    client = models.ForeignKey(
        'atelier.Client',
        on_delete=models.CASCADE,
        verbose_name=_('client')
    )
    product = models.ForeignKey(
        'atelier.Product',
        on_delete=models.CASCADE,
        verbose_name=_('product')
    )
    fabric = models.ForeignKey(
        'atelier.Fabric',
        on_delete=models.CASCADE,
        verbose_name=_('fabric')
    )
    processing_category = models.CharField(
        max_length=1,
        choices=PROCESSING_CATEGORY,
        default=CATEGORY2,
        verbose_name=_('processing category')
    )
    complication_elements = models.ManyToManyField(
        'atelier.ComplicationElement',
        blank=True,
        verbose_name=_('complication elements')
    )
    allowance_discount = models.ManyToManyField(
        'atelier.AllowanceDiscount',
        blank=True,
        verbose_name=_('allowance/discount')
    )
    order_date = models.DateField(
        default=datetime.date.today,
        verbose_name=_('order date')
    )
    performer = models.ForeignKey(
        get_user_model(),  # will return the currently active user model
        on_delete=models.CASCADE,
        verbose_name=_('performer'),
        null=True,
    )
    deadline = models.DateField(
        # BUGFIX: pass the callable itself, NOT its result. With the previous
        # `default=return_date_time()` the function ran once at import time,
        # so every order created afterwards shared that stale deadline date.
        # Django evaluates a callable default each time an instance is created.
        default=return_date_time,
        null=True,
        blank=True,
        verbose_name=_('deadline')
    )
    atelier = models.ForeignKey(
        Atelier,
        on_delete=models.CASCADE,
        verbose_name=_('atelier'),
    )
    is_closed = models.BooleanField(
        default=False,
        blank=True,
        verbose_name=_('closed')
    )

    class Meta:
        ordering = ["order_date"]

    def __str__(self):
        """
        to display an object in the Django admin site
        and as the value inserted into a template when it displays an object
        """
        return '{} {}'.format(self.client, self.order_date)

    def display_allowance_discount(self):
        """
        Creates a string for the allowance_discount (first 3 entries).
        This is required to display allowance_discount in Admin.
        """
        return ', '.join([allowance_discount.name for allowance_discount in self.allowance_discount.all()[:3]])

    display_allowance_discount.short_description = _('allowance/discount')

    def display_complication_elements(self):
        """
        Creates a string for the complication_elements (first 3 entries).
        This is required to display complication_elements in Admin.
        """
        return ', '.join([complication_elements.name for complication_elements in self.complication_elements.all()[:3]])

    display_complication_elements.short_description = _('complication elements')

    def get_absolute_url(self):
        """
        Returns the url to access a particular order instance.
        """
        return reverse('atelier:order_detail', args=[str(self.id)])

    @property
    def order_price(self):
        """Calculated price of this order; see app_utils.order_price_calculation."""
        complication_elements_base_price_list = []
        complication_elements_complexity_list = []
        allowance_discount_coefficient_list = []
        for element in self.complication_elements.all():
            complication_elements_base_price_list.append(element.base_price)
            complication_elements_complexity_list.append(element.complexity)
        for discount in self.allowance_discount.all():
            allowance_discount_coefficient_list.append(discount.coefficient)
        return order_price_calculation(self.fabric.complexity_factor, self.product.base_price,
                                       complication_elements_base_price_list,
                                       complication_elements_complexity_list, self.processing_category,
                                       allowance_discount_coefficient_list)
class ClientListView(AtelierFilterObjectsPreMixin, BaseListView):
    """Paginated list of clients, scoped to the user's atelier."""
    model = Client


class ClientCreateView(TailorPermissionPreMixin, BaseCreateView):
    """Create a client; tailors and superusers only."""
    model = Client
    form_class = ClientForm
    template_name = 'atelier/create_form.html'
    initial = {'place': _('Morshyn'), }


class ClientUpdateView(TailorPermissionPreMixin, AtelierFilterObjectsPreMixin, BaseUpdateView):
    """Edit a client within the user's atelier; tailors and superusers only."""
    model = Client
    form_class = ClientForm
    template_name = 'atelier/create_form.html'


class ClientDetailView(AtelierFilterObjectsPreMixin, BaseDetailView):
    """Client detail page, with the client's orders added to the context."""
    model = Client

    def get_context_data(self, **kwargs):
        # Start from the parent context, then attach this client's orders.
        ctx = super().get_context_data(**kwargs)
        ctx['order_list'] = Order.objects.filter(client=self.object)
        return ctx


class ClientDeleteView(TailorPermissionPreMixin, AtelierFilterObjectsPreMixin, BaseDeleteView):
    """Delete a client within the user's atelier; tailors and superusers only."""
    model = Client
    success_url = reverse_lazy('atelier:client_list')
    template_name = 'atelier/delete_form.html'
class TailorPermissionPreMixin:
    """
    check permissions for tailor or superuser:
    anonymous users and non-tailor ordinary users get a 404.
    """

    def dispatch(self, request, *args, **kwargs):
        user = self.request.user
        if user.is_anonymous:
            raise Http404()
        if user.is_superuser or user.profile.is_tailor:
            return super().dispatch(request, *args, **kwargs)
        raise Http404()


class BaseDetailView(LoginRequiredMixin, generic.DetailView):
    pass


class BaseListView(LoginRequiredMixin, generic.ListView):
    paginate_by = 10  # number of records on the one page


class BaseCreateView(LoginRequiredMixin, generic.CreateView):
    """CreateView that stamps atelier/created_by/last_updated_by on new objects."""

    def form_valid(self, form):
        # Set the audit fields on the not-yet-saved instance and let the
        # parent implementation perform the single save. The previous code
        # saved here AND in super().form_valid(form), writing to the
        # database twice for every creation.
        form.instance.atelier = self.request.user.profile.atelier
        form.instance.created_by = self.request.user
        form.instance.last_updated_by = self.request.user
        return super().form_valid(form)


class BaseUpdateView(LoginRequiredMixin, generic.UpdateView):
    """UpdateView that stamps last_updated_by on the edited object."""

    def form_valid(self, form):
        # Assign on form.instance so the parent's single save persists it;
        # avoids the redundant extra save of the old implementation.
        form.instance.last_updated_by = self.request.user
        return super().form_valid(form)


class BaseDeleteView(LoginRequiredMixin, generic.DeleteView):
    pass
class AllowanceDiscountListView(BaseListView):
    """Paginated list of allowances/discounts."""
    model = AllowanceDiscount
    template_name = 'atelier/allowance_discount_list.html'
    context_object_name = 'allowance_discount_list'


class AllowanceDiscountCreateView(SuperuserPermissionPreMixin, BaseCreateView):
    """Create an allowance/discount; superusers only."""
    model = AllowanceDiscount
    form_class = AllowanceDiscountForm
    template_name = 'atelier/create_form.html'


class AllowanceDiscountUpdateView(SuperuserPermissionPreMixin, BaseUpdateView):
    """Edit an allowance/discount; superusers only."""
    model = AllowanceDiscount
    form_class = AllowanceDiscountForm
    template_name = 'atelier/create_form.html'


class AllowanceDiscountDeleteView(SuperuserPermissionPreMixin, BaseDeleteView):
    """Delete an allowance/discount; superusers only."""
    model = AllowanceDiscount
    success_url = reverse_lazy('atelier:allowance_discount_list')
    template_name = 'atelier/delete_form.html'
from django.urls import path

from . import views
from django.contrib.auth import views as auth_views

app_name = 'atelier'

'''
path() argument: name¶
Naming your URL lets you refer to it unambiguously from elsewhere in Django, especially from within templates.
This powerful feature allows you to make global changes to the URL patterns of your project while only touching a single file.
'''

urlpatterns = [
    # Landing page of the atelier app.
    path('', views.index, name='index'),
    # Client CRUD.
    path('client/', views.ClientListView.as_view(), name='client_list'),
    path('client/<int:pk>/', views.ClientDetailView.as_view(), name='client_detail'),
    path('client/add/', views.ClientCreateView.as_view(), name='client_form'),
    path('client/<int:pk>/edit/', views.ClientUpdateView.as_view(), name='client_update_form'),
    path('client/<int:pk>/delete/', views.ClientDeleteView.as_view(), name='client_delete_form'),
    # Product CRUD.
    path('product/', views.ProductListView.as_view(), name='product_list'),
    path('product/<int:pk>/', views.ProductDetailView.as_view(), name='product_detail'),
    path('product/add/', views.ProductCreateView.as_view(), name='product_form'),
    path('product/<int:pk>/edit/', views.ProductUpdateView.as_view(), name='product_update_form'),
    path('product/<int:pk>/delete/', views.ProductDeleteView.as_view(), name='product_delete_form'),
    # Order CRUD.
    path('order/', views.OrderListView.as_view(), name='order_list'),
    path('order/<int:pk>/', views.OrderDetailView.as_view(), name='order_detail'),
    path('order/add/', views.OrderCreateView.as_view(), name='order_form'),
    path('order/<int:pk>/edit/', views.OrderUpdateView.as_view(), name='order_update_form'),
    path('order/<int:pk>/delete/', views.OrderDeleteView.as_view(), name='order_delete_form'),
    # AllowanceDiscount CRUD (superuser-managed reference data).
    path('allowance_discount/', views.AllowanceDiscountListView.as_view(), name='allowance_discount_list'),
    path('allowance_discount/<int:pk>/', views.AllowanceDiscountDetailView.as_view(),
         name='allowance_discount_detail'),
    path('allowance_discount/add/', views.AllowanceDiscountCreateView.as_view(), name='allowance_discount_form'),
    path('allowance_discount/<int:pk>/edit/', views.AllowanceDiscountUpdateView.as_view(),
         name='allowance_discount_update_form'),
    path('allowance_discount/<int:pk>/delete/', views.AllowanceDiscountDeleteView.as_view(),
         name='allowance_discount_delete_form'),
    # ComplicationElement CRUD (superuser-managed reference data).
    path('complication_element/', views.ComplicationElementListView.as_view(), name='complication_element_list'),
    path('complication_element/<int:pk>/', views.ComplicationElementDetailView.as_view(),
         name='complication_element_detail'),
    path('complication_element/add/', views.ComplicationElementCreateView.as_view(),
         name='complication_element_form'),
    path('complication_element/<int:pk>/edit/', views.ComplicationElementUpdateView.as_view(),
         name='complication_element_update_form'),
    path('complication_element/<int:pk>/delete/', views.ComplicationElementDeleteView.as_view(),
         name='complication_element_delete_form'),
    # Fabric CRUD (superuser-managed reference data).
    path('fabric/', views.FabricListView.as_view(), name='fabric_list'),
    path('fabric/<int:pk>/', views.FabricDetailView.as_view(), name='fabric_detail'),
    path('fabric/add/', views.FabricCreateView.as_view(), name='fabric_form'),
    path('fabric/<int:pk>/edit/', views.FabricUpdateView.as_view(), name='fabric_update_form'),
    path('fabric/<int:pk>/delete/', views.FabricDeleteView.as_view(), name='fabric_delete_form'),
    # MinimalStyle CRUD (superuser-managed reference data).
    path('minimal_style/', views.MinimalStyleListView.as_view(), name='minimal_style_list'),
    path('minimal_style/<int:pk>/', views.MinimalStyleDetailView.as_view(), name='minimal_style_detail'),
    path('minimal_style/add/', views.MinimalStyleCreateView.as_view(), name='minimal_style_form'),
    path('minimal_style/<int:pk>/edit/', views.MinimalStyleUpdateView.as_view(), name='minimal_style_update_form'),
    path('minimal_style/<int:pk>/delete/', views.MinimalStyleDeleteView.as_view(),
         name='minimal_style_delete_form'),
    # Atelier CRUD.
    path('atelier/', views.AtelierListView.as_view(), name='atelier_list'),
    path('atelier/<int:pk>/', views.AtelierDetailView.as_view(), name='atelier_detail'),
    path('atelier/add/', views.AtelierCreateView.as_view(), name='atelier_form'),
    path('atelier/<int:pk>/edit/', views.AtelierUpdateView.as_view(), name='atelier_update_form'),
    path('atelier/<int:pk>/delete/', views.AtelierDeleteView.as_view(), name='atelier_delete_form'),
    # Profile CRUD.
    path('profile/', views.ProfileListView.as_view(), name='profile_list'),
    path('profile/<int:pk>/', views.ProfileDetailView.as_view(), name='profile_detail'),
    path('profile/add/', views.ProfileCreateView.as_view(), name='profile_form'),
    path('profile/<int:pk>/edit/', views.ProfileChangeView.as_view(), name='profile_update_form'),
    path('profile/<int:pk>/delete/', views.ProfileDeleteView.as_view(), name='profile_delete_form'),
]
""" return reverse('atelier:minimal_style_detail', args=[str(self.id)]) <file_sep>/atelier/models/atelier.py from django.contrib.auth import get_user_model from django.db import models from django.urls import reverse from django.utils.translation import gettext_lazy as _ from atelier.models.abstract_base import AbstractBaseModel class Atelier(AbstractBaseModel): name = models.CharField( max_length=150, verbose_name=_('name') ) def __str__(self): """ to display an object in the Django admin site and as the value inserted into a template when it displays an object """ return self.name class Meta: ordering = ['name'] def get_absolute_url(self): """ Returns the url to access a particular client instance. """ return reverse('atelier:atelier_detail', args=[str(self.id)]) <file_sep>/atelier/app_utils.py """ order price calculation: order_price = (starting_price * Fabric.complexity_factor) * order_processing_category + order_allowance_discount where: starting_price = Product.base_price + (ComplicationElement.base_price * ComplicationElement.complexity)_1 + (ComplicationElement.base_price * ComplicationElement.complexity)_2 + ... order_allowance_discount = (AllowanceDiscount.coefficient * starting_price)_1 + (AllowanceDiscount.coefficient * starting_price)_2 +... 
""" def order_price_calculation(fabric_complexity_factor, product_base_price, complication_element_base_price, complication_element_complexity, order_processing_category, allowance_discount_coefficients): complication_element_price = 0 order_allowance_discount = 0 for i in range(len(complication_element_base_price)): complication_element_price += complication_element_base_price[i] * complication_element_complexity[i] starting_price = (product_base_price + complication_element_price) * fabric_complexity_factor if order_processing_category == 1: starting_price = 1.2 * starting_price # +20% for first processing category for coefficient in allowance_discount_coefficients: order_allowance_discount += coefficient * starting_price order_price = starting_price + order_allowance_discount return "%.2f" % order_price <file_sep>/vokss/develop/develop_settings.py # from buyclip.buyclip_rq import devrun_rq_runnable from ievv_opensource.utils import ievvdevrun from ievv_opensource.utils import ievvbuildstatic from .develop_and_test_settings_common import * # INSTALLED_APPS += [ # 'ievv_opensource.ievv_developemail', # ] IEVVTASKS_DEVRUN_RUNNABLES = { 'default': ievvdevrun.config.RunnableThreadList( ievvdevrun.runnables.dbdev_runserver.RunnableThread(), # ievvdevrun.runnables.redis_server.RunnableThread(port='36401'), ievvdevrun.runnables.django_runserver.RunnableThread(port=8080), ), 'design': ievvdevrun.config.RunnableThreadList( ievvdevrun.runnables.dbdev_runserver.RunnableThread(), # ievvdevrun.runnables.redis_server.RunnableThread(port='36401'), ievvdevrun.runnables.django_runserver.RunnableThread(port=8080), ievvdevrun.runnables.ievv_buildstatic.RunnableThread() ), } IEVVTASKS_HEROKUDEPLOY = { 'production': { 'release_type': 'production', 'heroku_appname': 'buyclip-prod', 'require_git_branch': 'production', 'pre_gitpush_heroku_commands': [ ], 'post_gitpush_heroku_commands': [ ['maintenance:on'], ['run', 'python manage.py migrate'], ['maintenance:off'], ], }, } # 
class OrderCreateView(TailorPermissionPreMixin, BaseCreateView):
    """Create an order; selectable related objects are limited to the user's atelier."""
    model = Order
    form_class = OrderForm
    template_name = 'atelier/order_form.html'

    def get_context_data(self, **kwargs):
        context = super(OrderCreateView, self).get_context_data(**kwargs)
        # Restrict the dropdowns to records belonging to the user's atelier.
        context['form'].fields['client'].queryset = Client.objects.filter(atelier=self.request.user.profile.atelier)
        context['form'].fields['product'].queryset = Product.objects.filter(atelier=self.request.user.profile.atelier)
        context['form'].fields['performer'].queryset = Profile.objects.filter(atelier=self.request.user.profile.atelier)
        return context


class OrderDetailView(AtelierFilterObjectsPreMixin, BaseDetailView):
    """Order detail page; exposes the calculated price in the template context."""
    model = Order
    fields = '__all__'

    def get_order_price(self):
        """Return the formatted order price.

        Delegates to the ``Order.order_price`` model property so the pricing
        formula lives in exactly one place. The previous implementation
        re-collected base prices, complexities and coefficients in three
        separate queryset loops, duplicating the model's logic.
        """
        return self.object.order_price

    def get_context_data(self, **kwargs):
        context_data = super().get_context_data(**kwargs)
        context_data['order_price_view'] = self.get_order_price()
        return context_data


class OrderUpdateView(TailorPermissionPreMixin, AtelierFilterObjectsPreMixin, BaseUpdateView):
    """Edit an order; selectable related objects are limited to the user's atelier."""
    model = Order
    form_class = OrderForm
    template_name = 'atelier/order_form.html'

    def get_context_data(self, **kwargs):
        context = super(OrderUpdateView, self).get_context_data(**kwargs)
        # Restrict the dropdowns to records belonging to the user's atelier.
        context['form'].fields['client'].queryset = Client.objects.filter(atelier=self.request.user.profile.atelier)
        context['form'].fields['product'].queryset = Product.objects.filter(atelier=self.request.user.profile.atelier)
        context['form'].fields['performer'].queryset = Profile.objects.filter(atelier=self.request.user.profile.atelier)
        return context


class OrderListView(AtelierFilterObjectsPreMixin, BaseListView):
    """Paginated list of orders, scoped to the user's atelier."""
    model = Order
    template_name = 'atelier/order_list.html'
    context_object_name = 'order_list'


class OrderDeleteView(TailorPermissionPreMixin, AtelierFilterObjectsPreMixin, BaseDeleteView):
    """Delete an order within the user's atelier; tailors and superusers only."""
    model = Order
    success_url = reverse_lazy('atelier:order_list')
    template_name = 'atelier/delete_form.html'
pg_catalog.set_config('search_path', '', false); SET check_function_bodies = false; SET xmloption = content; SET client_min_messages = warning; SET row_security = off; SET default_tablespace = ''; SET default_table_access_method = heap; -- -- Name: atelier_allowancediscount; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_allowancediscount ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, name character varying(255) NOT NULL, coefficient numeric(5,2) NOT NULL, label character varying(255) NOT NULL, created_by_id integer, last_updated_by_id integer ); ALTER TABLE public.atelier_allowancediscount OWNER TO dbdev; -- -- Name: atelier_allowancediscount_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_allowancediscount_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_allowancediscount_id_seq OWNER TO dbdev; -- -- Name: atelier_allowancediscount_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_allowancediscount_id_seq OWNED BY public.atelier_allowancediscount.id; -- -- Name: atelier_atelier; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_atelier ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, name character varying(150) NOT NULL, created_by_id integer, last_updated_by_id integer ); ALTER TABLE public.atelier_atelier OWNER TO dbdev; -- -- Name: atelier_atelier_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_atelier_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_atelier_id_seq OWNER TO dbdev; -- -- Name: atelier_atelier_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_atelier_id_seq OWNED BY 
public.atelier_atelier.id; -- -- Name: atelier_client; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_client ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, first_name character varying(30) NOT NULL, last_name character varying(30) NOT NULL, tel_number character varying(30) NOT NULL, place character varying(30) NOT NULL, atelier_id integer NOT NULL, created_by_id integer, last_updated_by_id integer ); ALTER TABLE public.atelier_client OWNER TO dbdev; -- -- Name: atelier_client_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_client_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_client_id_seq OWNER TO dbdev; -- -- Name: atelier_client_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_client_id_seq OWNED BY public.atelier_client.id; -- -- Name: atelier_complicationelement; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_complicationelement ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, name character varying(264) NOT NULL, base_price numeric(5,2) NOT NULL, complexity numeric(3,2) NOT NULL, "group" character varying(255) NOT NULL, created_by_id integer, last_updated_by_id integer ); ALTER TABLE public.atelier_complicationelement OWNER TO dbdev; -- -- Name: atelier_complicationelement_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_complicationelement_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_complicationelement_id_seq OWNER TO dbdev; -- -- Name: atelier_complicationelement_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_complicationelement_id_seq OWNED BY public.atelier_complicationelement.id; -- -- 
Name: atelier_fabric; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_fabric ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, name character varying(264) NOT NULL, "group" character varying(3) NOT NULL, complexity_factor numeric(5,2) NOT NULL, created_by_id integer, last_updated_by_id integer ); ALTER TABLE public.atelier_fabric OWNER TO dbdev; -- -- Name: atelier_fabric_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_fabric_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_fabric_id_seq OWNER TO dbdev; -- -- Name: atelier_fabric_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_fabric_id_seq OWNED BY public.atelier_fabric.id; -- -- Name: atelier_minimalstyle; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_minimalstyle ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, name text NOT NULL, "group" character varying(264) NOT NULL, created_by_id integer, last_updated_by_id integer ); ALTER TABLE public.atelier_minimalstyle OWNER TO dbdev; -- -- Name: atelier_minimalstyle_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_minimalstyle_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_minimalstyle_id_seq OWNER TO dbdev; -- -- Name: atelier_minimalstyle_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_minimalstyle_id_seq OWNED BY public.atelier_minimalstyle.id; -- -- Name: atelier_order; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_order ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, processing_category character varying(1) NOT 
NULL, order_date date NOT NULL, deadline date, is_closed boolean NOT NULL, atelier_id integer NOT NULL, client_id integer NOT NULL, created_by_id integer, fabric_id integer NOT NULL, last_updated_by_id integer, performer_id integer, product_id integer NOT NULL ); ALTER TABLE public.atelier_order OWNER TO dbdev; -- -- Name: atelier_order_allowance_discount; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_order_allowance_discount ( id integer NOT NULL, order_id integer NOT NULL, allowancediscount_id integer NOT NULL ); ALTER TABLE public.atelier_order_allowance_discount OWNER TO dbdev; -- -- Name: atelier_order_allowance_discount_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_order_allowance_discount_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_order_allowance_discount_id_seq OWNER TO dbdev; -- -- Name: atelier_order_allowance_discount_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_order_allowance_discount_id_seq OWNED BY public.atelier_order_allowance_discount.id; -- -- Name: atelier_order_complication_elements; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_order_complication_elements ( id integer NOT NULL, order_id integer NOT NULL, complicationelement_id integer NOT NULL ); ALTER TABLE public.atelier_order_complication_elements OWNER TO dbdev; -- -- Name: atelier_order_complication_elements_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_order_complication_elements_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_order_complication_elements_id_seq OWNER TO dbdev; -- -- Name: atelier_order_complication_elements_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_order_complication_elements_id_seq OWNED BY 
public.atelier_order_complication_elements.id; -- -- Name: atelier_order_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_order_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_order_id_seq OWNER TO dbdev; -- -- Name: atelier_order_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_order_id_seq OWNED BY public.atelier_order.id; -- -- Name: atelier_product; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_product ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, name character varying(264) NOT NULL, base_price numeric(10,2) NOT NULL, atelier_id integer NOT NULL, created_by_id integer, last_updated_by_id integer, minimal_style_id integer NOT NULL ); ALTER TABLE public.atelier_product OWNER TO dbdev; -- -- Name: atelier_product_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_product_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.atelier_product_id_seq OWNER TO dbdev; -- -- Name: atelier_product_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_product_id_seq OWNED BY public.atelier_product.id; -- -- Name: atelier_profile; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.atelier_profile ( id integer NOT NULL, created_datetime timestamp with time zone, last_updated_datetime timestamp with time zone, is_tailor boolean NOT NULL, atelier_id integer NOT NULL, created_by_id integer, last_updated_by_id integer, user_id integer NOT NULL ); ALTER TABLE public.atelier_profile OWNER TO dbdev; -- -- Name: atelier_profile_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.atelier_profile_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE 
public.atelier_profile_id_seq OWNER TO dbdev; -- -- Name: atelier_profile_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.atelier_profile_id_seq OWNED BY public.atelier_profile.id; -- -- Name: auth_group; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.auth_group ( id integer NOT NULL, name character varying(150) NOT NULL ); ALTER TABLE public.auth_group OWNER TO dbdev; -- -- Name: auth_group_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.auth_group_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.auth_group_id_seq OWNER TO dbdev; -- -- Name: auth_group_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.auth_group_id_seq OWNED BY public.auth_group.id; -- -- Name: auth_group_permissions; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.auth_group_permissions ( id integer NOT NULL, group_id integer NOT NULL, permission_id integer NOT NULL ); ALTER TABLE public.auth_group_permissions OWNER TO dbdev; -- -- Name: auth_group_permissions_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.auth_group_permissions_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.auth_group_permissions_id_seq OWNER TO dbdev; -- -- Name: auth_group_permissions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.auth_group_permissions_id_seq OWNED BY public.auth_group_permissions.id; -- -- Name: auth_permission; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.auth_permission ( id integer NOT NULL, name character varying(255) NOT NULL, content_type_id integer NOT NULL, codename character varying(100) NOT NULL ); ALTER TABLE public.auth_permission OWNER TO dbdev; -- -- Name: auth_permission_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE 
public.auth_permission_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.auth_permission_id_seq OWNER TO dbdev; -- -- Name: auth_permission_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.auth_permission_id_seq OWNED BY public.auth_permission.id; -- -- Name: auth_user; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.auth_user ( id integer NOT NULL, password character varying(128) NOT NULL, last_login timestamp with time zone, is_superuser boolean NOT NULL, username character varying(150) NOT NULL, first_name character varying(30) NOT NULL, last_name character varying(150) NOT NULL, email character varying(254) NOT NULL, is_staff boolean NOT NULL, is_active boolean NOT NULL, date_joined timestamp with time zone NOT NULL ); ALTER TABLE public.auth_user OWNER TO dbdev; -- -- Name: auth_user_groups; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.auth_user_groups ( id integer NOT NULL, user_id integer NOT NULL, group_id integer NOT NULL ); ALTER TABLE public.auth_user_groups OWNER TO dbdev; -- -- Name: auth_user_groups_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.auth_user_groups_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.auth_user_groups_id_seq OWNER TO dbdev; -- -- Name: auth_user_groups_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.auth_user_groups_id_seq OWNED BY public.auth_user_groups.id; -- -- Name: auth_user_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.auth_user_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.auth_user_id_seq OWNER TO dbdev; -- -- Name: auth_user_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.auth_user_id_seq OWNED BY public.auth_user.id; -- -- Name: 
auth_user_user_permissions; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.auth_user_user_permissions ( id integer NOT NULL, user_id integer NOT NULL, permission_id integer NOT NULL ); ALTER TABLE public.auth_user_user_permissions OWNER TO dbdev; -- -- Name: auth_user_user_permissions_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.auth_user_user_permissions_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.auth_user_user_permissions_id_seq OWNER TO dbdev; -- -- Name: auth_user_user_permissions_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.auth_user_user_permissions_id_seq OWNED BY public.auth_user_user_permissions.id; -- -- Name: django_admin_log; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.django_admin_log ( id integer NOT NULL, action_time timestamp with time zone NOT NULL, object_id text, object_repr character varying(200) NOT NULL, action_flag smallint NOT NULL, change_message text NOT NULL, content_type_id integer, user_id integer NOT NULL, CONSTRAINT django_admin_log_action_flag_check CHECK ((action_flag >= 0)) ); ALTER TABLE public.django_admin_log OWNER TO dbdev; -- -- Name: django_admin_log_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.django_admin_log_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.django_admin_log_id_seq OWNER TO dbdev; -- -- Name: django_admin_log_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.django_admin_log_id_seq OWNED BY public.django_admin_log.id; -- -- Name: django_content_type; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.django_content_type ( id integer NOT NULL, app_label character varying(100) NOT NULL, model character varying(100) NOT NULL ); ALTER TABLE public.django_content_type OWNER TO dbdev; -- -- Name: 
django_content_type_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.django_content_type_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.django_content_type_id_seq OWNER TO dbdev; -- -- Name: django_content_type_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.django_content_type_id_seq OWNED BY public.django_content_type.id; -- -- Name: django_migrations; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.django_migrations ( id integer NOT NULL, app character varying(255) NOT NULL, name character varying(255) NOT NULL, applied timestamp with time zone NOT NULL ); ALTER TABLE public.django_migrations OWNER TO dbdev; -- -- Name: django_migrations_id_seq; Type: SEQUENCE; Schema: public; Owner: dbdev -- CREATE SEQUENCE public.django_migrations_id_seq AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER TABLE public.django_migrations_id_seq OWNER TO dbdev; -- -- Name: django_migrations_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: dbdev -- ALTER SEQUENCE public.django_migrations_id_seq OWNED BY public.django_migrations.id; -- -- Name: django_session; Type: TABLE; Schema: public; Owner: dbdev -- CREATE TABLE public.django_session ( session_key character varying(40) NOT NULL, session_data text NOT NULL, expire_date timestamp with time zone NOT NULL ); ALTER TABLE public.django_session OWNER TO dbdev; -- -- Name: atelier_allowancediscount id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_allowancediscount ALTER COLUMN id SET DEFAULT nextval('public.atelier_allowancediscount_id_seq'::regclass); -- -- Name: atelier_atelier id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_atelier ALTER COLUMN id SET DEFAULT nextval('public.atelier_atelier_id_seq'::regclass); -- -- Name: atelier_client id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER 
TABLE ONLY public.atelier_client ALTER COLUMN id SET DEFAULT nextval('public.atelier_client_id_seq'::regclass); -- -- Name: atelier_complicationelement id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_complicationelement ALTER COLUMN id SET DEFAULT nextval('public.atelier_complicationelement_id_seq'::regclass); -- -- Name: atelier_fabric id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_fabric ALTER COLUMN id SET DEFAULT nextval('public.atelier_fabric_id_seq'::regclass); -- -- Name: atelier_minimalstyle id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_minimalstyle ALTER COLUMN id SET DEFAULT nextval('public.atelier_minimalstyle_id_seq'::regclass); -- -- Name: atelier_order id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order ALTER COLUMN id SET DEFAULT nextval('public.atelier_order_id_seq'::regclass); -- -- Name: atelier_order_allowance_discount id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_allowance_discount ALTER COLUMN id SET DEFAULT nextval('public.atelier_order_allowance_discount_id_seq'::regclass); -- -- Name: atelier_order_complication_elements id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_complication_elements ALTER COLUMN id SET DEFAULT nextval('public.atelier_order_complication_elements_id_seq'::regclass); -- -- Name: atelier_product id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_product ALTER COLUMN id SET DEFAULT nextval('public.atelier_product_id_seq'::regclass); -- -- Name: atelier_profile id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_profile ALTER COLUMN id SET DEFAULT nextval('public.atelier_profile_id_seq'::regclass); -- -- Name: auth_group id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_group ALTER COLUMN id SET DEFAULT 
nextval('public.auth_group_id_seq'::regclass); -- -- Name: auth_group_permissions id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_group_permissions ALTER COLUMN id SET DEFAULT nextval('public.auth_group_permissions_id_seq'::regclass); -- -- Name: auth_permission id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_permission ALTER COLUMN id SET DEFAULT nextval('public.auth_permission_id_seq'::regclass); -- -- Name: auth_user id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user ALTER COLUMN id SET DEFAULT nextval('public.auth_user_id_seq'::regclass); -- -- Name: auth_user_groups id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_groups ALTER COLUMN id SET DEFAULT nextval('public.auth_user_groups_id_seq'::regclass); -- -- Name: auth_user_user_permissions id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_user_permissions ALTER COLUMN id SET DEFAULT nextval('public.auth_user_user_permissions_id_seq'::regclass); -- -- Name: django_admin_log id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_admin_log ALTER COLUMN id SET DEFAULT nextval('public.django_admin_log_id_seq'::regclass); -- -- Name: django_content_type id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_content_type ALTER COLUMN id SET DEFAULT nextval('public.django_content_type_id_seq'::regclass); -- -- Name: django_migrations id; Type: DEFAULT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_migrations ALTER COLUMN id SET DEFAULT nextval('public.django_migrations_id_seq'::regclass); -- -- Data for Name: atelier_allowancediscount; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_allowancediscount (id, created_datetime, last_updated_datetime, name, coefficient, label, created_by_id, last_updated_by_id) FROM stdin; \. 
-- -- Data for Name: atelier_atelier; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_atelier (id, created_datetime, last_updated_datetime, name, created_by_id, last_updated_by_id) FROM stdin; \. -- -- Data for Name: atelier_client; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_client (id, created_datetime, last_updated_datetime, first_name, last_name, tel_number, place, atelier_id, created_by_id, last_updated_by_id) FROM stdin; \. -- -- Data for Name: atelier_complicationelement; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_complicationelement (id, created_datetime, last_updated_datetime, name, base_price, complexity, "group", created_by_id, last_updated_by_id) FROM stdin; \. -- -- Data for Name: atelier_fabric; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_fabric (id, created_datetime, last_updated_datetime, name, "group", complexity_factor, created_by_id, last_updated_by_id) FROM stdin; \. -- -- Data for Name: atelier_minimalstyle; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_minimalstyle (id, created_datetime, last_updated_datetime, name, "group", created_by_id, last_updated_by_id) FROM stdin; \. -- -- Data for Name: atelier_order; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_order (id, created_datetime, last_updated_datetime, processing_category, order_date, deadline, is_closed, atelier_id, client_id, created_by_id, fabric_id, last_updated_by_id, performer_id, product_id) FROM stdin; \. -- -- Data for Name: atelier_order_allowance_discount; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_order_allowance_discount (id, order_id, allowancediscount_id) FROM stdin; \. -- -- Data for Name: atelier_order_complication_elements; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_order_complication_elements (id, order_id, complicationelement_id) FROM stdin; \. 
-- -- Data for Name: atelier_product; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_product (id, created_datetime, last_updated_datetime, name, base_price, atelier_id, created_by_id, last_updated_by_id, minimal_style_id) FROM stdin; \. -- -- Data for Name: atelier_profile; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.atelier_profile (id, created_datetime, last_updated_datetime, is_tailor, atelier_id, created_by_id, last_updated_by_id, user_id) FROM stdin; \. -- -- Data for Name: auth_group; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.auth_group (id, name) FROM stdin; \. -- -- Data for Name: auth_group_permissions; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.auth_group_permissions (id, group_id, permission_id) FROM stdin; \. -- -- Data for Name: auth_permission; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.auth_permission (id, name, content_type_id, codename) FROM stdin; 1 Can add log entry 1 add_logentry 2 Can change log entry 1 change_logentry 3 Can delete log entry 1 delete_logentry 4 Can view log entry 1 view_logentry 5 Can add permission 2 add_permission 6 Can change permission 2 change_permission 7 Can delete permission 2 delete_permission 8 Can view permission 2 view_permission 9 Can add group 3 add_group 10 Can change group 3 change_group 11 Can delete group 3 delete_group 12 Can view group 3 view_group 13 Can add user 4 add_user 14 Can change user 4 change_user 15 Can delete user 4 delete_user 16 Can view user 4 view_user 17 Can add content type 5 add_contenttype 18 Can change content type 5 change_contenttype 19 Can delete content type 5 delete_contenttype 20 Can view content type 5 view_contenttype 21 Can add session 6 add_session 22 Can change session 6 change_session 23 Can delete session 6 delete_session 24 Can view session 6 view_session 25 Can add allowance discount 7 add_allowancediscount 26 Can change allowance discount 7 change_allowancediscount 27 Can 
delete allowance discount 7 delete_allowancediscount 28 Can view allowance discount 7 view_allowancediscount 29 Can add atelier 8 add_atelier 30 Can change atelier 8 change_atelier 31 Can delete atelier 8 delete_atelier 32 Can view atelier 8 view_atelier 33 Can add client 9 add_client 34 Can change client 9 change_client 35 Can delete client 9 delete_client 36 Can view client 9 view_client 37 Can add complication element 10 add_complicationelement 38 Can change complication element 10 change_complicationelement 39 Can delete complication element 10 delete_complicationelement 40 Can view complication element 10 view_complicationelement 41 Can add fabric 11 add_fabric 42 Can change fabric 11 change_fabric 43 Can delete fabric 11 delete_fabric 44 Can view fabric 11 view_fabric 45 Can add minimal style 12 add_minimalstyle 46 Can change minimal style 12 change_minimalstyle 47 Can delete minimal style 12 delete_minimalstyle 48 Can view minimal style 12 view_minimalstyle 49 Can add profile 13 add_profile 50 Can change profile 13 change_profile 51 Can delete profile 13 delete_profile 52 Can view profile 13 view_profile 53 Can add product 14 add_product 54 Can change product 14 change_product 55 Can delete product 14 delete_product 56 Can view product 14 view_product 57 Can add order 15 add_order 58 Can change order 15 change_order 59 Can delete order 15 delete_order 60 Can view order 15 view_order \. -- -- Data for Name: auth_user; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.auth_user (id, <PASSWORD>, last_login, is_superuser, username, first_name, last_name, email, is_staff, is_active, date_joined) FROM stdin; 1 pbkdf2_sha256$150000$sokASoNh5sfU$OcyTEOUmavrdCUAfL9BeHPDF6gEXKkzjK9I0N5C4gvI= 2020-01-13 13:18:18.207744+02 t <EMAIL> <EMAIL> t t 2020-01-13 13:18:08.02739+02 \. -- -- Data for Name: auth_user_groups; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.auth_user_groups (id, user_id, group_id) FROM stdin; \. 
-- -- Data for Name: auth_user_user_permissions; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.auth_user_user_permissions (id, user_id, permission_id) FROM stdin; \. -- -- Data for Name: django_admin_log; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.django_admin_log (id, action_time, object_id, object_repr, action_flag, change_message, content_type_id, user_id) FROM stdin; \. -- -- Data for Name: django_content_type; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.django_content_type (id, app_label, model) FROM stdin; 1 admin logentry 2 auth permission 3 auth group 4 auth user 5 contenttypes contenttype 6 sessions session 7 atelier allowancediscount 8 atelier atelier 9 atelier client 10 atelier complicationelement 11 atelier fabric 12 atelier minimalstyle 13 atelier profile 14 atelier product 15 atelier order \. -- -- Data for Name: django_migrations; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.django_migrations (id, app, name, applied) FROM stdin; 1 contenttypes 0001_initial 2020-01-13 13:16:16.625842+02 2 auth 0001_initial 2020-01-13 13:16:16.672534+02 3 admin 0001_initial 2020-01-13 13:16:16.759728+02 4 admin 0002_logentry_remove_auto_add 2020-01-13 13:16:16.778565+02 5 admin 0003_logentry_add_action_flag_choices 2020-01-13 13:16:16.78751+02 6 atelier 0001_initial 2020-01-13 13:16:16.949344+02 7 contenttypes 0002_remove_content_type_name 2020-01-13 13:16:17.108282+02 8 auth 0002_alter_permission_name_max_length 2020-01-13 13:16:17.116504+02 9 auth 0003_alter_user_email_max_length 2020-01-13 13:16:17.135722+02 10 auth 0004_alter_user_username_opts 2020-01-13 13:16:17.155809+02 11 auth 0005_alter_user_last_login_null 2020-01-13 13:16:17.178176+02 12 auth 0006_require_contenttypes_0002 2020-01-13 13:16:17.180655+02 13 auth 0007_alter_validators_add_error_messages 2020-01-13 13:16:17.201809+02 14 auth 0008_alter_user_username_max_length 2020-01-13 13:16:17.225636+02 15 auth 
0009_alter_user_last_name_max_length 2020-01-13 13:16:17.257078+02 16 auth 0010_alter_group_name_max_length 2020-01-13 13:16:17.266275+02 17 auth 0011_update_proxy_permissions 2020-01-13 13:16:17.289585+02 18 sessions 0001_initial 2020-01-13 13:16:17.298457+02 \. -- -- Data for Name: django_session; Type: TABLE DATA; Schema: public; Owner: dbdev -- COPY public.django_session (session_key, session_data, expire_date) FROM stdin; qiuiqo5rgrokyv77ejegx937t132df6x YTAwZDA0MDlkZjk4NzE1YTg2YzgyN2Q3YjQ<KEY> 2020-01-27 13:18:23.688525+02 \. -- -- Name: atelier_allowancediscount_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_allowancediscount_id_seq', 1, false); -- -- Name: atelier_atelier_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_atelier_id_seq', 1, false); -- -- Name: atelier_client_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_client_id_seq', 1, false); -- -- Name: atelier_complicationelement_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_complicationelement_id_seq', 1, false); -- -- Name: atelier_fabric_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_fabric_id_seq', 1, false); -- -- Name: atelier_minimalstyle_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_minimalstyle_id_seq', 1, false); -- -- Name: atelier_order_allowance_discount_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_order_allowance_discount_id_seq', 1, false); -- -- Name: atelier_order_complication_elements_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_order_complication_elements_id_seq', 1, false); -- -- Name: atelier_order_id_seq; Type: SEQUENCE SET; Schema: public; Owner: 
dbdev -- SELECT pg_catalog.setval('public.atelier_order_id_seq', 1, false); -- -- Name: atelier_product_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_product_id_seq', 1, false); -- -- Name: atelier_profile_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.atelier_profile_id_seq', 1, false); -- -- Name: auth_group_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.auth_group_id_seq', 1, false); -- -- Name: auth_group_permissions_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.auth_group_permissions_id_seq', 1, false); -- -- Name: auth_permission_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.auth_permission_id_seq', 60, true); -- -- Name: auth_user_groups_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.auth_user_groups_id_seq', 1, false); -- -- Name: auth_user_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.auth_user_id_seq', 1, true); -- -- Name: auth_user_user_permissions_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.auth_user_user_permissions_id_seq', 1, false); -- -- Name: django_admin_log_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.django_admin_log_id_seq', 1, false); -- -- Name: django_content_type_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.django_content_type_id_seq', 15, true); -- -- Name: django_migrations_id_seq; Type: SEQUENCE SET; Schema: public; Owner: dbdev -- SELECT pg_catalog.setval('public.django_migrations_id_seq', 18, true); -- -- Name: atelier_allowancediscount atelier_allowancediscount_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY 
public.atelier_allowancediscount ADD CONSTRAINT atelier_allowancediscount_pkey PRIMARY KEY (id); -- -- Name: atelier_atelier atelier_atelier_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_atelier ADD CONSTRAINT atelier_atelier_pkey PRIMARY KEY (id); -- -- Name: atelier_client atelier_client_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_client ADD CONSTRAINT atelier_client_pkey PRIMARY KEY (id); -- -- Name: atelier_complicationelement atelier_complicationelement_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_complicationelement ADD CONSTRAINT atelier_complicationelement_pkey PRIMARY KEY (id); -- -- Name: atelier_fabric atelier_fabric_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_fabric ADD CONSTRAINT atelier_fabric_pkey PRIMARY KEY (id); -- -- Name: atelier_minimalstyle atelier_minimalstyle_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_minimalstyle ADD CONSTRAINT atelier_minimalstyle_pkey PRIMARY KEY (id); -- -- Name: atelier_order_allowance_discount atelier_order_allowance__order_id_allowancediscou_5826d5f9_uniq; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_allowance_discount ADD CONSTRAINT atelier_order_allowance__order_id_allowancediscou_5826d5f9_uniq UNIQUE (order_id, allowancediscount_id); -- -- Name: atelier_order_allowance_discount atelier_order_allowance_discount_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_allowance_discount ADD CONSTRAINT atelier_order_allowance_discount_pkey PRIMARY KEY (id); -- -- Name: atelier_order_complication_elements atelier_order_complicati_order_id_complicationele_acbe3162_uniq; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_complication_elements ADD CONSTRAINT 
atelier_order_complicati_order_id_complicationele_acbe3162_uniq UNIQUE (order_id, complicationelement_id); -- -- Name: atelier_order_complication_elements atelier_order_complication_elements_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_complication_elements ADD CONSTRAINT atelier_order_complication_elements_pkey PRIMARY KEY (id); -- -- Name: atelier_order atelier_order_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order ADD CONSTRAINT atelier_order_pkey PRIMARY KEY (id); -- -- Name: atelier_product atelier_product_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_product ADD CONSTRAINT atelier_product_pkey PRIMARY KEY (id); -- -- Name: atelier_profile atelier_profile_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_profile ADD CONSTRAINT atelier_profile_pkey PRIMARY KEY (id); -- -- Name: atelier_profile atelier_profile_user_id_key; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_profile ADD CONSTRAINT atelier_profile_user_id_key UNIQUE (user_id); -- -- Name: auth_group auth_group_name_key; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_group ADD CONSTRAINT auth_group_name_key UNIQUE (name); -- -- Name: auth_group_permissions auth_group_permissions_group_id_permission_id_0cd325b0_uniq; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_group_permissions ADD CONSTRAINT auth_group_permissions_group_id_permission_id_0cd325b0_uniq UNIQUE (group_id, permission_id); -- -- Name: auth_group_permissions auth_group_permissions_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_group_permissions ADD CONSTRAINT auth_group_permissions_pkey PRIMARY KEY (id); -- -- Name: auth_group auth_group_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_group ADD 
CONSTRAINT auth_group_pkey PRIMARY KEY (id); -- -- Name: auth_permission auth_permission_content_type_id_codename_01ab375a_uniq; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_permission ADD CONSTRAINT auth_permission_content_type_id_codename_01ab375a_uniq UNIQUE (content_type_id, codename); -- -- Name: auth_permission auth_permission_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_permission ADD CONSTRAINT auth_permission_pkey PRIMARY KEY (id); -- -- Name: auth_user_groups auth_user_groups_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_groups ADD CONSTRAINT auth_user_groups_pkey PRIMARY KEY (id); -- -- Name: auth_user_groups auth_user_groups_user_id_group_id_94350c0c_uniq; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_groups ADD CONSTRAINT auth_user_groups_user_id_group_id_94350c0c_uniq UNIQUE (user_id, group_id); -- -- Name: auth_user auth_user_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user ADD CONSTRAINT auth_user_pkey PRIMARY KEY (id); -- -- Name: auth_user_user_permissions auth_user_user_permissions_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_user_permissions ADD CONSTRAINT auth_user_user_permissions_pkey PRIMARY KEY (id); -- -- Name: auth_user_user_permissions auth_user_user_permissions_user_id_permission_id_14a6b632_uniq; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_user_permissions ADD CONSTRAINT auth_user_user_permissions_user_id_permission_id_14a6b632_uniq UNIQUE (user_id, permission_id); -- -- Name: auth_user auth_user_username_key; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user ADD CONSTRAINT auth_user_username_key UNIQUE (username); -- -- Name: django_admin_log django_admin_log_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER 
TABLE ONLY public.django_admin_log ADD CONSTRAINT django_admin_log_pkey PRIMARY KEY (id); -- -- Name: django_content_type django_content_type_app_label_model_76bd3d3b_uniq; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_content_type ADD CONSTRAINT django_content_type_app_label_model_76bd3d3b_uniq UNIQUE (app_label, model); -- -- Name: django_content_type django_content_type_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_content_type ADD CONSTRAINT django_content_type_pkey PRIMARY KEY (id); -- -- Name: django_migrations django_migrations_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_migrations ADD CONSTRAINT django_migrations_pkey PRIMARY KEY (id); -- -- Name: django_session django_session_pkey; Type: CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_session ADD CONSTRAINT django_session_pkey PRIMARY KEY (session_key); -- -- Name: atelier_allowancediscount_created_by_id_d2220fd6; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_allowancediscount_created_by_id_d2220fd6 ON public.atelier_allowancediscount USING btree (created_by_id); -- -- Name: atelier_allowancediscount_last_updated_by_id_47954b44; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_allowancediscount_last_updated_by_id_47954b44 ON public.atelier_allowancediscount USING btree (last_updated_by_id); -- -- Name: atelier_atelier_created_by_id_82de01cd; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_atelier_created_by_id_82de01cd ON public.atelier_atelier USING btree (created_by_id); -- -- Name: atelier_atelier_last_updated_by_id_4d758ab7; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_atelier_last_updated_by_id_4d758ab7 ON public.atelier_atelier USING btree (last_updated_by_id); -- -- Name: atelier_client_atelier_id_e394475b; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX 
atelier_client_atelier_id_e394475b ON public.atelier_client USING btree (atelier_id); -- -- Name: atelier_client_created_by_id_28c796c7; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_client_created_by_id_28c796c7 ON public.atelier_client USING btree (created_by_id); -- -- Name: atelier_client_last_updated_by_id_876ac7ba; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_client_last_updated_by_id_876ac7ba ON public.atelier_client USING btree (last_updated_by_id); -- -- Name: atelier_complicationelement_created_by_id_75008570; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_complicationelement_created_by_id_75008570 ON public.atelier_complicationelement USING btree (created_by_id); -- -- Name: atelier_complicationelement_last_updated_by_id_160ff1ed; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_complicationelement_last_updated_by_id_160ff1ed ON public.atelier_complicationelement USING btree (last_updated_by_id); -- -- Name: atelier_fabric_created_by_id_0f69c22f; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_fabric_created_by_id_0f69c22f ON public.atelier_fabric USING btree (created_by_id); -- -- Name: atelier_fabric_last_updated_by_id_5144c27f; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_fabric_last_updated_by_id_5144c27f ON public.atelier_fabric USING btree (last_updated_by_id); -- -- Name: atelier_minimalstyle_created_by_id_e83e24d3; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_minimalstyle_created_by_id_e83e24d3 ON public.atelier_minimalstyle USING btree (created_by_id); -- -- Name: atelier_minimalstyle_last_updated_by_id_34dd85b5; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_minimalstyle_last_updated_by_id_34dd85b5 ON public.atelier_minimalstyle USING btree (last_updated_by_id); -- -- Name: atelier_order_allowance_discount_allowancediscount_id_6483c11d; Type: INDEX; Schema: public; Owner: dbdev -- CREATE 
INDEX atelier_order_allowance_discount_allowancediscount_id_6483c11d ON public.atelier_order_allowance_discount USING btree (allowancediscount_id); -- -- Name: atelier_order_allowance_discount_order_id_2896ddfb; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_allowance_discount_order_id_2896ddfb ON public.atelier_order_allowance_discount USING btree (order_id); -- -- Name: atelier_order_atelier_id_55a0542d; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_atelier_id_55a0542d ON public.atelier_order USING btree (atelier_id); -- -- Name: atelier_order_client_id_a542c9de; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_client_id_a542c9de ON public.atelier_order USING btree (client_id); -- -- Name: atelier_order_complication_complicationelement_id_aad15a47; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_complication_complicationelement_id_aad15a47 ON public.atelier_order_complication_elements USING btree (complicationelement_id); -- -- Name: atelier_order_complication_elements_order_id_12d03d55; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_complication_elements_order_id_12d03d55 ON public.atelier_order_complication_elements USING btree (order_id); -- -- Name: atelier_order_created_by_id_a4e6c8e1; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_created_by_id_a4e6c8e1 ON public.atelier_order USING btree (created_by_id); -- -- Name: atelier_order_fabric_id_5a54eefd; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_fabric_id_5a54eefd ON public.atelier_order USING btree (fabric_id); -- -- Name: atelier_order_last_updated_by_id_aeaad62d; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_last_updated_by_id_aeaad62d ON public.atelier_order USING btree (last_updated_by_id); -- -- Name: atelier_order_performer_id_5b8e7188; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX 
atelier_order_performer_id_5b8e7188 ON public.atelier_order USING btree (performer_id); -- -- Name: atelier_order_product_id_73f0bb1c; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_order_product_id_73f0bb1c ON public.atelier_order USING btree (product_id); -- -- Name: atelier_product_atelier_id_ea0d13e6; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_product_atelier_id_ea0d13e6 ON public.atelier_product USING btree (atelier_id); -- -- Name: atelier_product_created_by_id_7765ca1f; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_product_created_by_id_7765ca1f ON public.atelier_product USING btree (created_by_id); -- -- Name: atelier_product_last_updated_by_id_3f95827b; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_product_last_updated_by_id_3f95827b ON public.atelier_product USING btree (last_updated_by_id); -- -- Name: atelier_product_minimal_style_id_d81fef44; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_product_minimal_style_id_d81fef44 ON public.atelier_product USING btree (minimal_style_id); -- -- Name: atelier_profile_atelier_id_f8e307f6; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_profile_atelier_id_f8e307f6 ON public.atelier_profile USING btree (atelier_id); -- -- Name: atelier_profile_created_by_id_935cbd99; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_profile_created_by_id_935cbd99 ON public.atelier_profile USING btree (created_by_id); -- -- Name: atelier_profile_last_updated_by_id_4665eb0a; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX atelier_profile_last_updated_by_id_4665eb0a ON public.atelier_profile USING btree (last_updated_by_id); -- -- Name: auth_group_name_a6ea08ec_like; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX auth_group_name_a6ea08ec_like ON public.auth_group USING btree (name varchar_pattern_ops); -- -- Name: auth_group_permissions_group_id_b120cbf9; Type: INDEX; Schema: 
public; Owner: dbdev -- CREATE INDEX auth_group_permissions_group_id_b120cbf9 ON public.auth_group_permissions USING btree (group_id); -- -- Name: auth_group_permissions_permission_id_84c5c92e; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX auth_group_permissions_permission_id_84c5c92e ON public.auth_group_permissions USING btree (permission_id); -- -- Name: auth_permission_content_type_id_2f476e4b; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX auth_permission_content_type_id_2f476e4b ON public.auth_permission USING btree (content_type_id); -- -- Name: auth_user_groups_group_id_97559544; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX auth_user_groups_group_id_97559544 ON public.auth_user_groups USING btree (group_id); -- -- Name: auth_user_groups_user_id_6a12ed8b; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX auth_user_groups_user_id_6a12ed8b ON public.auth_user_groups USING btree (user_id); -- -- Name: auth_user_user_permissions_permission_id_1fbb5f2c; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX auth_user_user_permissions_permission_id_1fbb5f2c ON public.auth_user_user_permissions USING btree (permission_id); -- -- Name: auth_user_user_permissions_user_id_a95ead1b; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX auth_user_user_permissions_user_id_a95ead1b ON public.auth_user_user_permissions USING btree (user_id); -- -- Name: auth_user_username_6821ab7c_like; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX auth_user_username_6821ab7c_like ON public.auth_user USING btree (username varchar_pattern_ops); -- -- Name: django_admin_log_content_type_id_c4bce8eb; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX django_admin_log_content_type_id_c4bce8eb ON public.django_admin_log USING btree (content_type_id); -- -- Name: django_admin_log_user_id_c564eba6; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX django_admin_log_user_id_c564eba6 ON public.django_admin_log USING btree 
(user_id); -- -- Name: django_session_expire_date_a5c62663; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX django_session_expire_date_a5c62663 ON public.django_session USING btree (expire_date); -- -- Name: django_session_session_key_c0390e0f_like; Type: INDEX; Schema: public; Owner: dbdev -- CREATE INDEX django_session_session_key_c0390e0f_like ON public.django_session USING btree (session_key varchar_pattern_ops); -- -- Name: atelier_allowancediscount atelier_allowancedis_created_by_id_d2220fd6_fk_auth_user; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_allowancediscount ADD CONSTRAINT atelier_allowancedis_created_by_id_d2220fd6_fk_auth_user FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_allowancediscount atelier_allowancedis_last_updated_by_id_47954b44_fk_auth_user; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_allowancediscount ADD CONSTRAINT atelier_allowancedis_last_updated_by_id_47954b44_fk_auth_user FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_atelier atelier_atelier_created_by_id_82de01cd_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_atelier ADD CONSTRAINT atelier_atelier_created_by_id_82de01cd_fk_auth_user_id FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_atelier atelier_atelier_last_updated_by_id_4d758ab7_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_atelier ADD CONSTRAINT atelier_atelier_last_updated_by_id_4d758ab7_fk_auth_user_id FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_client atelier_client_atelier_id_e394475b_fk_atelier_atelier_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- 
ALTER TABLE ONLY public.atelier_client ADD CONSTRAINT atelier_client_atelier_id_e394475b_fk_atelier_atelier_id FOREIGN KEY (atelier_id) REFERENCES public.atelier_atelier(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_client atelier_client_created_by_id_28c796c7_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_client ADD CONSTRAINT atelier_client_created_by_id_28c796c7_fk_auth_user_id FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_client atelier_client_last_updated_by_id_876ac7ba_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_client ADD CONSTRAINT atelier_client_last_updated_by_id_876ac7ba_fk_auth_user_id FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_complicationelement atelier_complication_created_by_id_75008570_fk_auth_user; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_complicationelement ADD CONSTRAINT atelier_complication_created_by_id_75008570_fk_auth_user FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_complicationelement atelier_complication_last_updated_by_id_160ff1ed_fk_auth_user; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_complicationelement ADD CONSTRAINT atelier_complication_last_updated_by_id_160ff1ed_fk_auth_user FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_fabric atelier_fabric_created_by_id_0f69c22f_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_fabric ADD CONSTRAINT atelier_fabric_created_by_id_0f69c22f_fk_auth_user_id FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_fabric 
atelier_fabric_last_updated_by_id_5144c27f_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_fabric ADD CONSTRAINT atelier_fabric_last_updated_by_id_5144c27f_fk_auth_user_id FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_minimalstyle atelier_minimalstyle_created_by_id_e83e24d3_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_minimalstyle ADD CONSTRAINT atelier_minimalstyle_created_by_id_e83e24d3_fk_auth_user_id FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_minimalstyle atelier_minimalstyle_last_updated_by_id_34dd85b5_fk_auth_user; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_minimalstyle ADD CONSTRAINT atelier_minimalstyle_last_updated_by_id_34dd85b5_fk_auth_user FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order_allowance_discount atelier_order_allowa_allowancediscount_id_6483c11d_fk_atelier_a; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_allowance_discount ADD CONSTRAINT atelier_order_allowa_allowancediscount_id_6483c11d_fk_atelier_a FOREIGN KEY (allowancediscount_id) REFERENCES public.atelier_allowancediscount(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order_allowance_discount atelier_order_allowa_order_id_2896ddfb_fk_atelier_o; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_allowance_discount ADD CONSTRAINT atelier_order_allowa_order_id_2896ddfb_fk_atelier_o FOREIGN KEY (order_id) REFERENCES public.atelier_order(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order atelier_order_atelier_id_55a0542d_fk_atelier_atelier_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order 
ADD CONSTRAINT atelier_order_atelier_id_55a0542d_fk_atelier_atelier_id FOREIGN KEY (atelier_id) REFERENCES public.atelier_atelier(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order atelier_order_client_id_a542c9de_fk_atelier_client_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order ADD CONSTRAINT atelier_order_client_id_a542c9de_fk_atelier_client_id FOREIGN KEY (client_id) REFERENCES public.atelier_client(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order_complication_elements atelier_order_compli_complicationelement__aad15a47_fk_atelier_c; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_complication_elements ADD CONSTRAINT atelier_order_compli_complicationelement__aad15a47_fk_atelier_c FOREIGN KEY (complicationelement_id) REFERENCES public.atelier_complicationelement(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order_complication_elements atelier_order_compli_order_id_12d03d55_fk_atelier_o; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order_complication_elements ADD CONSTRAINT atelier_order_compli_order_id_12d03d55_fk_atelier_o FOREIGN KEY (order_id) REFERENCES public.atelier_order(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order atelier_order_created_by_id_a4e6c8e1_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order ADD CONSTRAINT atelier_order_created_by_id_a4e6c8e1_fk_auth_user_id FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order atelier_order_fabric_id_5a54eefd_fk_atelier_fabric_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order ADD CONSTRAINT atelier_order_fabric_id_5a54eefd_fk_atelier_fabric_id FOREIGN KEY (fabric_id) REFERENCES public.atelier_fabric(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order 
atelier_order_last_updated_by_id_aeaad62d_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order ADD CONSTRAINT atelier_order_last_updated_by_id_aeaad62d_fk_auth_user_id FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order atelier_order_performer_id_5b8e7188_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order ADD CONSTRAINT atelier_order_performer_id_5b8e7188_fk_auth_user_id FOREIGN KEY (performer_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_order atelier_order_product_id_73f0bb1c_fk_atelier_product_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_order ADD CONSTRAINT atelier_order_product_id_73f0bb1c_fk_atelier_product_id FOREIGN KEY (product_id) REFERENCES public.atelier_product(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_product atelier_product_atelier_id_ea0d13e6_fk_atelier_atelier_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_product ADD CONSTRAINT atelier_product_atelier_id_ea0d13e6_fk_atelier_atelier_id FOREIGN KEY (atelier_id) REFERENCES public.atelier_atelier(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_product atelier_product_created_by_id_7765ca1f_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_product ADD CONSTRAINT atelier_product_created_by_id_7765ca1f_fk_auth_user_id FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_product atelier_product_last_updated_by_id_3f95827b_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_product ADD CONSTRAINT atelier_product_last_updated_by_id_3f95827b_fk_auth_user_id FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) 
DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_product atelier_product_minimal_style_id_d81fef44_fk_atelier_m; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_product ADD CONSTRAINT atelier_product_minimal_style_id_d81fef44_fk_atelier_m FOREIGN KEY (minimal_style_id) REFERENCES public.atelier_minimalstyle(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_profile atelier_profile_atelier_id_f8e307f6_fk_atelier_atelier_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_profile ADD CONSTRAINT atelier_profile_atelier_id_f8e307f6_fk_atelier_atelier_id FOREIGN KEY (atelier_id) REFERENCES public.atelier_atelier(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_profile atelier_profile_created_by_id_935cbd99_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_profile ADD CONSTRAINT atelier_profile_created_by_id_935cbd99_fk_auth_user_id FOREIGN KEY (created_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_profile atelier_profile_last_updated_by_id_4665eb0a_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_profile ADD CONSTRAINT atelier_profile_last_updated_by_id_4665eb0a_fk_auth_user_id FOREIGN KEY (last_updated_by_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: atelier_profile atelier_profile_user_id_ccb9712a_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.atelier_profile ADD CONSTRAINT atelier_profile_user_id_ccb9712a_fk_auth_user_id FOREIGN KEY (user_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: auth_group_permissions auth_group_permissio_permission_id_84c5c92e_fk_auth_perm; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_group_permissions ADD CONSTRAINT 
auth_group_permissio_permission_id_84c5c92e_fk_auth_perm FOREIGN KEY (permission_id) REFERENCES public.auth_permission(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: auth_group_permissions auth_group_permissions_group_id_b120cbf9_fk_auth_group_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_group_permissions ADD CONSTRAINT auth_group_permissions_group_id_b120cbf9_fk_auth_group_id FOREIGN KEY (group_id) REFERENCES public.auth_group(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: auth_permission auth_permission_content_type_id_2f476e4b_fk_django_co; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_permission ADD CONSTRAINT auth_permission_content_type_id_2f476e4b_fk_django_co FOREIGN KEY (content_type_id) REFERENCES public.django_content_type(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: auth_user_groups auth_user_groups_group_id_97559544_fk_auth_group_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_groups ADD CONSTRAINT auth_user_groups_group_id_97559544_fk_auth_group_id FOREIGN KEY (group_id) REFERENCES public.auth_group(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: auth_user_groups auth_user_groups_user_id_6a12ed8b_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_groups ADD CONSTRAINT auth_user_groups_user_id_6a12ed8b_fk_auth_user_id FOREIGN KEY (user_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: auth_user_user_permissions auth_user_user_permi_permission_id_1fbb5f2c_fk_auth_perm; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_user_permissions ADD CONSTRAINT auth_user_user_permi_permission_id_1fbb5f2c_fk_auth_perm FOREIGN KEY (permission_id) REFERENCES public.auth_permission(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: auth_user_user_permissions auth_user_user_permissions_user_id_a95ead1b_fk_auth_user_id; Type: FK 
CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.auth_user_user_permissions ADD CONSTRAINT auth_user_user_permissions_user_id_a95ead1b_fk_auth_user_id FOREIGN KEY (user_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: django_admin_log django_admin_log_content_type_id_c4bce8eb_fk_django_co; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_admin_log ADD CONSTRAINT django_admin_log_content_type_id_c4bce8eb_fk_django_co FOREIGN KEY (content_type_id) REFERENCES public.django_content_type(id) DEFERRABLE INITIALLY DEFERRED; -- -- Name: django_admin_log django_admin_log_user_id_c564eba6_fk_auth_user_id; Type: FK CONSTRAINT; Schema: public; Owner: dbdev -- ALTER TABLE ONLY public.django_admin_log ADD CONSTRAINT django_admin_log_user_id_c564eba6_fk_auth_user_id FOREIGN KEY (user_id) REFERENCES public.auth_user(id) DEFERRABLE INITIALLY DEFERRED; -- -- PostgreSQL database dump complete -- <file_sep>/atelier/views/complication_element_view.py from atelier.models import ComplicationElement from atelier.forms import ComplicationElementForm from django.urls import reverse_lazy from atelier.views.base_view import BaseDetailView, BaseListView, \ SuperuserPermissionPreMixin, BaseCreateView, BaseUpdateView, BaseDeleteView class ComplicationElementDetailView(BaseDetailView): model = ComplicationElement fields = '__all__' template_name = 'atelier/complication_element_detail.html' class ComplicationElementListView(BaseListView): model = ComplicationElement template_name = 'atelier/complication_element_list.html' class ComplicationElementCreateView(SuperuserPermissionPreMixin, BaseCreateView): model = ComplicationElement form_class = ComplicationElementForm template_name = 'atelier/create_form.html' class ComplicationElementUpdateView(SuperuserPermissionPreMixin, BaseUpdateView): model = ComplicationElement form_class = ComplicationElementForm template_name = 'atelier/create_form.html' class 
ComplicationElementDeleteView(SuperuserPermissionPreMixin, BaseDeleteView): model = ComplicationElement success_url = reverse_lazy('atelier:complication_element_list') template_name = 'atelier/delete_form.html' <file_sep>/atelier/models/product.py from django.db import models from django.urls import reverse from django.utils.translation import gettext_lazy as _ from atelier.models import AbstractBaseModel from atelier.models.atelier import Atelier class Product(AbstractBaseModel): name = models.CharField( max_length=264, verbose_name=_('name') ) minimal_style = models.ForeignKey( 'atelier.MinimalStyle', on_delete=models.CASCADE, verbose_name=_('minimal style') ) base_price = models.DecimalField( max_digits=10, decimal_places=2, verbose_name=_('base price') ) atelier = models.ForeignKey( Atelier, on_delete=models.CASCADE, verbose_name=_('Atelier'), ) def __str__(self): return self.name class Meta: ordering = ['name'] def get_absolute_url(self): return reverse('atelier:product_detail', args=[str(self.id)]) <file_sep>/atelier/forms/atelier_form.py from django import forms from atelier.models import Atelier class AtelierForm(forms.ModelForm): class Meta: model = Atelier fields = ['name'] <file_sep>/vokss/settingsproxy.py import json import os """ A simple Django settings module proxy that lets us configure Django using the DJANGOENV environment variable. Example (running tests):: $ DJANGOENV=test python manage.py test Defaults to the ``develop`` environment, so developers can use ``python manage.py`` without anything extra during development. 
""" DJANGOENV = os.environ.get('DJANGOENV', 'develop') # def _load_develop_environment_from_file(): # if os.path.exists('develop-environment.json'): # environment_dict = json.loads(open('develop-environment.json', 'r').read()) # for key, value in environment_dict.items(): # if key.upper() == key: # os.environ[key] = value if DJANGOENV == 'develop': # Used for local development # _load_develop_environment_from_file() from vokss.develop.develop_settings import * elif DJANGOENV == 'test': # Used when running the Django tests locally from vokss.develop.test_settings import * elif DJANGOENV == 'production': # Used on production server from vokss.production.production_settings import * elif DJANGOENV == 'staging': # Used on staging server from vokss.production.staging_settings import * else: raise ValueError('Invalid value for the DJANGOENV environment variable: {}'.format(DJANGOENV)) <file_sep>/setup.py import json import os from setuptools import setup, find_packages setup( name='vokss', description='The vokss django project.', version=1, author='Vitamal', packages=find_packages(exclude=['manage']), install_requires=[], include_package_data=True, zip_safe=False, ) <file_sep>/atelier/models/allowance_discount.py from django.db import models from django.urls import reverse from django.utils.translation import gettext_lazy as _ from atelier.models.abstract_base import AbstractBaseModel class AllowanceDiscount(AbstractBaseModel): name = models.CharField( max_length=255, verbose_name=_('name') ) coefficient = models.DecimalField( max_digits=5, decimal_places=2, verbose_name=_('coefficient') ) label = models.CharField( max_length=255, verbose_name=_('group') ) def __str__(self): """ to display an object in the Django admin site and as the value inserted into a template when it displays an object """ return self.name class Meta: ordering = ['name'] def get_absolute_url(self): """ Returns the url to access a particular client instance. 
""" return reverse('atelier:allowance_discount_detail', args=[str(self.id)]) <file_sep>/atelier/templates/atelier/order_detail.html {% extends 'atelier/template.html' %} {% load i18n %} {% load atelier_tags %} {% load atelier_tags %} {# give class name for testing (test_css_class) #} {% block content %} <div class="container"> <div class="row"> <div class="col-sm-12"> <h3>{% trans 'Order' %}:</h3> {% if user.profile.is_tailor %} <button class="btn btn-sm btn-outline-secondary" type="button"> <a href="{% url 'atelier:order_form' %}">{% trans 'Create Order' %}</a> </button> {% endif %} <br/><br/> <div class="table-responsive"> <table class="table"> <tbody> <tr> <td>{% trans 'Client' %}:</td> <td class="{% test_css_class 'client' %}">{{ order.client }} </td> </tr> <tr> <td>{% trans 'Product' %}:</td> <td class="{% test_css_class 'product' %}">{{ order.product }}</td> </tr> <tr> <td>{% trans 'Fabric' %}:</td> <td class="{% test_css_class 'fabric' %}">{{ order.fabric }}</td> </tr> <tr> <td>{% trans 'Group of fabric' %}:</td> <td class="{% test_css_class 'fabric_group' %}">{{ order.fabric.group }}</td> </tr> <tr> <td>{% trans 'Processing category' %}:</td> <td class="{% test_css_class 'category' %}">{{ order.processing_category }}</td> </tr> <tr> <td>{% trans 'Complication Elements' %}:</td> <td> <ul> {% for complication_element in order.complication_elements.all %} <li class="{% test_css_class 'elements' %}"> {{ complication_element }}</li> {% endfor %} </ul> </td> <tr> <td>{% trans 'Allowance/Discount' %}:</td> <td> <ul> {% for allowance_discount in order.allowance_discount.all %} <li class="{% test_css_class 'discount' %}">{{ allowance_discount }}</li> {% endfor %} </ul> </td> </tr> <tr> <td>{% trans 'Order Date' %}:</td> <td class="{% test_css_class 'date' %}">{{ order.order_date|date:"l d F Y" }}</td> {# Formats a date according to the given format. 
#} </tr> <tr> <td>{% trans 'Order Price' %}:</td> <td>{{ order.order_price }} ₴</td> {# from model @property #} </tr> {# <tr>#} {# <td>{% trans 'Order Price' %}:</td>#} {# <td>{{ order_price_view }} ₴ </td> {# from view #} {# </tr>#} {# <tr>#} {# {% order_price_tag order as tag_order_price %}#} {# <td>{% trans 'Order Price' %}:</td>#} {# <td>{{ tag_order_price }} ₴ </td> {# from model temlatetag #} {# </tr>#} <tr> <td>{% trans 'Performer' %}:</td> <td class="{% test_css_class 'performer' %}">{{ order.performer }}</td> </tr> <tr> <td>{% trans 'deadline' %}:</td> <td class="{% test_css_class 'deadline' %}">{{ order.deadline }}</td> </tr> <tr> <td>{% trans 'Closed' %}:</td> <td class="{% test_css_class 'closed' %}">{{ order.is_closed|yesno:_("Yes,No") }}</td> </tr> </tbody> </table> </div> {% if user.profile.is_tailor %} <button class="btn btn-sm btn-outline-secondary" type="button"> <a href="{% url 'atelier:order_update_form' pk=order.pk %}">{% trans 'Edit' %}</a> </button> <button class="btn btn-sm btn-outline-secondary" type="button"> <a href="{% url 'atelier:order_delete_form' pk=order.pk %}">{% trans 'Delete' %}</a> </button> {% endif %} <button class="btn btn-sm btn-outline-secondary" type="button"> <a href="{% url 'atelier:order_list' %}">{% trans 'Back to list' %}</a> </button> </div> </div> </div> {% endblock %} <file_sep>/atelier/templatetags/atelier_tags.py from django import template from atelier.app_utils import order_price_calculation import os register = template.Library() @register.simple_tag def order_price_tag(order): complication_elements_base_price_list = [] complication_elements_complexity_list = [] allowance_discount_coefficient_list = [] for i in order.complication_elements.all(): complication_elements_base_price_list.append(i.base_price) complication_elements_complexity_list.append(i.complexity) for k in order.allowance_discount.all(): allowance_discount_coefficient_list.append(k.coefficient) return 
order_price_calculation(order.fabric.complexity_factor, order.product.base_price, complication_elements_base_price_list, complication_elements_complexity_list, order.processing_category, allowance_discount_coefficient_list) @register.simple_tag def test_css_class(string): if os.environ.get('DJANGOENV') == 'test': return string return None <file_sep>/atelier/models/abstract_base.py from django.contrib.auth import get_user_model from django.db import models class AbstractBaseModel(models.Model): """ Abstract model to provide basic fields common for all models. """ #: When was the instance created created_datetime = models.DateTimeField( auto_now_add=True, null=True, ) #: When was the instance last updated last_updated_datetime = models.DateTimeField( auto_now=True, null=True, ) #: Who created the instance created_by = models.ForeignKey( get_user_model(), on_delete=models.SET_NULL, null=True, blank=True, related_name='+' ) #: Who was the last user to modify/update the instance last_updated_by = models.ForeignKey( get_user_model(), on_delete=models.SET_NULL, null=True, blank=True, related_name='+' ) class Meta: abstract = True <file_sep>/vokss/production/production_settings.py from .production_and_staging_settings_common import * # noqa SECURE_SSL_REDIRECT = True<file_sep>/atelier/forms/minimal_style_form.py from django import forms from atelier.models import MinimalStyle class MinimalStyleForm(forms.ModelForm): class Meta: model = MinimalStyle fields = ['name', 'group'] <file_sep>/atelier/tests/test_app_utils.py from django import test from atelier.app_utils import order_price_calculation class TestAppUtils(test.TestCase): def test_util_sanity(self): fabric_complexity_factor = 12345.12 product_base_price = 1234567890.12 complication_element_base_price = [12345.12, 1.02, 1, 0.55] complication_element_complexity = [123.12, 1, 2, 3] order_processing_category = 1 allowance_discount_coefficients = [12345.12, 1.01, 99.99] result = 
order_price_calculation(fabric_complexity_factor, product_base_price, complication_element_base_price, complication_element_complexity, order_processing_category, allowance_discount_coefficients) self.assertEqual(result, '227926471619334944.00') def test_util_sanity_simple(self): fabric_complexity_factor = 1 product_base_price = 750 complication_element_base_price = [30, 30, 30, 30, 30, 30, 30, 30, 30] complication_element_complexity = [1, 1, 1, 1, 1, 1, 1, 1, 1] order_processing_category = 1 allowance_discount_coefficients = [] result = order_price_calculation(fabric_complexity_factor, product_base_price, complication_element_base_price, complication_element_complexity, order_processing_category, allowance_discount_coefficients) self.assertEqual(result, '1224.00') <file_sep>/atelier/views/index.py from django.shortcuts import get_object_or_404, render from atelier.models import Profile, Client, Product, Fabric, Order, ComplicationElement, MinimalStyle, \ AllowanceDiscount, Atelier from django.contrib.auth.decorators import login_required, user_passes_test @login_required def index(request): """ Home page view """ # Number of visits to this view, as counted in the session variable. 
num_visits = request.session.get('num_visits', 0) request.session['num_visits'] = num_visits + 1 if request.user.is_superuser: ''' Home page view for superuser ''' num_ateliers = Atelier.objects.all().count() num_profiles = Profile.objects.all().count() num_fabrics = Fabric.objects.all().count() num_products = Product.objects.all().count() num_clients = Client.objects.all().count() num_orders = Order.objects.all().count() num_complication_element = ComplicationElement.objects.all().count() num_minimal_style = MinimalStyle.objects.all().count() num_allowance_discount = AllowanceDiscount.objects.all().count() return render(request, 'atelier/index.html', context={ 'num_ateliers': num_ateliers, 'num_profiles': num_profiles, 'num_fabrics': num_fabrics, 'num_products': num_products, 'num_clients': num_clients, 'num_orders': num_orders, 'num_allowance_discount': num_allowance_discount, 'num_complication_element': num_complication_element, 'num_minimal_style': num_minimal_style, 'num_visits': num_visits, }) else: ''' Home page view for all users ''' atelier = request.user.profile.atelier num_products = Product.objects.filter(atelier=atelier).count() num_clients = Client.objects.filter(atelier=atelier).count() num_orders = Order.objects.filter(atelier=atelier).count() num_profiles_in_atelier = Profile.objects.filter(atelier=atelier).count() return render(request, 'atelier/index.html', context={ 'atelier': atelier, 'num_products': num_products, 'num_clients': num_clients, 'num_orders': num_orders, 'num_visits': num_visits, 'num_profiles_in_atelier': num_profiles_in_atelier, }) <file_sep>/atelier/views/atelier_view.py from atelier.models import Atelier, Profile from atelier.forms import AtelierForm from django.urls import reverse_lazy from atelier.views.base_view import BaseDetailView, BaseListView, \ SuperuserPermissionPreMixin, BaseCreateView, BaseUpdateView, BaseDeleteView class AtelierDetailView(SuperuserPermissionPreMixin, BaseDetailView): model = Atelier fields = 
'__all__' class AtelierListView(SuperuserPermissionPreMixin, BaseListView): model = Atelier # def get_context_data(self, *, object_list=None, **kwargs): # num_tailors = Profile.objects.filter(is_tailor=True).count() # num_simple_users = Profile.objects.filter(is_tailor=False).count() # context = { # 'num_tailors': num_tailors, # 'num_simple_users': num_simple_users, # } # return super().get_context_data(**context) class AtelierCreateView(SuperuserPermissionPreMixin, BaseCreateView): model = Atelier form_class = AtelierForm template_name = 'atelier/create_form.html' class AtelierUpdateView(SuperuserPermissionPreMixin, BaseUpdateView): model = Atelier form_class = AtelierForm template_name = 'atelier/create_form.html' class AtelierDeleteView(SuperuserPermissionPreMixin, BaseDeleteView): model = Atelier success_url = reverse_lazy('atelier:atelier_list') template_name = 'atelier/delete_form.html' <file_sep>/atelier/models/profile.py from django.db import models from django.urls import reverse from django.utils.translation import gettext_lazy as _ from atelier.models import Atelier from atelier.models.abstract_base import AbstractBaseModel from django.db.models.signals import post_save from django.dispatch import receiver from django.contrib.auth.models import User class Profile(AbstractBaseModel): user = models.OneToOneField( User, on_delete=models.CASCADE, verbose_name=_('user') ) atelier = models.ForeignKey( Atelier, on_delete=models.CASCADE, verbose_name=_('atelier') ) is_tailor = models.BooleanField( default=False, blank=True, help_text=_("User can be a tailor to have administrator access within his atelier"), verbose_name=_('tailor') ) def __str__(self): """ to display an object in the Django admin site and as the value inserted into a template when it displays an object """ return self.user.username class Meta: ordering = ['user'] def get_absolute_url(self): """ Returns the url to access a particular client instance. 
""" return reverse('atelier:profile_detail', args=[str(self.id)])
55f37c8afc9c8e496a9d7ca9762f0d916b3efaa5
[ "SQL", "Python", "HTML", "reStructuredText" ]
63
Python
Vitamal/vokss
1323ecae9260cf000e23ff7eb64ee6a62fd57885
14793cee5b0c4c6ed1dc047f36c1bff2174234ea
refs/heads/master
<file_sep># Kinect2 API This folder contains all the C++ code for generating the language agnostic DLL for interfacing with the Kinect2 Sensor. ## Dev #### Building 1. Install the [Kinect2 SDK](https://www.microsoft.com/en-us/search?q=kinect+2+sdk) 2. Set the environment var `KINECTSDK20_DIR` to its install location 3. Open the project (`Kinect2-API.sln`) with [Visual Studio](https://visualstudio.microsoft.com/) 4. Use default Release/x64 settings to build `Kinect2-API.dll` #### Methods Outline See code for more info on exported functions. ```c++ // Sensor Flags #define F_SENSOR_COLOR 0x00000001 #define F_SENSOR_DEPTH 0x00000010 #define F_SENSOR_IR 0x00000100 #define F_SENSOR_BODY 0x00001000 #define F_SENSOR_MULTI 0x00001111 #define F_SENSOR_AUDIO 0x00010000 #define F_MAP_COLOR_CAM 0x00000002 #define F_MAP_DEPTH_CAM 0x00000020 #define F_MAP_DEPTH_COLOR 0x00000200 #define F_MAP_COLOR_DEPTH 0x00002000 ``` ```c++ // Other Constants #define COLOR_WIDTH 1920 #define COLOR_HEIGHT 1080 #define COLOR_CHANNELS 4 #define DEPTH_WIDTH 512 #define DEPTH_HEIGHT 424 #define IR_WIDTH 512 #define IR_HEIGHT 424 #define MAX_SUBFRAMES 8 #define AUDIO_BUF_LEN 512 #define SUBFRAME_SIZE 256 #define MAX_BODIES 6 #define BODY_PROPS 15 #define MAX_JOINTS 25 #define JOINT_PROPS 9 #define FLOAT_MULT 100000 ``` ```c++ // Start collecting data from the Kinect2 (flags determine what kind of data) bool init_kinect(int sensor_flags, int mapping_flags); // Stop collecting data and clean up. void close_kinect(); ``` ```c++ // Pause the thread responsable for fetching frames void pause_worker(); // Resume that thread void resume_worker(); // Get the current frame count int get_tick(); ``` ```c++ // Methods for collecting multidimensional data from the sensor: // Each of these will overwrite the provided array with data // in the shape given. // These will have undefined behavior when called if used w/o // appropriate flags on init. 
// (COLOR_HEIGHT, COLOR_WIDTH, COLOR_CHANNELS) bool get_color_data(UINT8* array); // (IR_HEIGHT, IR_WIDTH, 1) bool get_ir_data(UINT16* array); // (DEPTH_HEIGHT, DEPTH_WIDTH, 1) bool get_depth_data(UINT16* array); // (MAX_BODIES, BODY_PROPS) (MAX_BODIES, MAX_JOINTS, JOINT_PROPS) // See code for how this is encoded. bool get_body_data(UINT8* body_array, INT32* joint_array); // (AUDIO_BUF_LEN * SUBFRAME_SIZE) (AUDIO_BUF_LEN, 2) // See code for how this is encoded. int get_audio_data(FLOAT* array, FLOAT* meta_array); // (COLOR_HEIGHT, COLOR_WIDTH, 3) bool get_map_color_to_camera(FLOAT* array); // (DEPTH_HEIGHT, DEPTH_WIDTH, 3) bool get_map_depth_to_camera(FLOAT* array); // (DEPTH_HEIGHT, DEPTH_WIDTH, 2) bool get_map_depth_to_color(FLOAT* array); // (COLOR_HEIGHT, COLOR_WIDTH, 2) bool get_map_color_depth(FLOAT* array); ```<file_sep>#pragma once #include "stdafx.h"<file_sep>#include "stdafx.h" #include "Kinect2-API.h" #include <Kinect.h> #include <cstring> #include <iostream> #include <mutex> #define WORKER_TIMEOUT 5000 // Flag Constants #define F_SENSOR_COLOR 0x00000001 #define F_SENSOR_DEPTH 0x00000010 #define F_SENSOR_IR 0x00000100 #define F_SENSOR_BODY 0x00001000 #define F_SENSOR_MULTI 0x00001111 #define F_SENSOR_AUDIO 0x00010000 #define F_MAP_COLOR_CAM 0x00000002 #define F_MAP_DEPTH_CAM 0x00000020 #define F_MAP_DEPTH_COLOR 0x00000200 #define F_MAP_COLOR_DEPTH 0x00002000 // Misc Constants #define COLOR_WIDTH 1920 #define COLOR_HEIGHT 1080 #define COLOR_CHANNELS 4 #define DEPTH_WIDTH 512 #define DEPTH_HEIGHT 424 #define IR_WIDTH 512 #define IR_HEIGHT 424 #define MAX_SUBFRAMES 8 #define AUDIO_BUF_LEN 512 #define SUBFRAME_SIZE 256 #define MAX_BODIES 6 #define BODY_PROPS 15 #define MAX_JOINTS 25 #define JOINT_PROPS 9 #define FLOAT_MULT 100000 int sensors = 0; int mappings = 0; IKinectSensor* sensor; std::mutex worker_lock; int multi_tick = 0; IMultiSourceFrameReader* multi_reader; WAITABLE_HANDLE multi_frame_event; HANDLE multi_terminate = NULL; HANDLE 
multi_worker_thread; UINT8 buffer_color[COLOR_WIDTH * COLOR_HEIGHT * COLOR_CHANNELS]; UINT16 buffer_depth[DEPTH_WIDTH * DEPTH_HEIGHT]; UINT16 buffer_ir[IR_WIDTH * IR_HEIGHT]; UINT8 buffer_bodies[MAX_BODIES * BODY_PROPS]; INT32 buffer_joints[MAX_BODIES * MAX_JOINTS * JOINT_PROPS]; ICoordinateMapper* coord_mapper; std::mutex buffer_color_lock; std::mutex buffer_depth_lock; std::mutex buffer_ir_lock; std::mutex buffer_body_lock; CameraSpacePoint map_color_camera[COLOR_WIDTH * COLOR_HEIGHT]; CameraSpacePoint map_depth_camera[DEPTH_WIDTH * DEPTH_HEIGHT]; ColorSpacePoint map_depth_color[DEPTH_WIDTH * DEPTH_HEIGHT]; DepthSpacePoint map_color_depth[COLOR_WIDTH * COLOR_HEIGHT]; std::mutex buffer_map_lock; IAudioBeamFrameReader* audio_reader; WAITABLE_HANDLE audio_frame_event; HANDLE audio_terminate = NULL; HANDLE audio_worker_thread; FLOAT buffer_audio[AUDIO_BUF_LEN * SUBFRAME_SIZE]; FLOAT buffer_audio_meta[AUDIO_BUF_LEN * 2]; UINT32 buffer_audio_used = 0; std::mutex buffer_audio_lock; // Init the connection and workers // sensor_flags contains a combination of F_SENSOR_* constants // mapping_flags contains a combination of F_MAP_* constants EXPORTFUNC bool init_kinect(int sensor_flags, int mapping_flags) { if (!sensor_flags || FAILED(GetDefaultKinectSensor(&sensor))) { return false; } sensors = sensor_flags; mappings = mapping_flags; if (sensor) { sensor->Open(); int source_types = 0; if (sensors & F_SENSOR_COLOR) { source_types |= FrameSourceTypes::FrameSourceTypes_Color; } if (sensors & F_SENSOR_DEPTH) { source_types |= FrameSourceTypes::FrameSourceTypes_Depth; } if (sensors & F_SENSOR_IR) { source_types |= FrameSourceTypes::FrameSourceTypes_Infrared; } if (sensors & F_SENSOR_BODY) { source_types |= FrameSourceTypes::FrameSourceTypes_Body; } if (sensors & F_SENSOR_MULTI) { sensor->OpenMultiSourceFrameReader(source_types, &multi_reader); sensor->get_CoordinateMapper(&coord_mapper); multi_reader->SubscribeMultiSourceFrameArrived(&multi_frame_event); multi_terminate = 
CreateEvent(NULL, FALSE, FALSE, NULL); CreateThread(NULL, 0, &multi_worker_wrapper, NULL, 0, NULL); } else { mappings = 0; } if (sensors & F_SENSOR_AUDIO) { IAudioSource* audio_source; sensor->get_AudioSource(&audio_source); audio_source->OpenReader(&audio_reader); audio_reader->SubscribeFrameArrived(&audio_frame_event); audio_terminate = CreateEvent(NULL, FALSE, FALSE, NULL); CreateThread(NULL, 0, &audio_worker_wrapper, NULL, 0, NULL); SAFE_RELEASE(audio_source); } return true; } return false; } // Close and clean all data related to the Kinect. EXPORTFUNC void close_kinect() { if (sensors & F_SENSOR_MULTI) { SetEvent(multi_terminate); WaitForSingleObject(multi_worker_thread, INFINITE); CloseHandle(multi_worker_thread); CloseHandle(multi_terminate); multi_reader->UnsubscribeMultiSourceFrameArrived(multi_frame_event); SAFE_RELEASE(multi_reader); } if (sensors & F_SENSOR_AUDIO) { SetEvent(audio_terminate); WaitForSingleObject(audio_worker_thread, INFINITE); CloseHandle(audio_worker_thread); CloseHandle(audio_terminate); audio_reader->UnsubscribeFrameArrived(audio_frame_event); SAFE_RELEASE(audio_reader); } sensor->Close(); SAFE_RELEASE(sensor); } // Get the current frame # captured EXPORTFUNC int get_tick() { return multi_tick; } // Pause the multi worker EXPORTFUNC void pause_worker() { worker_lock.lock(); } // Unpause the multi worker EXPORTFUNC void resume_worker() { worker_lock.unlock(); } DWORD WINAPI multi_worker_wrapper(_In_ LPVOID lp_param) { HRESULT hr = S_OK; hr = run_multi_worker(); return SUCCEEDED(hr) ? 0 : 1; } // The multi worker. // Collects depth/color/ir/body/mapping data from the sensor // on a loop. 
HRESULT run_multi_worker() { bool running = true; HANDLE handles[] = { (HANDLE)multi_frame_event, multi_terminate }; IMultiSourceFrameArrivedEventArgs* multi_event_args = NULL; IMultiSourceFrameReference* multi_ref = NULL; IMultiSourceFrame* multi_frame = NULL; IColorFrame* frame_color = NULL; IColorFrameReference* frameref_color = NULL; IDepthFrame* frame_depth = NULL; IDepthFrameReference* frameref_depth = NULL; IInfraredFrame* frame_ir = NULL; IInfraredFrameReference* frameref_ir = NULL; IBodyFrame* frame_body = NULL; IBodyFrameReference* frameref_body = NULL; IBody* bodies[BODY_COUNT] = { 0 }; Joint joints[MAX_JOINTS]; JointOrientation joint_orients[MAX_JOINTS]; while (running) { DWORD result = WaitForMultipleObjects(_countof(handles), handles, FALSE, WORKER_TIMEOUT); if (result == WAIT_OBJECT_0) { // std::cout << "[worker_start]\n"; multi_reader->GetMultiSourceFrameArrivedEventData(multi_frame_event, &multi_event_args); multi_event_args->get_FrameReference(&multi_ref); multi_ref->AcquireFrame(&multi_frame); worker_lock.lock(); if (sensors & F_SENSOR_COLOR) { // std::cout << "[color_start]\n"; multi_frame->get_ColorFrameReference(&frameref_color); if (SUCCEEDED(frameref_color->AcquireFrame(&frame_color))) { buffer_color_lock.lock(); frame_color->CopyConvertedFrameDataToArray(COLOR_WIDTH * COLOR_HEIGHT * COLOR_CHANNELS, buffer_color, ColorImageFormat_Rgba); buffer_color_lock.unlock(); } } if (sensors & F_SENSOR_DEPTH) { // std::cout << "[depth_start]\n"; multi_frame->get_DepthFrameReference(&frameref_depth); if (SUCCEEDED(frameref_depth->AcquireFrame(&frame_depth))) { buffer_depth_lock.lock(); frame_depth->CopyFrameDataToArray(DEPTH_WIDTH * DEPTH_HEIGHT, buffer_depth); buffer_depth_lock.unlock(); } } if (sensors & F_SENSOR_IR) { // std::cout << "[ir_start]\n"; multi_frame->get_InfraredFrameReference(&frameref_ir); if (SUCCEEDED(frameref_ir->AcquireFrame(&frame_ir))) { buffer_ir_lock.lock(); frame_ir->CopyFrameDataToArray(IR_WIDTH * IR_HEIGHT, buffer_ir); 
buffer_ir_lock.unlock(); } } if (sensors & F_SENSOR_BODY) { // std::cout << "[body_start]\n"; multi_frame->get_BodyFrameReference(&frameref_body); if (SUCCEEDED(frameref_body->AcquireFrame(&frame_body))) { frame_body->GetAndRefreshBodyData(_countof(bodies), bodies); buffer_body_lock.lock(); for (int b_idx = 0; b_idx < BODY_COUNT; b_idx++) { process_body(bodies[b_idx], b_idx, joints, joint_orients); } buffer_body_lock.unlock(); } } // std::cout << "[worker_end]\n"; // Mappings all require depth sensor and that this isnt the first frame. if (multi_tick > 1 && sensors & F_SENSOR_DEPTH) { buffer_map_lock.lock(); if (mappings & F_MAP_COLOR_CAM && sensors & F_SENSOR_COLOR) { coord_mapper->MapColorFrameToCameraSpace(DEPTH_WIDTH * DEPTH_HEIGHT, buffer_depth, COLOR_WIDTH * COLOR_HEIGHT, map_color_camera); } if (mappings & F_MAP_DEPTH_CAM) { coord_mapper->MapDepthFrameToCameraSpace(DEPTH_WIDTH * DEPTH_HEIGHT, buffer_depth, DEPTH_WIDTH * DEPTH_HEIGHT, map_depth_camera); } if (mappings & F_MAP_DEPTH_COLOR && sensors & F_SENSOR_COLOR) { coord_mapper->MapDepthFrameToColorSpace(DEPTH_WIDTH * DEPTH_HEIGHT, buffer_depth, DEPTH_WIDTH * DEPTH_HEIGHT, map_depth_color); } if (mappings & F_MAP_COLOR_DEPTH && sensors & F_SENSOR_COLOR) { coord_mapper->MapColorFrameToDepthSpace(DEPTH_WIDTH * DEPTH_HEIGHT, buffer_depth, COLOR_WIDTH * COLOR_HEIGHT, map_color_depth); } buffer_map_lock.unlock(); } worker_lock.unlock(); multi_tick += 1; SAFE_RELEASE(frameref_color); SAFE_RELEASE(frame_color); SAFE_RELEASE(frameref_depth); SAFE_RELEASE(frame_depth); SAFE_RELEASE(frameref_ir); SAFE_RELEASE(frame_ir); SAFE_RELEASE(frameref_body); SAFE_RELEASE(frame_body); SAFE_RELEASE(multi_event_args); SAFE_RELEASE(multi_ref); } else { running = false; } } return S_OK; } // Read/store all the info related to each tracked body. 
inline void process_body(IBody* body, int body_idx, Joint* joints, JointOrientation* joint_orients) { BOOLEAN tracked; int body_offset = body_idx * BODY_PROPS; int joint_offset; Joint joint; Vector4 joint_orient; CameraSpacePoint joint_pos; ColorSpacePoint color_pos; DepthSpacePoint depth_pos; body->get_IsTracked(&tracked); buffer_bodies[body_offset] = !!tracked; if (tracked) { // Store results directly into body buffer body->get_Engaged((DetectionResult*)&buffer_bodies[body_offset + 1]); body->get_IsRestricted((BOOLEAN*)&buffer_bodies[body_offset + 2]); body->get_HandLeftConfidence((TrackingConfidence*)&buffer_bodies[body_offset + 3]); body->get_HandLeftState((HandState*)&buffer_bodies[body_offset + 4]); body->get_HandRightConfidence((TrackingConfidence*)&buffer_bodies[body_offset + 5]); body->get_HandRightState((HandState*)&buffer_bodies[body_offset + 6]); body->GetExpressionDetectionResults(2, (DetectionResult*)&buffer_bodies[body_offset + 7]); body->GetActivityDetectionResults(5, (DetectionResult*)&buffer_bodies[body_offset + 7 + 2]); body->GetAppearanceDetectionResults(1, (DetectionResult*)&buffer_bodies[body_offset + 7 + 2 + 5]); body->GetJoints(MAX_JOINTS, joints); body->GetJointOrientations(MAX_JOINTS, joint_orients); for (int j_idx = 0; j_idx < MAX_JOINTS; j_idx++) { joint_offset = body_idx * MAX_JOINTS * JOINT_PROPS + j_idx * JOINT_PROPS; joint = joints[j_idx]; joint_orient = joint_orients[j_idx].Orientation; joint_pos = joint.Position; coord_mapper->MapCameraPointToColorSpace(joint_pos, &color_pos); coord_mapper->MapCameraPointToDepthSpace(joint_pos, &depth_pos); buffer_joints[joint_offset] = joint.TrackingState; buffer_joints[joint_offset + 1] = (int)color_pos.X; buffer_joints[joint_offset + 2] = (int)color_pos.Y; buffer_joints[joint_offset + 3] = (int)depth_pos.X; buffer_joints[joint_offset + 4] = (int)depth_pos.Y; buffer_joints[joint_offset + 5] = (int)(joint_orient.w * FLOAT_MULT); buffer_joints[joint_offset + 6] = (int)(joint_orient.x * FLOAT_MULT); 
buffer_joints[joint_offset + 7] = (int)(joint_orient.y * FLOAT_MULT); buffer_joints[joint_offset + 8] = (int)(joint_orient.z * FLOAT_MULT); } } } DWORD WINAPI audio_worker_wrapper(_In_ LPVOID lp_param) { HRESULT hr = S_OK; hr = run_audio_worker(); return SUCCEEDED(hr) ? 0 : 1; } // The audio worker. HRESULT run_audio_worker() { bool running = true; HANDLE handles[] = { (HANDLE)audio_frame_event, audio_terminate }; IAudioBeamFrameArrivedEventArgs* audio_frame_event_args = NULL; IAudioBeamFrameReference* audio_frame_ref = NULL; IAudioBeamFrameList* audio_frames = NULL; IAudioBeamFrame* audio_frame = NULL; IAudioBeamSubFrame* subframe = NULL; UINT32 subframe_count; while (running) { DWORD result = WaitForMultipleObjects(_countof(handles), handles, FALSE, WORKER_TIMEOUT); if (result == WAIT_OBJECT_0) { audio_reader->GetFrameArrivedEventData(audio_frame_event, &audio_frame_event_args); audio_frame_event_args->get_FrameReference(&audio_frame_ref); if (SUCCEEDED(audio_frame_ref->AcquireBeamFrames(&audio_frames))) { audio_frames->OpenAudioBeamFrame(0, &audio_frame); audio_frame->get_SubFrameCount(&subframe_count); buffer_audio_lock.lock(); // Reset buffer if this next audio frame will overload it. if (subframe_count + buffer_audio_used >= AUDIO_BUF_LEN) { buffer_audio_used = 0; } for (UINT32 i = 0; i < subframe_count; i++) { audio_frame->GetSubFrame(i, &subframe); process_audio_subframe(subframe, i); SAFE_RELEASE(subframe); } buffer_audio_used += subframe_count; buffer_audio_lock.unlock(); } SAFE_RELEASE(audio_frame_event_args); SAFE_RELEASE(audio_frame_ref); SAFE_RELEASE(audio_frames); SAFE_RELEASE(audio_frame); } else { running = false; } } return S_OK; } // Extract beam angle and sample data from subframe. 
inline void process_audio_subframe(IAudioBeamSubFrame* subframe, int index) { float* audio_buf = NULL; float beam_angle; float beam_conf; subframe->get_BeamAngle(&beam_angle); subframe->get_BeamAngleConfidence(&beam_conf); UINT buf_size; subframe->AccessUnderlyingBuffer(&buf_size, (BYTE **)&audio_buf); buffer_audio_meta[(index + buffer_audio_used) * 2] = beam_angle; buffer_audio_meta[(index + buffer_audio_used) * 2 + 1] = beam_conf; memcpy(buffer_audio + (index + buffer_audio_used) * SUBFRAME_SIZE, audio_buf, SUBFRAME_SIZE * sizeof(FLOAT)); } EXPORTFUNC bool get_color_data(UINT8* array) { buffer_color_lock.lock(); memcpy(array, buffer_color, COLOR_WIDTH * COLOR_HEIGHT * COLOR_CHANNELS * sizeof(UINT8)); buffer_color_lock.unlock(); return true; } EXPORTFUNC bool get_ir_data(UINT16* array) { buffer_ir_lock.lock(); memcpy(array, buffer_ir, IR_WIDTH * IR_HEIGHT * sizeof(UINT16)); buffer_ir_lock.unlock(); return true; } EXPORTFUNC bool get_depth_data(UINT16* array) { buffer_depth_lock.lock(); memcpy(array, buffer_depth, DEPTH_WIDTH * DEPTH_HEIGHT * sizeof(UINT16)); buffer_depth_lock.unlock(); return true; } EXPORTFUNC bool get_body_data(UINT8* body_array, INT32* joint_array) { buffer_body_lock.lock(); memcpy(body_array, buffer_bodies, MAX_BODIES * BODY_PROPS * sizeof(UINT8)); memcpy(joint_array, buffer_joints, MAX_BODIES * MAX_JOINTS * JOINT_PROPS * sizeof(INT32)); buffer_body_lock.unlock(); return true; } EXPORTFUNC int get_audio_data(FLOAT* array, FLOAT* meta_array) { if (buffer_audio_used == 0) { return 0; } buffer_audio_lock.lock(); int len = buffer_audio_used; memcpy(array, buffer_audio, AUDIO_BUF_LEN * SUBFRAME_SIZE * sizeof(FLOAT)); memcpy(meta_array, buffer_audio_meta, AUDIO_BUF_LEN * 2 * sizeof(FLOAT)); buffer_audio_used = 0; buffer_audio_lock.unlock(); return len; } EXPORTFUNC bool get_map_color_to_camera(FLOAT* array) { buffer_map_lock.lock(); memcpy(array, map_color_camera, COLOR_HEIGHT * COLOR_WIDTH * 3 * sizeof(FLOAT)); buffer_map_lock.unlock(); return 
true; } EXPORTFUNC bool get_map_depth_to_camera(FLOAT* array) { buffer_map_lock.lock(); memcpy(array, map_depth_camera, DEPTH_HEIGHT * DEPTH_WIDTH * 3 * sizeof(FLOAT)); buffer_map_lock.unlock(); return true; } EXPORTFUNC bool get_map_depth_to_color(FLOAT* array) { buffer_map_lock.lock(); memcpy(array, map_depth_color, DEPTH_HEIGHT * DEPTH_WIDTH * 2 * sizeof(FLOAT)); buffer_map_lock.unlock(); return true; } EXPORTFUNC bool get_map_color_depth(FLOAT* array) { buffer_map_lock.lock(); memcpy(array, map_color_depth, COLOR_HEIGHT * COLOR_WIDTH * 2 * sizeof(FLOAT)); buffer_map_lock.unlock(); return true; }<file_sep>""" Demonstrating basic usage with bodies. """ from libkinect2 import Kinect2 from libkinect2.utils import draw_skeleton, dist import numpy as np import cv2 # Init kinect w/all visual sensors kinect = Kinect2(use_sensors=['color', 'body']) kinect.connect() kinect.wait_for_worker() def draw_hand(color_img, hand, wrist): # Draw hand based on its state if hand.tracking == 'tracked' and wrist.tracking == 'tracked': size = dist(hand.color_pos, wrist.color_pos) if hand.state == 'closed': cv2.circle(color_img, hand.color_pos, size, (0, 0, 255), -1) else: cv2.circle(color_img, hand.color_pos, size, (0, 255, 0), 2) def draw_face(color_img, face): # Draw face with facial landmarks if face is not None: for i in range(68): x, y = face.points[i] cv2.circle(color_img, (x, y), 2, (255, 255, 255), -1) for _, color_img, bodies in kinect.iter_frames(): for body in bodies: face = body.get_face(color_img) draw_skeleton(color_img, body) draw_hand(color_img, body['hand_left'], body['wrist_left']) draw_hand(color_img, body['hand_right'], body['wrist_right']) draw_face(color_img, face) cv2.imshow('sensors', color_img) key = cv2.waitKey(1) & 0xFF if key == ord('q'): break kinect.disconnect()<file_sep>#!/usr/bin/env python import os try: from setuptools import setup, find_packages except: raise Exception('setuptools is required for installation') def join(*paths): return 
os.path.normpath(os.path.join(*paths)) VERSION_PATH = join(__file__, '..', 'libkinect2', 'version.py') def get_version(): with open(VERSION_PATH, 'r') as version: out = {} exec(version.read(), out) return out['__version__'] setup( name='libkinect2', version=get_version(), author='<NAME>', url='https://github.com/sshh12/LibKinect2', packages=find_packages(), package_data={ 'libkinect2': [ 'data/Kinect2-API.dll', 'data/shape_predictor_68_face_landmarks.dat' ] }, license='MIT' ) <file_sep>""" Helpful functions. """ from .audio import AudioFrame import numpy as np import cv2 ## Adapted from: ## https://github.com/Kinect/PyKinect2/blob/c86d575175edf5fc0834590acdfb82b55b5ccd96/examples/PyKinectBodyGame.py#L76 BODY_EDGES = [ # Torso ('head', 'neck'), ('neck', 'spine_shoulder'), ('spine_shoulder', 'spine_mid'), ('spine_mid', 'spine_base'), ('spine_shoulder', 'shoulder_right'), ('spine_shoulder', 'shoulder_left'), ('spine_base', 'hip_right'), ('spine_base', 'hip_left'), # Right Arm ('shoulder_right', 'elbow_right'), ('elbow_right', 'wrist_right'), ('wrist_right', 'hand_right'), ('hand_right', 'hand_right_tip'), ('wrist_right', 'thumb_right'), # Left Arm ('shoulder_left', 'elbow_left'), ('elbow_left', 'wrist_left'), ('wrist_left', 'hand_left'), ('hand_left', 'hand_left_tip'), ('wrist_left', 'thumb_left'), # Right Leg ('hip_right', 'knee_right'), ('knee_right', 'ankle_right'), ('ankle_right', 'foot_right'), # Left Leg ('hip_left', 'knee_left'), ('knee_left', 'ankle_left'), ('ankle_left', 'foot_left') ] def draw_skeleton(color_img, body, color=(0, 255, 0), allow_inferred=False): """ Draw skeleton onto `color_img` (an array of shape (height, width, colors)) using joints from `body`. 
""" for part_a, part_b in BODY_EDGES: joint_a = body[part_a] joint_b = body[part_b] if allow_inferred or (joint_a.tracking == 'tracked' and joint_b.tracking == 'tracked'): cv2.line(color_img, body[part_a].color_pos, body[part_b].color_pos, color, 2) def depth_map_to_image(depth_map): """ Convert `depth_map` to a multicolor image for visualization. """ h, w, _ = depth_map.shape img = np.empty((h, w, 3)) normalized_map = (depth_map[:, :, 0] / 8000.0) img[:, :, 0] = normalized_map * 180 img[:, :, 1] = 150 + normalized_map * 100 img[:, :, 2] = 150 + normalized_map * 100 return cv2.cvtColor(img.astype(np.uint8), cv2.COLOR_HSV2BGR) def ir_to_image(ir_image): """ Convert `ir_image` to a multicolor image for visualization. """ h, w, _ = ir_image.shape img = np.empty((h, w, 3)) normalized_img = (ir_image[:, :, 0] / 65535.0) img[:, :, 0] = normalized_img * 255 img[:, :, 1] = normalized_img * 255 img[:, :, 2] = normalized_img * 255 return img.astype(np.uint8) def dist(pos_a, pos_b): """ Distance between two points """ return int(((pos_a[0] - pos_b[0])**2 + (pos_a[1] - pos_b[1])**2)**0.5) def merge_audio_frames(audio_frames): """ Merge `AudioFrame`s into a single `AudioFrame` by combining / averaging data. """ data = np.concatenate([frame.data for frame in audio_frames]) beam_angle = np.mean([frame.beam_angle for frame in audio_frames]) beam_conf = np.mean([frame.beam_conf for frame in audio_frames]) return AudioFrame(beam_angle, beam_conf, data)<file_sep>""" The Kinect2 class """ from .dll_lib import * from .body import Body, Joint from .audio import AudioFrame import numpy as np import time import cv2 class Kinect2: """ The main Kinect2 class for interacting with the sensor. """ def __init__(self, use_sensors=['color'], use_mappings=[]): """ Create a Kinect obj to use the given sensors. Args: use_sensors: [color, depth, ir, body, audio] use_mappings: [ (color, camera), (depth, camera), (depth, color), (color, depth) ] Note: * At least one sensor must be provided. 
* Mappings are (from_type, to_type). """ self._kinect = init_lib() self.sensor_flags = 0 self.mapping_flags = 0 if 'color' in use_sensors: self.sensor_flags |= F_SENSOR_COLOR if 'depth' in use_sensors: self.sensor_flags |= F_SENSOR_DEPTH if 'ir' in use_sensors: self.sensor_flags |= F_SENSOR_IR if 'body' in use_sensors: self.sensor_flags |= F_SENSOR_BODY if 'audio' in use_sensors: self.sensor_flags |= F_SENSOR_AUDIO if ('color', 'camera') in use_mappings: self.mapping_flags |= F_MAP_COLOR_CAM if ('depth', 'camera') in use_mappings: self.mapping_flags |= F_MAP_DEPTH_CAM if ('depth', 'color') in use_mappings: self.mapping_flags |= F_MAP_DEPTH_COLOR if ('color', 'depth') in use_mappings: self.mapping_flags |= F_MAP_COLOR_DEPTH if self.sensor_flags == 0: raise ValueError('At least one sensor must be provided.') def connect(self): """ Connect to the device. """ if not self._kinect.init_kinect(self.sensor_flags, self.mapping_flags): raise IOError('Unable to init Kinect2 Sensor.') return True def disconnect(self): """ Disconnect from the device. """ self._kinect.close_kinect() def get_color_image(self, color_format='bgr'): """ Get the current color image. Args: color_format: rgba, bgr, or rgb Returns: numpy array """ color_ary = np.empty((COLOR_HEIGHT, COLOR_WIDTH, COLOR_CHANNELS), np.uint8) if self._kinect.get_color_data(color_ary): if color_format == 'rgba': return color_ary elif color_format == 'bgr': return cv2.cvtColor(color_ary, cv2.COLOR_RGBA2BGR) elif color_format == 'rgb': return cv2.cvtColor(color_ary, cv2.COLOR_RGBA2RGB) else: raise NotImplementedError() return None def get_ir_image(self): """ Get the current inferred image. Returns: numpy array """ ir_ary = np.empty((IR_HEIGHT, IR_WIDTH, 1), np.uint16) if self._kinect.get_ir_data(ir_ary): return ir_ary return None def get_depth_map(self): """ Get the current depth map. 
Returns: numpy array """ depth_ary = np.empty((DEPTH_HEIGHT, DEPTH_WIDTH, 1), np.uint16) if self._kinect.get_depth_data(depth_ary): return depth_ary return None def _get_raw_bodies(self): body_ary = np.empty((MAX_BODIES, BODY_PROPS), np.uint8) joint_ary = np.empty((MAX_BODIES, MAX_JOINTS, JOINT_PROPS), np.int32) if self._kinect.get_body_data(body_ary, joint_ary): return body_ary, joint_ary return None, None def get_bodies(self): """ Get the currently tracked bodies. Returns: `Body` array """ body_ary, joint_ary = self._get_raw_bodies() bodies = [] if body_ary is not None: for i in range(MAX_BODIES): if body_ary[i, 0]: bodies.append(Body(i, body_ary[i], joint_ary[i])) return bodies def _get_raw_audio(self): audio_ary = np.empty((AUDIO_BUF_LEN * SUBFRAME_SIZE), np.float32) meta_ary = np.empty((AUDIO_BUF_LEN, 2), np.float32) frame_cnt = self._kinect.get_audio_data(audio_ary, meta_ary) return frame_cnt, audio_ary, meta_ary def get_audio_frames(self): """ Get the latest audio frames. Returns: array of `AudioFrame` """ frame_cnt, audio_ary, meta_ary = self._get_raw_audio() frames = [] for i in range(frame_cnt): beam_angle = meta_ary[i, 0] beam_conf = meta_ary[i, 1] samples = audio_ary[i*SUBFRAME_SIZE:(i+1)*SUBFRAME_SIZE] frames.append(AudioFrame(beam_angle, beam_conf, samples)) return frames def map(self, from_type, to_type): """ Get a mapping between visual sensors. 
Args: from_type: The sensor space to convert (color, depth) to_type: The target sensor space (color, depth, camera) Returns: numpy array of mapping """ result = None if from_type == 'color' and to_type == 'camera': map_ary = np.empty((COLOR_HEIGHT, COLOR_WIDTH, 3), np.float32) if self._kinect.get_map_color_to_camera(map_ary): result = map_ary elif from_type == 'depth' and to_type == 'camera': map_ary = np.empty((DEPTH_HEIGHT, DEPTH_WIDTH, 3), np.float32) if self._kinect.get_map_depth_to_camera(map_ary): result = map_ary elif from_type == 'depth' and to_type == 'color': map_ary = np.empty((DEPTH_HEIGHT, DEPTH_WIDTH, 2), np.float32) if self._kinect.get_map_depth_to_color(map_ary): result = map_ary elif from_type == 'color' and to_type == 'depth': map_ary = np.empty((COLOR_HEIGHT, COLOR_WIDTH, 2), np.float32) if self._kinect.get_map_color_depth(map_ary): result = map_ary return result def wait_for_worker(self, first_tick=0, timeout=5): """ Wait for the frame fetching working to collect data for the first frame. """ start = time.time() while self._kinect.get_tick() == first_tick: time.sleep(0.1) if time.time() - start >= timeout: raise IOError('Kinect took too long. Try restarting the device.') def iter_frames(self, limit_fps=60): """ Iterate through sensor data. Args: limit_fps: Cap the framerate/datarate Returns: array of each type of data being collected. 
""" i = 0 frame_time = 1.0 / limit_fps start_time = time.time() while True: data = [i] if self.sensor_flags & F_SENSOR_COLOR: data.append(self.get_color_image()) if self.sensor_flags & F_SENSOR_DEPTH: data.append(self.get_depth_map()) if self.sensor_flags & F_SENSOR_IR: data.append(self.get_ir_image()) if self.sensor_flags & F_SENSOR_BODY: data.append(self.get_bodies()) if self.sensor_flags & F_SENSOR_AUDIO: data.append(self.get_audio_frames()) if self.mapping_flags & F_MAP_COLOR_CAM: data.append(self.map('color', 'camera')) if self.mapping_flags & F_MAP_DEPTH_CAM: data.append(self.map('depth', 'camera')) if self.mapping_flags & F_MAP_DEPTH_COLOR: data.append(self.map('depth', 'color')) if self.mapping_flags & F_MAP_COLOR_DEPTH: data.append(self.map('color', 'depth')) yield data end_time = time.time() if start_time - end_time < frame_time: time.sleep(frame_time - (start_time - end_time)) end_time = time.time() start_time = end_time i += 1<file_sep>""" Demonstrating basic usage with audio data. """ from libkinect2 import Kinect2 from libkinect2.utils import merge_audio_frames import numpy as np import time import cv2 # Init kinect w/all visual sensors kinect = Kinect2(use_sensors=['color', 'audio']) kinect.connect() kinect.wait_for_worker() # Playback ## import pyaudio ## p = pyaudio.PyAudio() ## stream = p.open(format=pyaudio.paFloat32, ## channels=1, ## rate=16000, ## output=True) # A super simple (less accurate) method to map beam angle to position. # Using linear regression calibrated w/random test data. 
def beam_angle_to_pos(angle): return int(angle * 1419.2 + 23.898 + 1920 / 2) # Average angle of audio beams rolling_avg_angle = 0 for _, color_img, audio_frames in kinect.iter_frames(): if audio_frames: # Combine raw audio samples from frames full_frame = merge_audio_frames(audio_frames) # Update beam angle avg_angle = full_frame.beam_angle rolling_avg_angle = rolling_avg_angle * 0.8 + avg_angle * 0.2 ## stream.write(full_frame.data) # Plot the audio direction sound_x = beam_angle_to_pos(rolling_avg_angle) cv2.putText(color_img, "Audio Beam", (sound_x - 90, 200), cv2.FONT_HERSHEY_SIMPLEX, 1, (255,0,0), 2, cv2.LINE_AA) color_img[:, sound_x-5:sound_x+5, 0] += 100 color_img[:, sound_x-5:sound_x+5, 1] -= 100 color_img[:, sound_x-5:sound_x+5, 2] -= 100 cv2.imshow('sensors', color_img) key = cv2.waitKey(1) & 0xFF if key == ord('q'): break ## stream.close() kinect.disconnect()<file_sep>""" Demonstrating basic usage of Kinect2 cameras. """ from libkinect2 import Kinect2 import numpy as np import cv2 # Init kinect w/color->camera mapping kinect = Kinect2(use_sensors=['color', 'depth'], use_mappings=[('color', 'camera')]) kinect.connect() kinect.wait_for_worker() # 3D Plot (super slow, but works for demo) from mpl_toolkits.mplot3d import Axes3D import matplotlib.pyplot as plt fig = plt.figure() ax = fig.add_subplot(111, projection='3d') for _, color_img, depth_map, color_cam_map in kinect.iter_frames(): # Display color image as reference cv2.imshow('sensors', color_img) key = cv2.waitKey(1) & 0xFF if key == ord('q'): break h, w, _ = color_img.shape X = [] Y = [] Z = [] C = [] # Read (every 10th) point pos and color for y_pixel in range(0, h, 10): for x_pixel in range(0, w, 10): x, z, y = color_cam_map[y_pixel, x_pixel] b, g, r = color_img[y_pixel, x_pixel] / 255.0 X.append(x) Y.append(y) Z.append(z) C.append([r, g, b]) # Plot ax.clear() ax.set_xlim((-2,2)) ax.set_ylim((-2,2)) ax.set_zlim((-1,2)) ax.scatter(X, Y, Z, s=1, c=C) ax.set_xlabel('X') ax.set_ylabel('Y') 
ax.set_zlabel('Z') fig.canvas.draw() plt.pause(0.001) kinect.disconnect()<file_sep># LibKinect2 > A Python API for interfacing with the [Kinect2](https://www.amazon.com/Xbox-One-Kinect-Sensor/dp/B00INAX3Q2/). ## Usage #### Install (x64 Only) `pip install https://github.com/sshh12/LibKinect2/releases/download/v0.1.0/libkinect2-0.1.0.tar.gz` #### Demos ```python from libkinect2 import Kinect2 from libkinect2.utils import depth_map_to_image import numpy as np import cv2 # Init Kinect2 w/2 sensors kinect = Kinect2(use_sensors=['color', 'depth']) kinect.connect() kinect.wait_for_worker() for _, color_img, depth_map in kinect.iter_frames(): # Display color and depth data cv2.imshow('color', color_img) cv2.imshow('depth', depth_map_to_image(depth_map)) key = cv2.waitKey(1) & 0xFF if key == ord('q'): break kinect.disconnect() ``` [Example Scripts](https://github.com/sshh12/LibKinect2/tree/master/examples) ![cameras](https://user-images.githubusercontent.com/6625384/59576903-088db480-9087-11e9-96f6-251240d25f0c.gif) ![body](https://user-images.githubusercontent.com/6625384/59576877-e8f68c00-9086-11e9-826b-eceb6eb80573.gif) ![audio](https://user-images.githubusercontent.com/6625384/59576951-3672f900-9087-11e9-9c4d-aeebc676a500.gif) ![mapper](https://user-images.githubusercontent.com/6625384/59576934-222efc00-9087-11e9-94cd-01e9cd634722.gif) #### Docs For python docs, run: ```python from libkinect2 import Kinect2 help(Kinect2) ``` [Other Docs](https://github.com/sshh12/LibKinect2/tree/master/Kinect2-API) ## Issues I'm sure there are plenty of issues, so fill free to [create one](https://github.com/sshh12/LibKinect2/issues) or [fix one](https://github.com/sshh12/LibKinect2/pulls). ## Related There's a whole bunch of these... 
* [Kinect/PyKinect2](https://github.com/Kinect/PyKinect2) * [Qirky/PyKinectTk](https://github.com/Qirky/PyKinectTk) * [kiddos/pykinect2](https://github.com/kiddos/pykinect2) * [amiller/libfreenect-goodies](https://github.com/amiller/libfreenect-goodies) * [colincsl/pyKinectTools](https://github.com/colincsl/pyKinectTools) * [maxime-tournier/kinect2](https://github.com/maxime-tournier/kinect2)<file_sep>#pragma once #include "stdafx.h" #include <Kinect.h> #define EXPORTFUNC extern "C" __declspec(dllexport) #define SAFE_RELEASE(p) { if ( (p) ) { (p)->Release(); (p) = 0; } } EXPORTFUNC bool init_kinect(int sensor_flags, int mapping_flags); EXPORTFUNC void close_kinect(); EXPORTFUNC void pause_worker(); EXPORTFUNC void resume_worker(); EXPORTFUNC int get_tick(); HRESULT run_multi_worker(); DWORD WINAPI multi_worker_wrapper(_In_ LPVOID lp_param); inline void process_body(IBody* body, int body_idx, Joint* joints, JointOrientation* joint_orients); HRESULT run_audio_worker(); DWORD WINAPI audio_worker_wrapper(_In_ LPVOID lp_param); inline void process_audio_subframe(IAudioBeamSubFrame* subframe, int index); EXPORTFUNC bool get_color_data(UINT8* array); EXPORTFUNC bool get_ir_data(UINT16* array); EXPORTFUNC bool get_depth_data(UINT16* array); EXPORTFUNC bool get_body_data(UINT8* body_array, INT32* joint_array); EXPORTFUNC int get_audio_data(FLOAT* array, FLOAT* meta_array); EXPORTFUNC bool get_map_color_to_camera(FLOAT* array); EXPORTFUNC bool get_map_depth_to_camera(FLOAT* array); EXPORTFUNC bool get_map_depth_to_color(FLOAT* array); EXPORTFUNC bool get_map_color_depth(FLOAT* array);<file_sep>""" Demonstrating basic usage of Kinect2 cameras. 
""" from libkinect2 import Kinect2 from libkinect2.utils import draw_skeleton, depth_map_to_image, ir_to_image import numpy as np import cv2 # Init kinect w/all visual sensors kinect = Kinect2(use_sensors=['color', 'depth', 'ir', 'body']) kinect.connect() kinect.wait_for_worker() for _, color_img, depth_map, ir_data, bodies in kinect.iter_frames(): # Use the color image as the background bg_img = color_img # Paste on the depth and ir images bg_img[-424:, :512, :] = depth_map_to_image(depth_map) bg_img[-424:, -512:, :] = ir_to_image(ir_data) # Draw simple skeletons body_img = np.zeros(color_img.shape) for body in bodies: draw_skeleton(body_img, body) bg_img[:424, -512:, :] = cv2.resize(body_img, (512, 424)) cv2.imshow('sensors', bg_img) key = cv2.waitKey(1) & 0xFF if key == ord('q'): break kinect.disconnect()<file_sep>""" Code related to audio processing. """ from .dll_lib import * import math class AudioFrame: """ A frame of Audio Attributes: beam_angle: The audio angle in radians beam_conf: The device's confidence in the beam angle data: The raw sample data as a numpy array """ def __init__(self, beam_angle, beam_conf, samples): """ Create a body from raw body/joint data. Note: Should not be called by user. Use `kinect.get_audio_frames()`. """ self.beam_angle = beam_angle self.beam_conf = beam_conf self.data = samples def __repr__(self): degs = round(math.degrees(self.beam_angle), 1) return '<AudioFrame [{}°]>'.format(degs)<file_sep>""" Entry point. """ from .version import __version__ from .kinect import Kinect2<file_sep>""" Code for interfacing with the compiled library. 
""" from pkg_resources import resource_filename import numpy as np import ctypes ### Constants ### F_SENSOR_COLOR = 0x00000001 F_SENSOR_DEPTH = 0x00000010 F_SENSOR_IR = 0x00000100 F_SENSOR_BODY = 0x00001000 F_SENSOR_MULTI = 0x00001111 F_SENSOR_AUDIO = 0x00010000 F_MAP_COLOR_CAM = 0x00000002 F_MAP_DEPTH_CAM = 0x00000020 F_MAP_DEPTH_COLOR = 0x00000200 F_MAP_COLOR_DEPTH = 0x00002000 COLOR_WIDTH = 1920 COLOR_HEIGHT = 1080 COLOR_CHANNELS = 4 DEPTH_WIDTH = 512 DEPTH_HEIGHT = 424 IR_WIDTH = 512 IR_HEIGHT = 424 MAX_SUBFRAMES = 8 SUBFRAME_SIZE = 256 AUDIO_BUF_LEN = 512 SUBFRAME_SIZE = 256 MAX_BODIES = 6 BODY_PROPS = 15 MAX_JOINTS = 25 JOINT_PROPS = 9 FLOAT_MULT = 100000 ### Enum Mappings ### JOINT_MAP = { 'spine_base': 0, 'spine_mid': 1, 'neck': 2, 'head': 3, 'shoulder_left': 4, 'elbow_left': 5, 'wrist_left': 6, 'hand_left': 7, 'shoulder_right': 8, 'elbow_right': 9, 'wrist_right': 10, 'hand_right': 11, 'hip_left': 12, 'knee_left': 13, 'ankle_left': 14, 'foot_left': 15, 'hip_right': 16, 'knee_right': 17, 'ankle_right': 18, 'foot_right': 19, 'spine_shoulder': 20, 'hand_left_tip': 21, 'thumb_left': 22, 'hand_right_tip': 23, 'thumb_right': 24, 'eye_left': -1, 'eye_right': -1, 'mouth': -1 } TRACKING_MAP = [None, 'inferred', 'tracked'] HIGH_CONFIDENCE_MAP = [False, True] HAND_MAP = ['unk', None, 'open', 'closed', 'lasso'] DETECTION_MAP = ['unk', None, 'maybe', 'yes'] def init_lib(dll_path=None): """ Load the dll and add arg/return types. 
""" if dll_path is None: dll_path = resource_filename(__name__, 'data/Kinect2-API.dll') kinectDLL = ctypes.cdll.LoadLibrary(dll_path) kinectDLL.init_kinect.argtypes = [ctypes.c_int, ctypes.c_int] kinectDLL.init_kinect.restype = ctypes.c_bool kinectDLL.close_kinect.argtypes = [] kinectDLL.close_kinect.restype = None kinectDLL.pause_worker.argtypes = [] kinectDLL.pause_worker.restype = None kinectDLL.resume_worker.argtypes = [] kinectDLL.resume_worker.restype = None kinectDLL.get_tick.argtypes = [] kinectDLL.get_tick.restype = ctypes.c_int32 kinectDLL.get_color_data.argtypes = [np.ctypeslib.ndpointer(dtype=np.uint8)] kinectDLL.get_color_data.restype = ctypes.c_bool kinectDLL.get_ir_data.argtypes = [np.ctypeslib.ndpointer(dtype=np.uint16)] kinectDLL.get_ir_data.restype = ctypes.c_bool kinectDLL.get_depth_data.argtypes = [np.ctypeslib.ndpointer(dtype=np.uint16)] kinectDLL.get_depth_data.restype = ctypes.c_bool kinectDLL.get_body_data.argtypes = [np.ctypeslib.ndpointer(dtype=np.uint8), np.ctypeslib.ndpointer(dtype=np.int32)] kinectDLL.get_body_data.restype = ctypes.c_bool kinectDLL.get_audio_data.argtypes = [np.ctypeslib.ndpointer(dtype=np.float32), np.ctypeslib.ndpointer(dtype=np.float32)] kinectDLL.get_audio_data.restype = ctypes.c_int32 kinectDLL.get_map_color_to_camera.argtypes = [np.ctypeslib.ndpointer(dtype=np.float32)] kinectDLL.get_map_color_to_camera.restype = ctypes.c_bool kinectDLL.get_map_depth_to_camera.argtypes = [np.ctypeslib.ndpointer(dtype=np.float32)] kinectDLL.get_map_depth_to_camera.restype = ctypes.c_bool kinectDLL.get_map_depth_to_color.argtypes = [np.ctypeslib.ndpointer(dtype=np.float32)] kinectDLL.get_map_depth_to_color.restype = ctypes.c_bool kinectDLL.get_map_color_depth.argtypes = [np.ctypeslib.ndpointer(dtype=np.float32)] kinectDLL.get_map_color_depth.restype = ctypes.c_bool return kinectDLL <file_sep>""" Code related to body tracking. 
""" from pkg_resources import resource_filename from .dll_lib import * from .utils import dist import cv2 try: import dlib face_detector = dlib.get_frontal_face_detector() face_feat_detector = dlib.shape_predictor( resource_filename(__name__, 'data/shape_predictor_68_face_landmarks.dat')) DLIB_LOADED = True except ImportError: DLIB_LOADED = False class Body: """ A body tracked by the Kinect. Attributes: idx: The tracking index tracked: If this body is tracked engaged: State of person's engagement restricted: If the body is restricted """ def __init__(self, idx, body_ary, joints_ary): """ Create a body from raw body/joint data. Note: Should not be called by user. Use `kinect.get_bodies()`. """ self.idx = idx self._body_ary = body_ary self._joints_ary = joints_ary self._joints_cache = {} self._load_props() def _load_props(self): self.tracked = bool(self._body_ary[0]) ## These are not yet supported by Kinect2 )': ## self.engaged = DETECTION_MAP[self._body_ary[1]] ## self.restricted = bool(self._body_ary[2]) ## self.neutral = DETECTION_MAP[self._body_ary[7]] ## self.happy = DETECTION_MAP[self._body_ary[8]] ## self.looking_away = DETECTION_MAP[self._body_ary[13]] ## self.glasses = DETECTION_MAP[self._body_ary[14]] def get_face(self, color_img): """ Use body data (self) and color_img to extract their face. """ if not DLIB_LOADED: raise Exception('Dlib is required to use this method.') head = self.__getitem__('head') neck = self.__getitem__('neck') face = Face(color_img, head, neck) if face.exists: return face return None def keys(self): """ Return a list of keys (joints). Returns: list of joint names """ return JOINT_MAP.keys() def __getitem__(self, joint_name): """ Get a joint of this body by name. Returns: `Joint` Note: Use `body.keys()` for list of joints. 
""" joint_name = joint_name.lower() if joint_name in self._joints_cache: return self._joints_cache[joint_name] joint_idx = JOINT_MAP[joint_name.lower()] if joint_idx == -1: raise NotImplementedError() joint = Joint(joint_name, self._body_ary, self._joints_ary[joint_idx]) self._joints_cache[joint_name] = joint return joint def __repr__(self): if self.tracked: state = ' [Tracked]' else: state = '' return '<Body ({}){}>'.format(self.idx, state) class Joint: """ A joint. Attributes: name: Name of the joint tracking: The current tracking state color_pos: Position in the color camera space - (x, y) depth_pos: Position in the depth sensor space - (x, y) orientation: Orientation as (w, x, y, z) state: The state of the joint if provided by Kinect API """ def __init__(self, joint_name, body_ary, joint_ary): """ Create a joint from raw body/joint data. Note: Should not be called by user. Use `body[joint_name]`. """ self.name = joint_name self._body_ary = body_ary self._joint_ary = joint_ary self._load_props() def _load_props(self): self.tracking = TRACKING_MAP[self._joint_ary[0]] self.color_pos = (self._joint_ary[1], self._joint_ary[2]) self.depth_pos = (self._joint_ary[3], self._joint_ary[4]) self.orientation = ( self._joint_ary[5] / FLOAT_MULT, self._joint_ary[6] / FLOAT_MULT, self._joint_ary[7] / FLOAT_MULT, self._joint_ary[8] / FLOAT_MULT ) if self.name == 'hand_left': self.confidence = HIGH_CONFIDENCE_MAP[self._body_ary[3]] self.state = HAND_MAP[self._body_ary[4]] elif self.name == 'hand_right': self.confidence = HIGH_CONFIDENCE_MAP[self._body_ary[5]] self.state = HAND_MAP[self._body_ary[6]] else: self.confidence = None self.state = None def __repr__(self): if self.state: return '<Joint {} [{}] [{}]>'.format(self.name.title(), self.state, self.tracking) else: return '<Joint {} [{}]>'.format(self.name.title(), self.tracking) class Face: """ A person's face. 
Attributes: color_img: The original img this face was extracted from head: The head `Joint` used neck: The neck `Joint` used pos: The postion of this face - (x, y) exists: If this face was actual found / exists rect: A rectangle bbox of the face point: A numpy array containing 68 facial landmark positions """ def __init__(self, color_img, head, neck): """ Create face from an image and head/neck joints. Note: Should not be called by user. Use `body.get_face()`. """ self.color_img = color_img self.head = head self.pos = head.color_pos self.neck = neck self.exists = False self.rect = None self.points = None self._find() def _find(self): head_x, head_y = self.pos if self.head.tracking != 'tracked' or self.neck.tracking != 'tracked' or min(head_x, head_y) <= 0: return radius = int(dist(self.head.color_pos, self.neck.color_pos) * 1.5) radius = min([head_x, head_y, radius]) x1, x2 = head_x-radius, head_x+radius y1, y2 = head_y-radius, head_y+radius face_img = self.color_img[y1:y2, x1:x2, :] if 0 in face_img.shape: return self.face_img = np.copy(face_img) rects = face_detector(self.face_img, 1) if len(rects) == 0: return self.exists = True self.rect = rects[0] self.points = np.zeros((68, 2), dtype=np.int) shape = face_feat_detector(self.face_img, self.rect) for i in range(68): self.points[i] = (shape.part(i).x + x1, shape.part(i).y + y1) def __repr__(self): if self.exists: return '<Face [Valid @ {}]>'.format(self.pos) else: return '<Face [Invalid]>'
10f8bd69c2674b314e14b30a96b43e4b68d4dfee
[ "Markdown", "C", "Python", "C++" ]
16
Markdown
sshh12/LibKinect2
e1e696b66b79d55c69c77913cbcfcb03a3259e9d
45041d699f1d672d4386f1209eb2ba73b2e8d86d
refs/heads/master
<file_sep> (function ($) { function renderMaps(data) { ymaps.ready(init); function init() { var myMap = new ymaps.Map("map", { center: [55.76, 37.64], zoom: 2 }, { searchControlProvider: 'yandex#search' }); $.each(data, function (i, item) { moment.tz.add(item.timezone); var time2; function updateTime() { time2 = moment.tz(item.timezoneName).format('HH:mm:ss'); $('.' + item.capitalName).html(time2); }; updateTime(); setInterval(function () { updateTime(); }, 1000); myGeoObject = new ymaps.GeoObject({ geometry: { type: "Point", coordinates: [item.latitude, item.longtitude] }, properties: { hintContent: item.capitalName, balloonContentHeader: '<div><div>' + item.capitalName + '</div><div>' + item.country + '</div></div> ', iconContent: '<div class="' + item.capitalName + '">' + time2 + '</div>' } } , { preset: 'islands#blackStretchyIcon', draggable: false }), myMap.geoObjects .add(myGeoObject); }); } } $.fn.Timeinmaps = function (map) { console.log("dsfs"); renderMaps(map.data); }; }(jQuery)); <file_sep>index.html dosyasını herhangi bir browserda açarak deneyebilirsiniz.
8e81b209ebb38d59c26cee67cf350ccabed43fc0
[ "JavaScript", "Text" ]
2
JavaScript
bahareceyalcin/timeinmaps
9c04934e54f14bbd50fc63dbe746aac7aff5dff9
b3993e2c4889fa530efd73904b522121f8e3b447
refs/heads/master
<repo_name>datatube-owner/takeout<file_sep>/zhihu/export-opml.js 'use strict'; const fs = require('fs'); let zhihuURLs = JSON.parse(fs.readFileSync(process.argv[2])); let outlines = "" for (let url of zhihuURLs) { let xmlUrl = url.split("/").pop(); // You can customize the RSSHub host and route here if this one is not working well. outlines += `<outline type="rss" xmlUrl="https://datatube.dev/api/rss/zhihu/people/activities/${xmlUrl}"/>\n`; } let opml = `<?xml version="1.0" encoding="UTF-8"?> <opml version="1.0"> <head> <title>Zhihu OPML Followings Export</title> </head> <body> <outline title="Zhihu" text="Zhihu"> ${outlines} </outline> </body> </opml>`; fs.writeFileSync(process.argv[3], opml)<file_sep>/README.md Takeout for your subscriptions 拿回属于你的订阅数据 ==== This repo provides some scripts for getting back your subscription data on some social networks, such as Zhihu, in case you don't like it but want to get back your data and keep subscripting to people you follow in a read-only mode. 这个仓库提供了用于拿回在某些社交网站(如知乎)上的订阅数据的脚本(比如你不想使用知乎,但是想继续 用“只读模式”关注你关注的人)。 导出后的 OPML 格式文件可以在任何 RSS 阅读器上使用。 * Usage for exporting Zhihu followings to OPML: See `zhihu/main.js` ## Contribution 贡献 The code and instructions are mostly intended for English readers now, also, the currently supported exporting code is insufficient and might be broken in future. You are welcomed to open a PR to contribute to us! 
<file_sep>/zhihu/main.js // Part I: Run this in console after opening https://www.zhihu.com/people/<your-zhihu-username>/following let links = new Set(); function collect() { console.log("Collecting from this page..."); document.querySelectorAll('a.UserLink-link').forEach(a => links.add(a.href)) document.getElementsByClassName("PaginationButton-next")[0].click(); console.log("Waiting page loading..."); setTimeout(collect, 2000); } // When there is a "Uncaught TypeError: Cannot read property 'click' of undefined", the collection has finished, run the // link below to export your followings (In Chrome, you can "Copy" the output conveniently from console) console.log(JSON.stringify(Array.from(links))); // Save the copied file somewhere, like "path/to/following.json", and run // node export-opml.js path/to/following.json zhihu.opml // The output OPML is zhihu.opml
253cb8d890bca63cfd9e4c329b833558795b5b2c
[ "JavaScript", "Markdown" ]
3
JavaScript
datatube-owner/takeout
21b7ecda4fe49462093475a8fad3e80800808876
dd9822499621e9f29aed33962910949949b1eb86
refs/heads/master
<repo_name>kgoransika/C-sharp-Final-Assignment<file_sep>/C sharp Final Assignment/Form1.cs using System; using System.Collections.Generic; using System.ComponentModel; using System.Data; using System.Drawing; using System.Linq; using System.Text; using System.Threading.Tasks; using System.Windows.Forms; using System.Data.SqlClient; namespace C_sharp_Final_Assignment { public partial class Form1 : Form { public Form1() { InitializeComponent(); } private void label1_Click(object sender, EventArgs e) { } private void label3_Click(object sender, EventArgs e) { } private void button2_Click(object sender, EventArgs e) { } private void label3_Click_1(object sender, EventArgs e) { } private void button1_Click(object sender, EventArgs e) { DateTime now = DateTime.Now; int month = now.Month-1; int year = now.Year; int endDate = 0; switch (month) { case 1: endDate = 31; break; case 2: endDate = 28; break; case 3: endDate = 31; break; case 4: endDate = 30; break; case 5: endDate = 31; break; case 6: endDate = 30; break; case 7: endDate = 31; break; case 8: endDate = 31; break; case 9: endDate = 30; break; case 10: endDate = 31; break; case 11: endDate = 30; break; case 12: endDate = 31; break; default: break; } lblReportPeriod.Text = year.ToString() + " / " + month.ToString("00")+" / 01 - "+ year.ToString()+" / " + month.ToString("00")+" / "+endDate.ToString(); SqlConnection con = new SqlConnection(@"Data Source=(LocalDB)\MSSQLLocalDB;AttachDbFilename=C:\Users\ovinr\OneDrive\Documents\testdb.mdf;Integrated Security=True;Connect Timeout=30"); con.Open(); string totalQtySold = "SELECT qtySold FROM Sales"; string tot = totalQtySold[0].ToString(); lblItemSalesCount.Text = tot; } private void label4_Click(object sender, EventArgs e) { } private void label10_Click(object sender, EventArgs e) { } } }
476f291cd97997da4389e51b88d5b5d3f9bf6ac5
[ "C#" ]
1
C#
kgoransika/C-sharp-Final-Assignment
6c4f18d8f15fa202e1aefff38e867ba4ef3bbde3
4941a2e831c32976c84f25e75f3133f6b8348301
refs/heads/main
<repo_name>wanchanglin/mt<file_sep>/man/osc_sjoblom.Rd % wll-05-06-2007: % \name{osc_sjoblom} \alias{osc_sjoblom} \title{ Orthogonal Signal Correction (OSC) Approach by Sjoblom et al. } \description{ Orthogonal signal correction (OSC) approach by Sjoblom et al. } \usage{ osc_sjoblom(x, y, center=TRUE,osc.ncomp=4,pls.ncomp=10, tol=1e-3,iter=20,\dots) } % ---------------------------------------------------------------------------- \arguments{ \item{x}{ A numeric data frame or matrix to be pre-processed. } \item{y}{ A vector or factor specifying the class for each observation. } \item{center}{ A logical value indicating whether the data set should be centred by column-wise. } \item{osc.ncomp}{ The number of components to be used in the OSC calculation. } \item{pls.ncomp}{ The number of components to be used in the PLS calculation. } \item{tol}{ A scalar value of tolerance for OSC computation. } \item{iter}{ The number of iteration used in OSC calculation. } \item{\dots}{ Arguments passed to or from other methods. } } % ---------------------------------------------------------------------------- \value{ A list containing the following components: \item{x}{ A matrix of OSC corrected data set. } \item{R2}{ R2 statistics. It is calculated as the fraction of variation in X after OSC correction. } \item{angle}{ An angle used for checking if scores \code{t} is orthogonal to \code{y}. An angle close to 90 degree means that orthogonality is achieved in the correction process. } \item{w}{ A matrix of OSC weights. } \item{p}{ A matrix of OSC loadings. } \item{t}{ A matrix of OSC scores. } \item{center}{ A logical value indicating whether the data set has been centred by column-wise. } } % ---------------------------------------------------------------------------- \references{ <NAME>., <NAME>., <NAME>., <NAME>., <NAME>. (1998). An evaluation of orthogonal signal correction applied to calibration transfer of near infrared spectra. \emph{Chemometrics Intell. Lab. 
Syst.},44: 229-244. <NAME>., <NAME>. and <NAME>. (2002). An investigation of orthogonal correction algorithms and their characteristics. \emph{Journal of Chemometrics}, 16:176-188. <NAME>., <NAME>., <NAME>. (2001). Direct orthogonal signal correction. \emph{Chemometrics Intell. Lab. Syst.}, 56: 13-25. } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{ \code{\link{osc}}, \code{\link{predict.osc}}, \code{\link{osc_wold}}, \code{\link{osc_wise}} } % ---------------------------------------------------------------------------- \examples{ data(abr1) cl <- factor(abr1$fact$class) dat <- abr1$pos ## divide data as training and test data idx <- sample(1:nrow(dat), round((2/3)*nrow(dat)), replace=FALSE) ## construct train and test data train.dat <- dat[idx,] train.t <- cl[idx] test.dat <- dat[-idx,] test.t <- cl[-idx] ## build OSC model based on the training data res <- osc_sjoblom(train.dat, train.t) names(res) ## pre-process test data by OSC test.dat.1 <- predict.osc(res,test.dat)$x } \keyword{manip} <file_sep>/man/pcalda.Rd % wll-02-06-2007 % \name{pcalda} \alias{pcalda} \alias{pcalda.default} \alias{pcalda.formula} \alias{print.pcalda} \alias{summary.pcalda} \alias{print.summary.pcalda} \title{ Classification with PCADA } \description{ Classification with combination of principal component analysis (PCA) and linear discriminant analysis (LDA). } \usage{ pcalda(x, \dots) \method{pcalda}{default}(x, y, center = TRUE, scale. = FALSE, ncomp = NULL, tune=FALSE,\dots) \method{pcalda}{formula}(formula, data = NULL, \dots, subset, na.action = na.omit) } % ---------------------------------------------------------------------------- \arguments{ \item{formula}{ A formula of the form \code{groups ~ x1 + x2 + \dots} That is, the response is the grouping factor and the right hand side specifies the (non-factor) discriminators. 
} \item{data}{ Data frame from which variables specified in \code{formula} are preferentially to be taken. } \item{x}{ A matrix or data frame containing the explanatory variables if no formula is given as the principal argument. } \item{y}{ A factor specifying the class for each observation if no formula principal argument is given. } \item{center}{ A logical value indicating whether \code{x} should be shifted to zero centred by column-wise. } \item{scale.}{ A logical value indicating whether \code{x} should be scaled to have unit variance by column-wise before the analysis takes place. } \item{ncomp}{ The number of principal components to be used in the classification. If \code{NULL} and \code{tune=TRUE}, it is the row number of \code{x} minus the number of class indicating in \code{y}. If \code{NULL} and \code{tune=FALSE}, it is the half of row number of \code{x}. } \item{tune}{ A logical value indicating whether the best number of components should be tuned. } \item{\dots}{ Arguments passed to or from other methods. } \item{subset}{ An index vector specifying the cases to be used in the training sample. } \item{na.action}{ A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. } } % ---------------------------------------------------------------------------- \details{ A critical issue of applying linear discriminant analysis (LDA) is both the singularity and instability of the within-class scatter matrix. In practice, there are often a large number of features available, but the total number of training patterns is limited and commonly less than the dimension of the feature space. To tackle this issue, \code{pcalda} combines PCA and LDA for classification. It uses PCA for dimension reduction. 
The rotated data resulted from PCA will be the input variable to LDA for classification. } % ---------------------------------------------------------------------------- \value{ An object of class \code{pcalda} containing the following components: \item{x}{ The rotated data on discriminant variables. } \item{cl}{ The observed class labels of training data. } \item{pred}{ The predicted class labels of training data. } \item{posterior}{ The posterior probabilities for the predicted classes. } \item{conf}{ The confusion matrix based on training data. } \item{acc}{ The accuracy rate of training data. } \item{ncomp}{ The number of principal components used for classification. } \item{pca.out}{ The output of PCA. } \item{lda.out}{ The output of LDA. } \item{call}{ The (matched) function call. } } % ---------------------------------------------------------------------------- \note{ This function may be called giving either a formula and optional data frame, or a matrix and grouping factor as the first two arguments. 
} % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{ \code{\link{predict.pcalda}}, \code{\link{plot.pcalda}}, \code{\link{tune.func}} } % ---------------------------------------------------------------------------- \examples{ data(abr1) cl <- factor(abr1$fact$class) dat <- abr1$pos ## divide data as training and test data idx <- sample(1:nrow(dat), round((2/3)*nrow(dat)), replace=FALSE) ## construct train and test data train.dat <- dat[idx,] train.t <- cl[idx] test.dat <- dat[-idx,] test.t <- cl[-idx] ## apply pcalda model <- pcalda(train.dat,train.t) model summary(model) ## plot plot(model,dimen=c(1,2),main = "Training data",abbrev = TRUE) plot(model,main = "Training data",abbrev = TRUE) ## confusion matrix pred.te <- predict(model, test.dat)$class table(test.t,pred.te) } \keyword{classif} <file_sep>/man/pcaplot.Rd % wll-13-12-2007: % wll-15-07-2015: move 'ep' for 'pcaplot' into \dots. \name{pcaplot} \alias{pcaplot} \alias{pca.plot} \alias{pca.comp} \title{Plot Function for PCA with Grouped Values} \description{ Plot function for PCA with grouped values. } \usage{ pcaplot(x, y, scale = TRUE, pcs = 1:2, \dots) pca.plot(x, y, scale=TRUE, abbrev = FALSE, ep.plot=FALSE,\dots) pca.comp(x, scale=FALSE, pcs=1:2,\dots) } \arguments{ \item{x}{A matrix or data frame to be plotted. } \item{y}{A factor or vector giving group information of columns of \code{x}.} \item{scale}{ A logical value indicating whether the data set \code{x} should be scaled. } \item{pcs}{A vector of index of PCs to be plotted.} \item{ep.plot}{ A logical value indicating whether the ellipse should be plotted. } \item{abbrev}{ Whether the group labels are abbreviated on the plots. If \code{abbrev > 0} this gives \code{minlength} in the call to \code{abbreviate}. } \item{\dots}{ Further arguments to \code{\link{prcomp}} or \code{lattice}. 
See corresponding entry in \code{\link{xyplot}} for non-trivial details of \code{lattice}. For \code{pcaplot}, one argument is \code{ep}: an integer for plotting ellipse. \code{1} and \code{2} for plotting overall and group ellipse, respectively. Otherwise, none. For details, see \code{\link{panel.elli.1}}. } } \value{ \code{pcaplot} returns an object of class \code{"trellis"}. \code{pca.comp} returns a list with components: \item{scores}{ PCA scores} \item{vars}{Proportion of variance} \item{varsn}{A vector of string indicating the percentage of variance.} } \note{ Number of columns of \code{x} must be larger than 1. \code{pcaplot} uses \code{lattice} to plot PCA while \code{pca.plot} uses the basic graphics to do so. \code{pca.plot} plots PC1 and PC2 only. } \author{ <NAME> } \seealso{ \code{\link{grpplot}}, \code{\link{panel.elli.1}}, \code{\link{pca.plot.wrap}} } % ---------------------------------------------------------------------------- \examples{ ## examples of 'pcaplot' data(iris) pcaplot(iris[,1:4], iris[,5],pcs=c(2,1),ep=2) ## change confidence interval (see 'panel.elli.1') pcaplot(iris[,1:4], iris[,5],pcs=c(1,2),ep=2, conf.level = 0.9) pcaplot(iris[,1:4], iris[,5],pcs=c(2,1),ep=1, auto.key=list(space="top", columns=3)) pcaplot(iris[,1:4], iris[,5],pcs=c(1,3,4)) tmp <- pcaplot(iris[,1:4], iris[,5],pcs=1:3,ep=2) tmp ## change symbol's color, type and size pcaplot(iris[,1:4], iris[,5],pcs=c(2,1),main="IRIS DATA", cex=1.2, auto.key=list(space="right", col=c("black","blue","red"), cex=1.2), par.settings = list(superpose.symbol = list(col=c("black","blue","red"), pch=c(1:3)))) ## compare pcaplot and pca.plot. 
pcaplot(iris[,1:4], iris[,5],pcs=c(1,2),ep=2) pca.plot(iris[,1:4], iris[,5], ep.plot = TRUE) ## an example of 'pca.comp' pca.comp(iris[,1:4], scale = TRUE, pcs=1:3) } \keyword{plot} <file_sep>/man/feat.agg.Rd % lwc-15-02-2010 % lwc-25-02-2010: give example \name{feat.agg} \alias{feat.agg} \title{ Rank aggregation by Borda count algorithm } \description{ Use Borda count to get the final feature order. } \usage{ feat.agg(fs.rank.list) } \arguments{ \item{fs.rank.list}{ A data frame of feature orders by different feature selectors. } } \value{ A list with components: \item{fs.order}{Final feature order. } \item{fs.rank}{Aggregated rank list by Borda count. } } \author{ <NAME> } \seealso{ \code{\link{feat.rank.re}}, \code{\link{feat.mfs}} } % ---------------------------------------------------------------------- \examples{ data(abr1) dat <- preproc(abr1$pos[,200:400], method="log10") cls <- factor(abr1$fact$class) ## feature selection without resampling fs <- feat.mfs(dat, cls, method=c("fs.anova","fs.rf","fs.rfe"), is.resam=FALSE) ## rank aggregation fs.1 <- feat.agg(fs$fs.rank) names(fs.1) } \keyword{classif} <file_sep>/man/frank.err.Rd % lwc-13-11-2006 \name{frank.err} \alias{frank.err} \title{ Feature Ranking and Validation on Feature Subset } \description{ Get feature ranking on the training data and validate selected feature subsets by estimating their classification error rate. } \usage{ frank.err(dat.tr, cl.tr, dat.te, cl.te, cl.method="svm", fs.method="fs.auc", fs.order=NULL, fs.len="power2", \dots) } % -------------------------------------------------------------------- \arguments{ \item{dat.tr}{ A data frame or matrix of training data. Feature ranking and classification model are carried on this data set. } \item{cl.tr}{ A factor or vector of training class. } \item{dat.te}{ A data frame or matrix of test data. Error rates are calculated on this data set. } \item{cl.te}{ A factor or vector of test class. } \item{cl.method}{ Classification method to be used. 
Any classification methods can be employed if they have method \code{predict} (except \code{knn}) with output of predicted class label or one component with name of \code{class} in the returned list, such as \code{randomForest}, \code{svm}, \code{knn} and \code{lda}. } \item{fs.method}{ Feature ranking method. If \code{fs.order} is not \code{NULL}, it is ignored. } \item{fs.order}{ A vector of feature order. Default is \code{NULL} and then the feature selection will be performed on the training data. } \item{fs.len}{ The lengths of feature subsets used for validation. For details, see \code{\link{get.fs.len}}. } \item{\dots}{Additional parameters to \code{fs.method} or \code{cl.method}.} } % ---------------------------------------------------------------------------- \value{ A list with components: \item{cl.method}{Classification method used.} \item{fs.len}{The lengths of feature subsets used for validation.} \item{error}{Error rate for each feature length.} \item{fs.method}{Feature ranking method used.} \item{fs.order}{Feature order vector.} \item{fs.rank}{Feature ranking score vector.} } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{frankvali}}, \code{\link{get.fs.len}} } % ---------------------------------------------------------------------- \examples{ data(abr1) dat <- abr1$pos x <- preproc(dat[,110:500], method="log10") y <- factor(abr1$fact$class) dat <- dat.sel(x, y, choices=c("1","6")) x.1 <- dat[[1]]$dat y.1 <- dat[[1]]$cls idx <- sample(1:nrow(x.1), round((2/3)*nrow(x.1)), replace=FALSE) ## construct train and test data train.dat <- x.1[idx,] train.cl <- y.1[idx] test.dat <- x.1[-idx,] test.cl <- y.1[-idx] ## validate feature selection on some feature subsets res <- frank.err(train.dat, train.cl, test.dat, test.cl, cl.method="knn", fs.method="fs.auc", fs.len="power2") names(res) ## full feature order list res$fs.order ## validation on subsets of feature order res$error ## or first 
apply feature selection fs <- fs.auc(train.dat,train.cl) ## then apply error estimation for each selected feature subset res.1 <- frank.err(train.dat, train.cl, test.dat, test.cl, cl.method="knn", fs.order=fs$fs.order, fs.len="power2") res.1$error } \keyword{classif} <file_sep>/man/binest.Rd % lwc-11-10-2006 % lwc-31-10-2006: minor changes \name{binest} \alias{binest} \title{ Binary Classification } \description{ Binary classification. } \usage{ binest(dat, cl, choices = NULL, method, pars=valipars(),\dots) } % -------------------------------------------------------------------- \arguments{ \item{dat}{ A matrix or data frame containing the explanatory variables. } \item{cl}{ A factor specifying the class for each observation. } \item{choices}{ The vector or list of class labels to be chosen for binary classification. For details, see \code{\link{dat.sel}}. } \item{method}{ Classification method to be used. For details, see \code{\link{accest}}. } \item{pars}{ A list of parameters of the resampling method. For details, see \code{\link{valipars}}. } \item{\dots}{Additional parameters to \code{method}.} } % ---------------------------------------------------------------------- \value{ A list with components: \item{com}{A matrix of combination of the binary class labels.} \item{acc}{A table of classification accuracy for the binary combination in each iteration. } \item{method}{Classification method used.} \item{sampling}{Sampling scheme used.} \item{niter}{Number of iterations.} \item{nreps}{Number of replications in each iteration if resampling is not \code{loocv}. 
} } % ----------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{accest}}, \code{\link{valipars}}, \code{\link{dat.sel}} } % ---------------------------------------------------------------------- \examples{ # iris data set data(iris) dat <- subset(iris, select = -Species) cl <- iris$Species ## PCALDA with cross-validation pars <- valipars(sampling="cv",niter = 6, nreps = 5) binpcalda <- binest(dat,cl,choices=c("setosa"), method="pcalda", pars = pars) ## SVM with leave-one-out cross-validation. SVM kernel is 'linear'. pars <- valipars(sampling="loocv") binsvm <- binest(dat,cl,choices=c("setosa","virginica"), method="svm", pars = pars, kernel="linear") ## randomForest with bootstrap pars <- valipars(sampling="boot",niter = 5, nreps = 5) binrf <- binest(dat,cl,choices=c("setosa","virginica"), method="randomForest", pars = pars) ## KNN with randomised validation. The number of neighbours is 3. pars <- valipars(sampling="rand",niter = 5, nreps = 5) binknn <- binest(dat,cl,choices = list(c("setosa","virginica"), c("virginica","versicolor")), method="knn",pars = pars, k = 3) } \keyword{classif} <file_sep>/man/predict.osc.Rd % wll-05-06-2007: % \name{predict.osc} \alias{predict.osc} \title{ Predict Method for Class 'osc' } \description{ Pre-processing of new data by \code{osc}. } \usage{ \method{predict}{osc}(object, newdata,\dots) } % ---------------------------------------------------------------------------- \arguments{ \item{object}{ Object of class \code{osc}. } \item{newdata}{ A matrix or data frame of cases to be corrected by OSC. } \item{\dots}{ Arguments based from or to other methods. } } % ---------------------------------------------------------------------------- \details{ This function is a method for the generic function \code{predict()} for class \code{osc}. If \code{newdata} is omitted, the corrected data set used in model of \code{osc} will be returned. 
} % ---------------------------------------------------------------------------- \value{ A list containing the following components: \item{x}{ A matrix of OSC corrected data set. } \item{Q2}{ The fraction of variation in X after OSC correction for the new data. } } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{ \code{\link{osc}}, \code{\link{osc_wold}}, \code{\link{osc_sjoblom}}, \code{\link{osc_wise}} } % ---------------------------------------------------------------------------- \examples{ data(abr1) cl <- factor(abr1$fact$class) dat <- abr1$pos ## divide data as training and test data idx <- sample(1:nrow(dat), round((2/3)*nrow(dat)), replace=FALSE) ## construct train and test data train.dat <- dat[idx,] train.t <- cl[idx] test.dat <- dat[-idx,] test.t <- cl[-idx] ## build OSC model based on the training data res <- osc(train.dat, train.t, method="wold",osc.ncomp=2, pls.ncomp=4) names(res) res summary(res) ## pre-process test data by OSC test <- predict(res,test.dat) test.dat.1 <- test$x } \keyword{manip} <file_sep>/man/fs.relief.Rd % lwc-12-04-2007 \name{fs.relief} \alias{fs.relief} \title{ Feature Selection Using RELIEF Method } \description{ Feature selection using RELIEF method. } \usage{ fs.relief(x,y, m=NULL, k=10, \dots) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A data frame or matrix of data set. } \item{y}{ A factor or vector of class. } \item{m}{ Number of instances to sample without replacement. Default is \code{NULL} which takes all instances for computation. } \item{k}{ Number of nearest neighbours used to estimate feature relevance. } \item{\dots}{ Arguments to pass to method (current ignore). 
} } \details{ This function implements the \bold{Relief} algorithm's extension called \bold{ReliefF}, which applies to multi-class problem and searches for \code{k} of its nearest neighbours from the same class, called \emph{hits}, and also \code{k} nearest neighbours from each of the different classes, called \emph{misses}. } % ---------------------------------------------------------------------------- \value{ A list with components: \item{fs.rank}{A vector of feature ranking scores.} \item{fs.order}{A vector of feature order from best to worst.} \item{stats}{A vector of measurements.} } % ---------------------------------------------------------------------------- \references{ <NAME>. and <NAME>. (1992). The Feature Selection Problem: Traditional Methods and a new algorithm. \emph{Proc. Tenth National Conference on Artificial Intelligence}, MIT Press, 129 - 134. <NAME>., <NAME>., and <NAME>. (1997). Overcoming the Myopia of Induction Learning Algorithms with RELIEFF. \emph{Applied Intelligence}, Vol.7, 1, 39-55. <NAME>. (1994) Estimating Attributes: Analysis and Extensions of RELIEF, \emph{European Conference on Machine Learning}, Ed. <NAME> and <NAME>, 171-182, Springer <NAME>. and <NAME>. (2003) Theoretical and Empirical Analysis of ReliefF and RReliefF, \emph{Machine Learning}, 53, 23 - 69. } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------- \examples{ data(iris) x <- subset(iris, select = -Species) y <- iris$Species fs <- fs.relief(x, y, m=20,k=10) } \keyword{classif} <file_sep>/man/mbinest.Rd % wll-30-01-2007:commence % wll-03-07-2007:add auc and margin \name{mbinest} \alias{mbinest} \title{ Binary Classification by Multiple Classifier } \description{ Binary classification by multiple classifier. 
} \usage{ mbinest(dat, cl, choices = NULL, method, pars=valipars(),\dots) } % -------------------------------------------------------------------- \arguments{ \item{dat}{ A matrix or data frame containing the explanatory variables. } \item{cl}{ A factor specifying the class for each observation. } \item{choices}{ The vector or list of class labels to be chosen for binary classification. For details, see \code{\link{dat.sel}}. } \item{method}{ Multiple classification methods to be used. For details, see \code{\link{maccest}}. } \item{pars}{ A list of parameters of the resampling method. See \code{\link{valipars}} for details. } \item{\dots}{Additional parameters to \code{method}.} } % ---------------------------------------------------------------------- \value{ A list with components: \item{all}{All results of classification.} \item{com}{A matrix of the combinations of the binary class labels.} \item{acc}{A table of classification accuracy for the binary combination.} \item{mar}{Prediction margin.} \item{auc}{The area under receiver operating curve (AUC).} \item{method}{Classification methods used.} \item{niter}{Number of iterations.} \item{sampling}{Sampling scheme used.} \item{nreps}{Number of replications in each iteration if sampling is not \code{loocv}. 
} } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{maccest}}, \code{\link{valipars}}, \code{\link{dat.sel}} } % ---------------------------------------------------------------------- \examples{ ## iris data set data(iris) dat <- subset(iris, select = -Species) cl <- iris$Species method <- c("svm","pcalda") pars <- valipars(sampling="cv",niter = 10, nreps = 5) res <- mbinest(dat,cl,choices=c("setosa"), method=method, pars = pars, kernel="linear") ## combine prediction accuracy, AUC and margin z <- round(cbind(res$acc,res$auc,res$mar),digits=3) colnames(z) <- c(paste(method,".acc", sep=""),paste(method,".auc", sep=""), paste(method,".mar", sep="")) } \keyword{classif} <file_sep>/man/osc.Rd % wll-05-06-2007: % \name{osc} \alias{osc} \alias{osc.default} \alias{osc.formula} \alias{print.osc} \alias{summary.osc} \alias{print.summary.osc} \title{ Orthogonal Signal Correction (OSC) } \description{ Data pre-processing by orthogonal signal correction (OSC). } \usage{ osc(x, \dots) \method{osc}{default}(x, y, method="wold",center=TRUE,osc.ncomp=4,pls.ncomp=10, tol=1e-3, iter=20,\dots) \method{osc}{formula}(formula, data = NULL, \dots, subset, na.action = na.omit) } % ---------------------------------------------------------------------------- \arguments{ \item{formula}{ A formula of the form \code{groups ~ x1 + x2 + \dots} That is, the response is the grouping factor and the right hand side specifies the (non-factor) discriminators. } \item{data}{ Data frame from which variables specified in \code{formula} are preferentially to be taken. } \item{x}{ A matrix or data frame containing the explanatory variables if no formula is given as the principal argument. } \item{y}{ A factor specifying the class for each observation if no formula principal argument is given. } \item{method}{ A method for calculating OSC weights, loadings and scores. 
The following methods are supported: \itemize{ \item \code{wold:} Original Wold et al approach. \item \code{sjoblom:} Sjoblom et al approach. \item \code{wise:} Wise and Gallagher approach. } } \item{center}{ A logical value indicating whether the data set should be centred by column-wise. } \item{osc.ncomp}{ The number of components to be used in the OSC calculation. } \item{pls.ncomp}{ The number of components to be used in the PLS calculation. } \item{tol}{ A scalar value of tolerance for OSC computation. } \item{iter}{ The number of iteration used in OSC calculation. } \item{\dots}{ Arguments passed to or from other methods. } \item{subset}{ An index vector specifying the cases to be used in the training sample. } \item{na.action}{ A function to specify the action to be taken if \code{NA}s are found. The default action is \code{na.omit}, which leads to rejection of cases with missing values on any required variable. An alternative is \code{na.fail}, which causes an error if \code{NA} cases are found. } } % ---------------------------------------------------------------------------- \value{ An object of class \code{osc} containing the following components: \item{x}{ A matrix of OSC corrected data set. } \item{R2}{ R2 statistics. It is calculated as the fraction of variation in X after OSC correction for the calibration (training) data. } \item{angle}{ An angle used for checking if scores \code{t} is orthogonal to \code{y}. An angle close to 90 degree means that orthogonality is achieved in the correction process. } \item{w}{ A matrix of OSC weights. } \item{p}{ A matrix of OSC loadings. } \item{t}{ A matrix of OSC scores. } \item{call}{ The (matched) function call. } \item{center}{ A logical value indicating whether the data set has been centred by column-wise. } \item{osc.ncomp}{ The number of component used in OSC computation. } \item{pls.ncomp}{ The number of component used in PLS computation. } \item{method}{ The OSC algorithm used. 
} } % ---------------------------------------------------------------------------- \note{ This function may be called giving either a formula and optional data frame, or a matrix and grouping factor as the first two arguments. } % ---------------------------------------------------------------------------- \references{ <NAME>., <NAME>., <NAME>., <NAME>.(1998). Orthogonal signal correction of near infrared spectra. \emph{Chemometrics Intell. Lab. Syst.}, 44: 175-185. <NAME>., <NAME>., <NAME>. (2001). Direct orthogonal signal correction. \emph{Chemometrics Intell. Lab. Syst.}, 56: 13-25. <NAME>., <NAME>., <NAME>., <NAME>., <NAME>. (1998). An evaluation of orthogonal signal correction applied to calibration transfer of near infrared spectra. \emph{Chemometrics Intell. Lab. Syst.},44: 229-244. <NAME>., <NAME>. and <NAME>. (2002). An investigation of orthogonal correction algorithms and their characteristics. \emph{Journal of Chemometrics}, 16:176-188. <NAME>. and <NAME>. \emph{http://www.eigenvector.com/MATLAB/OSC.html}. 
} % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{ \code{\link{predict.osc}}, \code{\link{osc_wold}}, \code{\link{osc_sjoblom}}, \code{\link{osc_wise}} } % ---------------------------------------------------------------------------- \examples{ data(abr1) cl <- factor(abr1$fact$class) dat <- abr1$pos ## divide data as training and test data idx <- sample(1:nrow(dat), round((2/3)*nrow(dat)), replace=FALSE) ## construct train and test data train.dat <- dat[idx,] train.t <- cl[idx] test.dat <- dat[-idx,] test.t <- cl[-idx] ## build OSC model based on the training data res <- osc(train.dat, train.t, method="wise", osc.ncomp=2, pls.ncomp=4) names(res) res summary(res) ## pre-process test data by OSC test.dat.1 <- predict(res,test.dat)$x } \keyword{manip} <file_sep>/man/frankvali.Rd % lwc-13-11-2006: First draft % lwc-12-10-2007: major changes % lwc-17-02-2010: Add fs.cl and fs.cl.1 % \name{frankvali} \alias{frankvali} \alias{fs.cl} \alias{fs.cl.1} \alias{frankvali.formula} \alias{frankvali.default} \alias{print.frankvali} \alias{summary.frankvali} \alias{print.summary.frankvali} \title{ Estimates Feature Ranking Error Rate with Resampling } % ---------------------------------------------------------------------------- \description{ Estimates error rate of feature ranking with resampling methods. 
} % ---------------------------------------------------------------------------- \usage{ frankvali(dat, \dots) \method{frankvali}{default}(dat,cl,cl.method = "svm", fs.method="fs.auc", fs.order=NULL, fs.len="power2", pars = valipars(), tr.idx=NULL,\dots) \method{frankvali}{formula}(formula, data = NULL, \dots, subset, na.action = na.omit) fs.cl(dat,cl,fs.order=colnames(dat), fs.len=1:ncol(dat), cl.method = "svm", pars = valipars(), all.fs=FALSE, \dots) fs.cl.1(dat,cl,fs.order=colnames(dat), cl.method = "svm", pars = valipars(), agg_f=FALSE,\dots) } % ---------------------------------------------------------------------------- \arguments{ \item{formula}{ A formula of the form \code{groups ~ x1 + x2 + \dots} That is, the response is the grouping factor and the right hand side specifies the (non-factor) discriminators. } \item{data}{ Data frame from which variables specified in \code{formula} are preferentially to be taken. } \item{dat}{ A matrix or data frame containing the explanatory variables if no formula is given as the principal argument. } \item{cl}{ A factor specifying the class for each observation if no formula principal argument is given. } \item{cl.method}{ Classification method to be used. Any classification methods can be employed if they have method \code{predict} (except \code{knn}) with output of predicted class label or one component with name of \code{class} in the returned list, such as \code{randomForest}, \code{svm}, \code{knn} and \code{lda}. } \item{fs.method}{ Feature ranking method to be used. If \code{fs.order} is not \code{NULL}, it will be overridden. } \item{fs.order}{ A vector of ordered feature order. In \code{frankvali} its default is \code{NULL} and then the feature selection will be performed on the training data. } \item{fs.len}{ Feature length used for validation. For details, see \code{\link{get.fs.len}}. 
} \item{pars}{ A list of resampling scheme method such as \emph{Cross-validation}, \emph{Stratified cross-validation}, \emph{Leave-one-out cross-validation}, \emph{Randomised validation (holdout)}, \emph{Bootstrap}, \emph{.632 bootstrap} and \emph{.632 plus bootstrap}, and control parameters for the calculation of accuracy. See \code{\link{valipars}} for details. } \item{tr.idx}{ User defined index of training samples. Can be generated by \code{trainind}. } \item{all.fs}{ A logical value indicating whether all features should be used for evaluation. } \item{agg_f}{ A logical value indicating whether aggregated features should be used for evaluation. } \item{\dots}{Additional parameters to \code{fs.method} or \code{cl.method}.} \item{subset}{ Optional vector, specifying a subset of observations to be used. } \item{na.action}{ Function which indicates what should happen when the data contains \code{NA}'s, defaults to \code{\link{na.omit}}. } } % ---------------------------------------------------------------------------- \details{ These functions validate the selected feature subsets by classification and resampling methods. It can take any classification model if its argument format is \code{model(formula, data, subset, ...)} and their corresponding method \code{predict.model(object, newdata, ...)} can either return the only predicted class label or in a list with name as \code{class}, such as \code{lda} and \code{pcalda}. The resampling method can be one of \code{cv}, \code{scv}, \code{loocv}, \code{boot}, \code{632b} and \code{632pb}. The feature ranking method can take one of \code{fs.rf}, \code{fs.auc}, \code{fs.welch}, \code{fs.anova}, \code{fs.bw}, \code{fs.snr}, \code{fs.kruskal}, \code{fs.relief} and \code{fs.rfe}. 
} % ---------------------------------------------------------------------------- \value{ \code{frankvali} returns an object of class \code{frankvali} including the components: \item{fs.method}{Feature ranking method used.} \item{cl.method}{Classification method used.} \item{fs.len}{Feature lengths used.} \item{fs.rank}{Final feature ranking. It is obtained based on \code{fs.list} by Borda vote method.} \item{err.all}{Error rate for all computation.} \item{err.iter}{Error rate for each iteration.} \item{err.avg}{Average error rate for all iterations.} \item{sampling}{Sampling scheme used.} \item{niter}{Number of iterations.} \item{nboot}{ Number of bootstrap replications if the sampling method is one of \code{boot}, \code{632b} and \code{632pb}. } \item{nfold}{Fold of cross-validations if the sampling is \code{cv} or \code{scv}.} \item{nrand}{Number of replications if the sampling is \code{random}.} \item{fs.list}{Feature list of all computation if \code{fs.order} is \code{NULL}.} \code{fs.cl} and \code{fs.cl.1} return a matrix with columns of \code{acc} (accuracy), \code{auc}(area under ROC curve) and \code{mar}(class margin). } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{feat.rank.re}}, \code{\link{frank.err}}, \code{\link{valipars}}, \code{\link{boxplot.frankvali}}, \code{\link{get.fs.len}} } \note{ \code{fs.cl} is the simplified version of \code{frankvali}. Both \code{frankvali} and \code{fs.cl} are used for validation of aggregated features from top to bottom only, but \code{fs.cl.1} can be used for validation of either individual or aggregated features. 
} % ---------------------------------------------------------------------------- \examples{ data(abr1) dat <- abr1$pos x <- preproc(dat[,110:500], method="log10") y <- factor(abr1$fact$class) dat <- dat.sel(x, y, choices=c("1","2")) x.1 <- dat[[1]]$dat y.1 <- dat[[1]]$cls len <- c(1:20,seq(25,50,5),seq(60,90,10),seq(100,300,50)) pars <- valipars(sampling="boot",niter=2, nreps=4) res <- frankvali(x.1,y.1,cl.method = "knn", fs.method="fs.auc", fs.len=len, pars = pars) res summary(res) boxplot(res) \dontrun{ ## or apply feature selection with re-sampling procedure at first fs <- feat.rank.re(x.1,y.1,method="fs.auc",pars = pars) ## then estimate error of feature selection. res.1 <- frankvali(x.1,y.1,cl.method = "knn", fs.order=fs$fs.order, fs.len=len, pars = pars) res.1 ## use formula data.bin <- data.frame(y.1,x.1) pars <- valipars(sampling="cv",niter=2,nreps=4) res.2 <- frankvali(y.1~., data=data.bin,fs.method="fs.rfe",fs.len=len, cl.method = "knn",pars = pars) res.2 ## examples of fs.cl and fs.cl.1 fs <- fs.rf(x.1, y.1) res.3 <- fs.cl(x.1,y.1,fs.order=fs$fs.order, fs.len=len, cl.method = "svm", pars = pars, all.fs=TRUE) ord <- fs$fs.order[1:50] ## aggregated features res.4 <- fs.cl.1(x.1,y.1,fs.order=ord, cl.method = "svm", pars = pars, agg_f=TRUE) ## individual feature res.5 <- fs.cl.1(x.1,y.1,fs.order=ord, cl.method = "svm", pars = pars, agg_f=FALSE) } } \keyword{classif} <file_sep>/man/fs.pca.Rd % wll-04-02-2008 \name{fs.pca} \alias{fs.pca} \title{ Feature Selection by PCA } \description{ Feature selection using PCA loadings. } \usage{ fs.pca(x,thres=0.8, \dots) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A data frame or matrix of data set. } \item{thres}{ The threshold of the cumulative percentage of PC's explained variances. } \item{\dots}{ Additional arguments to \code{\link{prcomp}}. 
} } % ---------------------------------------------------------------------------- \value{ A list with components: \item{fs.rank}{A vector of feature ranking scores.} \item{fs.order}{A vector of feature order from best to worst.} \item{stats}{A vector of measurements.} } \details{ Since PCA loadings is a matrix with respect to PCs, the Mahalanobis distance of loadings is applied to select the features. (Other ways, for example, the sum of absolute values of loadings, or squared root of loadings, can be used.) It should be noticed that this feature selection method is unsupervised. } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{feat.rank.re}} } % ---------------------------------------------------------------------- \examples{ ## prepare data set data(abr1) cls <- factor(abr1$fact$class) dat <- abr1$pos ## dat <- abr1$pos[,110:1930] ## fill zeros with NAs dat <- mv.zene(dat) ## missing values summary mv <- mv.stats(dat, grp=cls) mv ## View the missing value pattern ## filter missing value variables ## dim(dat) dat <- dat[,mv$mv.var < 0.15] ## dim(dat) ## fill NAs with mean dat <- mv.fill(dat,method="mean") ## log transformation dat <- preproc(dat, method="log10") ## select class "1" and "2" for feature ranking ind <- grepl("1|2", cls) mat <- dat[ind,,drop=FALSE] mat <- as.matrix(mat) grp <- cls[ind, drop=TRUE] ## feature selection by PCA res <- fs.pca(dat) names(res) } \keyword{classif} <file_sep>/man/preproc.Rd % lwc-01-12-2006: % lwc-22-02-2010: Add other functions \name{preproc} \alias{preproc} \alias{preproc.sd} \alias{preproc.const} \title{ Pre-process Data Set } \description{ Pre-process a data frame or matrix by different methods. } \usage{ preproc (x, y=NULL,method="log",add=1) preproc.sd(x, y=NULL, na.rm = FALSE) preproc.const(x, y, tol = 1.0e-4) } \arguments{ \item{x}{ A numeric data frame or matrix to be pre-processed. } \item{y}{ A factor specifying the group. 
It is only used by the method \code{TICnorm} in \code{preproc}. } \item{method}{ A method used to pre-process the data set. The following methods are supported: \itemize{ \item \code{center:} Centering \item \code{auto:} Auto scaling \item \code{range:} Range scaling \item \code{pareto:} Pareto scaling \item \code{vast:} Vast scaling \item \code{level:} Level scaling \item \code{log:} Log transformation (default) \item \code{log10:} Log 10 transformation \item \code{sqrt:} Square root transformation \item \code{asinh:} Inverse hyperbolic sine transformation \item \code{TICnorm:} TIC normalisation } } \item{na.rm}{A logical value indicating whether NA values should be stripped before the computation proceeds. } \item{add}{ A numeric value for addition used in the logarithmic transformations \code{log} and \code{log10}. } \item{tol}{ A tolerance to decide if a matrix is singular; it will reject variables and linear combinations of unit-variance variables whose variance is less than tol^2. } } \value{ A pre-processed data set. } \details{ \code{preproc} transforms the data set by the provided \code{method}. \code{preproc.sd} removes variables which have (near) zero S.D with or without respect to class/grouped information. \code{preproc.const} removes variables that appear to be constant within groups / classes. } \author{ <NAME> } % ---------------------------------------------------------------------- \references{ <NAME>., <NAME>., <NAME>., <NAME>. and <NAME>. 
(2006), Centering, scaling, and transformations: improving the biological information content of metabolomics data, \emph{BMC Genomics}, 7:142 } \examples{ data(abr1) cl <- factor(abr1$fact$class) dat <- abr1$pos ## normalise data set using "TICnorm" z.1 <- preproc(dat, y=cl, method="TICnorm") ## scale data set using "log10" z.2 <- preproc(dat,method="log10", add=1) ## or scale data set using "log" and "TICnorm" sequentially z.3 <- preproc(dat,method=c("log","TICnorm"), add=0.1) } \keyword{manip} <file_sep>/man/maccest.Rd % lwc-30-01-2007: First draft % \name{maccest} \alias{maccest} \alias{maccest.formula} \alias{maccest.default} \alias{print.maccest} \alias{summary.maccest} \alias{print.summary.maccest} \title{Estimation of Multiple Classification Accuracy} % ------------------------------------------------------------------------ \description{ Estimation of classification accuracy by multiple classifiers with resampling procedure and comparisons of multiple classifiers. } % ------------------------------------------------------------------------ \usage{ maccest(dat, \dots) \method{maccest}{default}(dat, cl, method="svm", pars = valipars(), tr.idx = NULL, comp="anova",\dots) \method{maccest}{formula}(formula, data = NULL, \dots, subset, na.action = na.omit) } % ------------------------------------------------------------------------ \arguments{ \item{formula}{ A formula of the form \code{groups ~ x1 + x2 + \dots} That is, the response is the grouping factor and the right hand side specifies the (non-factor) discriminators. } \item{data}{ Data frame from which variables specified in \code{formula} are preferentially to be taken. } \item{dat}{ A matrix or data frame containing the explanatory variables if no formula is given as the principal argument. } \item{cl}{ A factor specifying the class for each observation if no formula principal argument is given. } \item{method}{ A vector of multiple classification methods to be used. 
Classifiers, such as \code{randomForest}, \code{svm}, \code{knn} and \code{lda}, can be used. For details, see \code{note} below. } \item{pars}{ A list of resampling scheme such as \emph{Leave-one-out cross-validation}, \emph{Cross-validation}, \emph{Randomised validation (holdout)} and \emph{Bootstrap}, and control parameters for the calculation of accuracy. See \code{\link{valipars}} for details. } \item{tr.idx}{ User defined index of training samples. Can be generated by \code{trainind}. } \item{comp}{ Comparison method of multiple classifier. If \code{comp} is \code{anova}, the multiple comparisons are performed by \code{ANOVA} and then the pairwise comparisons are performed by \code{HSDTukey}. If \code{comp} is \code{fried}, the multiple comparisons are performed by \code{Friedman Test} and the pairwise comparisons are performed by \code{Wilcoxon Test}. } \item{\dots}{Additional parameters to \code{method}.} \item{subset}{ Optional vector, specifying a subset of observations to be used. } \item{na.action}{ Function which indicates what should happen when the data contains \code{NA}'s, defaults to \code{\link{na.omit}}. } } % --------------------------------------------------------------------- \details{ The accuracy rates for classification are obtained using techniques such as \emph{Random Forest}, \emph{Support Vector Machine}, \emph{k-Nearest Neighbour Classification} and \emph{Linear Discriminant Analysis} based on sampling methods, including \emph{Leave-one-out cross-validation}, \emph{Cross-validation}, \emph{Randomised validation (holdout)} and \emph{Bootstrap}. 
} % -------------------------------------------------------------------- \note{ The \code{maccest} can take any classification model if its argument format is \code{model(formula, data, subset, na.action, ...)} and their corresponding method \code{predict.model(object, newdata, ...)} can either return the only predicted class label or in a list with name as \code{class}, such as \code{lda} and \code{pcalda}. As for the multiple comparisons by \code{ANOVA}, the following assumptions should be considered: \itemize{ \item The samples are randomly and independently selected. \item The populations are normally distributed. \item The populations all have the same variance. } All the comparisons are based on the results of all iterations. \code{\link{aam.mcl}} is a simplified version which returns \code{acc} (accuracy), \code{auc}(area under ROC curve) and \code{mar}(class margin). } % --------------------------------------------------------------------- \value{ An object of class \code{maccest}, including the components: \item{method}{Classification method used.} \item{acc}{Accuracy rate.} \item{acc.iter}{Accuracy rate of each iteration.} \item{acc.std}{Standard deviation of accuracy rate.} \item{mar}{Prediction margin.} \item{mar.iter}{Prediction margin of each iteration.} \item{auc}{The area under receiver operating curve (AUC).} \item{auc.iter}{AUC of each iteration.} \item{comp}{Multiple comparison method used.} \item{h.test}{Hypothesis test results of multiple comparison.} \item{gl.pval}{Global or overall p-value.} \item{mc.pval}{Pairwise comparison p-values.} \item{sampling}{Sampling scheme used.} \item{niter}{Number of iterations.} \item{nreps}{Number of replications in each iteration. } \item{conf.mat}{Overall confusion matrix.} \item{acc.boot}{ A list of bootstrap error such as \code{.632} and \code{.632+} if the validation method is bootstrap. 
} } % ----------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{accest}}, \code{\link{aam.mcl}}, \code{\link{valipars}}, \code{\link{plot.maccest}} \code{\link{trainind}}, \code{\link{boxplot.maccest}},\code{\link{classifier}} } % ----------------------------------------------------------------------- \examples{ # Iris data data(iris) x <- subset(iris, select = -Species) y <- iris$Species method <- c("randomForest","svm","pcalda","knn") pars <- valipars(sampling="boot", niter = 3, nreps=5, strat=TRUE) res <- maccest(Species~., data = iris, method=method, pars = pars, comp="anova") ## or res <- maccest(x, y, method=method, pars=pars, comp="anova") res summary(res) plot(res) boxplot(res) oldpar <- par(mar = c(5,10,4,2) + 0.1) plot(res$h.test$tukey,las=1) ## plot the tukey results par(oldpar) } \keyword{classif} <file_sep>/man/osc_wise.Rd % wll-05-06-2007: % \name{osc_wise} \alias{osc_wise} \title{ Orthogonal Signal Correction (OSC) Approach by Wise and Gallagher. } \description{ Orthogonal signal correction (OSC) approach by Wise and Gallagher. } \usage{ osc_wise(x, y, center=TRUE,osc.ncomp=4,pls.ncomp=10, tol=1e-3,iter=20,\dots) } % ---------------------------------------------------------------------------- \arguments{ \item{x}{ A numeric data frame or matrix to be pre-processed. } \item{y}{ A vector or factor specifying the class for each observation. } \item{center}{ A logical value indicating whether the data set should be centred by column-wise. } \item{osc.ncomp}{ The number of components to be used in the OSC calculation. } \item{pls.ncomp}{ The number of components to be used in the PLS calculation. } \item{tol}{ A scalar value of tolerance for OSC computation. } \item{iter}{ The number of iteration used in OSC calculation. } \item{\dots}{ Arguments passed to or from other methods. 
} } % ---------------------------------------------------------------------------- \value{ A list containing the following components: \item{x}{ A matrix of OSC corrected data set. } \item{R2}{ R2 statistics. It is calculated as the fraction of variation in X after OSC correction. } \item{angle}{ An angle used for checking if scores \code{t} is orthogonal to \code{y}. An angle close to 90 degree means that orthogonality is achieved in the correction process. } \item{w}{ A matrix of OSC weights. } \item{p}{ A matrix of OSC loadings. } \item{t}{ A matrix of OSC scores. } \item{center}{ A logical value indicating whether the data set has been centred by column-wise. } } % ---------------------------------------------------------------------------- \references{ <NAME>., <NAME>., <NAME>. (2001). Direct orthogonal signal correction. \emph{Chemometrics Intell. Lab. Syst.}, 56: 13-25. <NAME>. and <NAME>. \emph{http://www.eigenvector.com/MATLAB/OSC.html}. } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{ \code{\link{osc}}, \code{\link{predict.osc}}, \code{\link{osc_sjoblom}}, \code{\link{osc_wold}} } % ---------------------------------------------------------------------------- \examples{ data(abr1) cl <- factor(abr1$fact$class) dat <- abr1$pos ## divide data as training and test data idx <- sample(1:nrow(dat), round((2/3)*nrow(dat)), replace=FALSE) ## construct train and test data train.dat <- dat[idx,] train.t <- cl[idx] test.dat <- dat[-idx,] test.t <- cl[-idx] ## build OSC model based on the training data res <- osc_wise(train.dat, train.t) names(res) ## pre-process test data by OSC test.dat.1 <- predict.osc(res,test.dat)$x } \keyword{manip} <file_sep>/man/trainind.Rd \name{trainind} \alias{trainind} \title{ Generate Index of Training Samples } \description{ Generate index of training samples. 
The sampling scheme includes leave-one-out cross-validation (\code{loocv}), cross-validation (\code{cv}), randomised validation (\code{random}) and bootstrap (\code{boot}). } \usage{ trainind(cl, pars = valipars()) } \arguments{ \item{cl}{ A factor or vector of class. } \item{pars}{A list of sampling parameters for generating training index. It has the same structure as the output of \code{valipars}. See \code{valipars} for details.} } \value{ Returns a list of training index. } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{\code{\link{valipars}}} \examples{ ## A trivia example x <- as.factor(sample(c("a","b"), 20, replace=TRUE)) table(x) pars <- valipars(sampling="rand", niter=2, nreps=4, strat=TRUE,div=2/3) (temp <- trainind(x,pars=pars)) (tmp <- temp[[1]]) x[tmp[[1]]];table(x[tmp[[1]]]) ## train idx x[tmp[[2]]];table(x[tmp[[2]]]) x[tmp[[3]]];table(x[tmp[[3]]]) x[tmp[[4]]];table(x[tmp[[4]]]) x[-tmp[[1]]];table(x[-tmp[[1]]]) ## test idx x[-tmp[[2]]];table(x[-tmp[[2]]]) x[-tmp[[3]]];table(x[-tmp[[3]]]) x[-tmp[[4]]];table(x[-tmp[[4]]]) # iris data set data(iris) dat <- subset(iris, select = -Species) cl <- iris$Species ## generate 5-fold cross-validation samples cv.idx <- trainind(cl, pars = valipars(sampling="cv", niter=2, nreps=5)) ## generate leave-one-out cross-validation samples loocv.idx <- trainind(cl, pars = valipars(sampling = "loocv")) ## generate bootstrap samples with 25 replications boot.idx <- trainind(cl, pars = valipars(sampling = "boot", niter=2, nreps=25)) ## generate randomised samples with 1/4 division and 10 replications. 
rand.idx <- trainind(cl, pars = valipars(sampling = "rand", niter=2, nreps=10, div = 1/4)) } \keyword{manip} <file_sep>/man/dat.sel.Rd % lwc-16-09-2006 % lwc-15-02-2010 % lwc-15-04-2010: add combn.pw \name{dat.sel} \alias{dat.sel} \alias{combn.pw} \title{ Generate Pairwise Data Set } \description{ Generate index or data set of pairwise combination based on class labels. } \usage{ combn.pw(cls, choices = NULL) dat.sel(dat, cls, choices = NULL) } % -------------------------------------------------------------------- \arguments{ \item{dat}{ A data frame or matrix of data set. } \item{cls}{ A factor or vector of class labels or categorical data. } \item{choices}{ The vector or list of class labels to be chosen for binary combination. } } \details{ If \code{choices} is \code{NULL}, all binary combinations will be computed. If \code{choices} has one class label, the comparisons between this one and any other classes will be calculated. If \code{choices} has more than two classes, all binary combinations in \code{choices} will be generated. For details, see \code{examples} below. } % ---------------------------------------------------------------------- \value{ \code{combn.pw} returns a data frame of index (logical values). \code{dat.set} returns a list of list with components: \item{dat}{ Pairwise data set. } \item{cls}{ Pairwise class label. } } \seealso{ Applications of \code{dat.sel} in \code{\link{pca.plot.wrap}}, \code{\link{lda.plot.wrap}} and \code{\link{pls.plot.wrap}}. } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------- \examples{ data(iris) x <- subset(iris, select = -Species) y <- iris$Species ## generate data set with class "setosa" and "virginica" binmat.1 <- dat.sel(x,y,choices=c("setosa","virginica")) names(binmat.1) ## generate data sets for "setosa" vs other classes. These are: ## "setosa" and "versicolor", "setosa" and "virginica". 
binmat.2 <- dat.sel(x,y,choices=c("setosa")) names(binmat.2) ## generate data set with combination of each class. These are: ## "setosa" and "versicolor", "setosa" and "virginica", ## "versicolor" and "virginica" binmat.3 <- dat.sel(x,y,choices= NULL) names(binmat.3) data(abr1) cls <- factor(abr1$fact$class) dat <- preproc(abr1$pos, method="log") ## There are some examples of 'choices' choices <- c("2") choices <- c("2","3","4") choices <- list(c("2","3"),c("4","5")) choices <- NULL idx <- combn.pw(cls,choices=choices) dat.pw <- dat.sel(dat, cls,choices=choices) } \keyword{manip} <file_sep>/man/mc.fried.Rd % lwc-30-01-2007: \name{mc.fried} \alias{mc.fried} \title{ Multiple Comparison by 'Friedman Test' and Pairwise Comparison by 'Wilcoxon Test' } \description{ Performs multiple comparison by \code{Friedman test} and pairwise comparison by \code{Wilcoxon Test}. } \usage{ mc.fried(x, p.adjust.method = p.adjust.methods,\dots) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A matrix or data frame to be tested. 
} \item{p.adjust.method}{Method for adjusting p values (see \code{\link{p.adjust}}).} \item{\dots}{Additional arguments pass to \code{friedman.test} or \code{pairwise.wilcox.test}.} } % ---------------------------------------------------------------------- \value{ A list with components: \item{fried}{Hypothesis test results of \code{friedman.test}.} \item{wilcox}{Hypothesis test results of \code{pairwise.wilcox.test}.} \item{gl.pval}{Global or overall p value returned by \code{friedman.test}.} \item{mc.pval}{Pairwise p value returned by \code{pairwise.wilcox.test}.} } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{maccest}}, \code{\link{mc.anova}} } % ---------------------------------------------------------------------- \examples{ # Iris data data(iris) x <- subset(iris, select = -Species) y <- iris$Species method <- c("randomForest","svm","pcalda","knn") pars <- valipars(sampling="cv", niter = 10, nreps=4) res <- maccest(x, y, method=method, pars=pars, comp="fried",kernel="linear") res htest <- mc.fried(res$acc.iter) } \keyword{classif} <file_sep>/man/valipars.Rd % lwc-14-09-2006: First draft % lwc-27-10-2006: change name and minor changes. % \name{valipars} \alias{valipars} \title{Generate Control Parameters for Resampling} \description{ Generate the control parameters for resampling process. } % ---------------------------------------------------------------------------- \usage{ valipars(sampling="cv", niter=10, nreps=10, strat=FALSE,div = 2/3) } % ---------------------------------------------------------------------------- \arguments{ \item{sampling}{Sampling scheme. Valid options are: \itemize{ \item \code{loocv}. Leave-one-out cross-validation \item \code{cv}. Cross-validation (default) \item \code{rand}. Randomised validation (holdout) \item \code{boot}. Bootstrap } } \item{niter}{Number of iteration or repeat for validation.} \item{nreps}{ Number of replications in each iteration. 
} \item{strat}{ A logical value indicating whether the stratification should be applied to \code{cv}, \code{rand} and \code{boot}. } \item{div}{ Proportion of data used for training in randomised validation method. } } % ---------------------------------------------------------------------------- \details{ \code{valipars} provides a list of control parameters for the resampling or validation in the process of accuracy evaluation or feature selection process. } % ---------------------------------------------------------------------------- \value{ An object of class \code{valipars} containing all the above parameters (either the defaults or the user specified values). } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{trainind}} } \examples{ ## generate control parameters for the re-sampling scheme with 5-fold ## cross-validation and iteration of 10 times valipars(sampling = "cv", niter = 10, nreps = 5) ## generate control parameters for the re-sampling scheme with ## 25-replication bootstrap and iteration of 100 times valipars(sampling = "boot", niter = 100, nreps = 25,strat=TRUE) ## generate control parameters for the re-sampling scheme with ## leave-one-out cross-validation valipars(sampling = "loocv") } \keyword{manip} <file_sep>/man/fs.auc.Rd % lwc-21-03-2007 \name{fs.auc} \alias{fs.auc} \title{ Feature Selection Using Area under Receiver Operating Curve (AUC) } \description{ Feature selection using area under receiver operating curve (AUC). } \usage{ fs.auc(x,y,\dots) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A data frame or matrix of data set. } \item{y}{ A factor or vector of class. } \item{\dots}{ Arguments to pass(current ignored). } } \note{ This function is for two-class problem only. 
} % ---------------------------------------------------------------------------- \value{ A list with components: \item{fs.rank}{A vector of feature ranking scores.} \item{fs.order}{A vector of feature order from best to worst.} \item{stats}{A vector of measurements.} } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------- \examples{ ## prepare data set data(abr1) cls <- factor(abr1$fact$class) dat <- abr1$pos ## dat <- abr1$pos[,110:1930] ## fill zeros with NAs dat <- mv.zene(dat) ## missing values summary mv <- mv.stats(dat, grp=cls) mv ## View the missing value pattern ## filter missing value variables ## dim(dat) dat <- dat[,mv$mv.var < 0.15] ## dim(dat) ## fill NAs with mean dat <- mv.fill(dat,method="mean") ## log transformation dat <- preproc(dat, method="log10") ## select class "1" and "2" for feature ranking ind <- grepl("1|2", cls) mat <- dat[ind,,drop=FALSE] mat <- as.matrix(mat) grp <- cls[ind, drop=TRUE] ## apply AUC method for feature selection/ranking res <- fs.auc(mat,grp) names(res) } \keyword{classif} <file_sep>/man/fs.pls.Rd % wll-22-11-2007 \name{fs.pls} \alias{fs.pls} \alias{fs.plsvip} \alias{fs.plsvip.1} \alias{fs.plsvip.2} \title{ Feature Selection Using PLS } \description{ Feature selection using coefficient of regression and VIP values of PLS. } \usage{ fs.pls(x,y, pls="simpls",ncomp=10,\dots) fs.plsvip(x,y, ncomp=10,\dots) fs.plsvip.1(x,y, ncomp=10,\dots) fs.plsvip.2(x,y, ncomp=10,\dots) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A data frame or matrix of data set. } \item{y}{ A factor or vector of class. } \item{pls}{ A method for calculating PLS scores and loadings. The following methods are supported: \itemize{ \item \code{simpls:} SIMPLS algorithm. \item \code{kernelpls:} kernel algorithm. \item \code{oscorespls:} orthogonal scores algorithm. 
} For details, see \code{\link[pls]{simpls.fit}}, \code{\link[pls]{kernelpls.fit}} and \code{\link[pls]{oscorespls.fit}} in package \pkg{pls}. } \item{ncomp}{ The number of components to be used. } \item{\dots}{ Arguments passed to or from other methods. } } % ---------------------------------------------------------------------------- \value{ A list with components: \item{fs.rank}{A vector of feature ranking scores.} \item{fs.order}{A vector of feature order from best to worst.} \item{stats}{A vector of measurements.} } \details{ \code{fs.pls} ranks the features by regression coefficient of PLS. Since the coefficient is a matrix due to the dummy multiple response variables designed for the classification (category) problem, the Mahalanobis distance of coefficient is applied to select the features. (Other ways, for example, the sum of absolute values of coefficient, or squared root of coefficient, can be used.) \code{fs.plsvip} and \code{fs.plsvip.1} carry out feature selection based on the the Mahalanobis distance and absolute values of PLS's VIP, respectively. \code{fs.plsvip.2} is similar to \code{fs.plsvip} and \code{fs.plsvip.1}, but the category response is not treated as dummy multiple response matrix. 
}
% ----------------------------------------------------------------------------
\author{
  <NAME>
}
\seealso{
  \code{\link{feat.rank.re}}
}
% ----------------------------------------------------------------------
\examples{
## prepare data set
data(abr1)
cls <- factor(abr1$fact$class)
dat <- abr1$pos
## dat <- abr1$pos[,110:1930]

## fill zeros with NAs
dat <- mv.zene(dat)

## missing values summary
mv <- mv.stats(dat, grp=cls)
mv    ## View the missing value pattern

## filter missing value variables
## dim(dat)
dat <- dat[,mv$mv.var < 0.15]
## dim(dat)

## fill NAs with mean
dat <- mv.fill(dat,method="mean")

## log transformation
dat <- preproc(dat, method="log10")

## select class "1" and "2" for feature ranking
ind <- grepl("1|2", cls)
mat <- dat[ind,,drop=FALSE]
mat <- as.matrix(mat)
grp <- cls[ind, drop=TRUE]

## apply PLS methods for feature selection
res.pls <- fs.pls(mat,grp, ncomp=4)
res.plsvip <- fs.plsvip(mat,grp, ncomp=4)
res.plsvip.1 <- fs.plsvip.1(mat,grp, ncomp=4)
res.plsvip.2 <- fs.plsvip.2(mat,grp, ncomp=4)

## check differences among these methods
fs.order <- data.frame(pls = res.pls$fs.order,
                       plsvip = res.plsvip$fs.order,
                       plsvip.1 = res.plsvip.1$fs.order,
                       plsvip.2 = res.plsvip.2$fs.order)
head(fs.order, 20)
}
\keyword{classif}
<file_sep>/R/mt_accest.R
#' ========================================================================
#' Calculate classification accuracy.
#' History:
#'   31-07-06: Commence
#'   15-09-06: Not restrict types of classification. Just check the validity
#'             of models and the output of predict method.
#'   10-01-07: some minor changes.
#'   30-03-07: revise
#'   30-06-07: Add posterior, margin and AUC
#'   03-07-07: For loocv, no AUC are available. (Should check in code)
#'   14-12-07: method and pred.func can be a function or a character string
#'             naming function.
#'   11-05-12: Re-write calculation of err, auc and mar. Keep res.all.
#'
#' Estimate classification accuracy of classifier `method` on (dat, cl)
#' under the resampling scheme described by `pars` (see valipars()).
#' Arguments:
#'   dat       data frame or matrix; rows are samples, columns features.
#'   cl        class labels, coerced to factor; must have >= 2 levels.
#'   method    classifier: a function or the name of one, called via
#'             classifier() (defined elsewhere in this package).
#'   pred.func prediction function applied to fitted models.
#'   pars      resampling control list from valipars().
#'   tr.idx    optional user-supplied list of training-index lists; when
#'             NULL it is generated by trainind() (defined elsewhere).
#' Returns an object of class "accest": accuracy/AUC/margin summaries per
#' iteration plus the overall confusion matrix; for bootstrap sampling the
#' .632/.632+ style errors are added (via boot.err(), defined elsewhere).
accest.default <- function(dat, cl, method, pred.func = predict,
                           pars = valipars(), tr.idx = NULL, ...) {
  #' validity checking
  if (missing(dat) || missing(cl)) {
    stop("data set or class are missing")
  }
  if (missing(method)) { #' 01-06-2008: you can use: if(is.na(method))
    stop("'method' is missing")
  }
  #' if(length(dim(dat)) != 2)
  #'   stop("'mat' must be a matrix or data frame")
  #' if (!is.factor (cl)) stop("cl must be a factor.")
  cl <- as.factor(cl) #' some classifier need it as factor, such as SVM.
  if (nrow(dat) != length(cl)) stop("mat and cl don't match.")
  if (length(unique(cl)) < 2) {
    stop("Classification needs at least two classes.")
  }
  if (any(is.na(dat)) || any(is.na(cl))) {
    stop("NA is not permitted in data set or class labels.")
  }

  dat <- as.matrix(dat)
  n <- nrow(dat)
  rownames(dat) <- NULL # strip off the row names
  #' lwc-15-09-2006: avoid the warnings messages when calling SVM with
  #' bootstrap methods. The warnings: 'row.names duplicated' come from:
  #' df <- na.action(data.frame(y, x))
  #' in svm.default's '#' subsetting and na-handling for matrices' segment.

  #' wll-14-12-2007: either a function or a character string naming the
  #' function to be pred.func.
  if (is.function(method)) {
    method <- deparse(substitute(method))
  }
  pred.func <- if (is.function(pred.func)) {
    pred.func
  } else if (is.character(pred.func)) {
    get(pred.func)
  } else {
    eval(pred.func)
  }

  #' construct index of train data; cv with more folds than samples is
  #' silently downgraded to leave-one-out.
  if (is.null(tr.idx)) {
    if (pars$sampling == "cv" && pars$nreps > n) {
      pars$sampling <- "loocv"
    }
    tr.idx <- trainind(cl, pars = pars)
  }
  #' keep niter/nreps consistent with a user-supplied tr.idx
  pars$niter <- length(tr.idx)
  pars$nreps <- length(tr.idx[[1]])

  #' Main computation: fit/predict on every train/test split.
  res.all <- list()
  cat("\nACCEST Iteration (", method, ", ", pars$niter, "):", sep = "")
  for (i in 1:pars$niter) {
    cat(" ", i, sep = "")
    flush.console() #' for Windows
    train.ind <- tr.idx[[i]]
    res <- list()
    for (j in 1:length(train.ind)) {
      dat.tr <- dat[train.ind[[j]], , drop = F]
      cl.tr <- cl[train.ind[[j]]]
      dat.te <- dat[-train.ind[[j]], , drop = F]
      cl.te <- cl[-train.ind[[j]]]
      res[[j]] <- classifier(dat.tr, cl.tr, dat.te, cl.te,
        method = method,
        pred.func = pred.func
      )
    }
    names(res) <- paste("reps_", seq(1, pars$nreps), sep = "")
    res.all[[i]] <- res
  }
  cat("\n")
  names(res.all) <- paste("Iter_", seq(1, pars$niter), sep = "")

  #' calculate error rate: rows = iterations, cols = replications
  err.all <- lapply(res.all, function(x) {
    func <- function(x) sapply(x, function(y) y$err)
    func(x)
  })
  err.all <- do.call(rbind, err.all)
  err.iter <- rowMeans(err.all) #' apply(err.all,1,mean)
  err <- mean(err.iter)

  #' calculate mean of margin (only when classifier() supplied one)
  if (!is.null(res.all[[1]][[1]]$margin)) {
    mar.all <- lapply(res.all, function(x) {
      func <- function(x) sapply(x, function(y) mean(y$margin))
      func(x)
    })
    mar.all <- do.call(rbind, mar.all)
    mar.iter <- rowMeans(mar.all)
    mar <- mean(mar.iter)
  } else {
    mar.all <- NULL
    mar.iter <- NULL
    mar <- NULL
  }

  #' calculate AUC; no AUC for loocv (single test sample per fold)
  if (!is.null(res.all[[1]][[1]]$auc) && pars$sampling != "loocv") {
    auc.all <- lapply(res.all, function(x) {
      func <- function(x) sapply(x, function(y) y$auc)
      func(x)
    })
    auc.all <- do.call(rbind, auc.all)
    auc.iter <- rowMeans(auc.all)
    auc <- mean(auc.iter)
  } else {
    auc.all <- NULL
    auc.iter <- NULL
    auc <- NULL
  }

  #' calculate accuracy rate
  acc.all <- 1 - err.all
  acc.iter <- 1 - err.iter
  acc <- 1 - err
  #' if (pars$niter > 1) acc.std <- sd(acc.iter) else acc.std <- NULL

  #' process bootstrap acc: needs the resubstitution fit on all data
  if (pars$sampling == "boot") {
    resub <- classifier(dat, cl, dat.te = NULL, cl.te = NULL,
      method = method, ...
    )
    err.boot <- lapply(err.iter, function(x) boot.err(x, resub))
    #' reshape
    err.boot <- t(sapply(err.boot, function(x) do.call("c", x)))
    acc.boot <- 1 - err.boot
  }

  #' overall confusion matrix
  #' lwc-01-06-2007: Do not use sapply here because sometimes get non-equal
  #' fold.
  all.cl <- lapply(res.all, function(x) {
    foo <- function(x) lapply(x, function(y) y$cl)
    foo(x)
  })
  all.cl <- unlist(unlist(all.cl, recursive = F, use.names = F))
  all.pred <- lapply(res.all, function(x) {
    foo <- function(x) lapply(x, function(y) y$pred)
    foo(x)
  })
  all.pred <- unlist(unlist(all.pred, recursive = F, use.names = F))
  #' overall confusion matrix
  conf <- table(all.cl, all.pred)

  #' construct output
  ret <- list(
    method = method, acc = acc, acc.iter = acc.iter, acc.all = acc.all,
    auc = auc, auc.iter = auc.iter, auc.all = auc.all,
    mar = mar, mar.iter = mar.iter, mar.all = mar.all,
    err = err, err.iter = err.iter, err.all = err.all,
    sampling = switch(pars$sampling,
      "loocv" = "leave-one-out cross-validation",
      "cv" = "cross validation",
      "boot" = "bootstrap",
      "rand" = "randomised validation (holdout)"
    ),
    niter = pars$niter, nreps = pars$nreps, conf = conf,
    res.all = res.all #' lwc-10-05-2012: keep all results.
  )
  if (pars$sampling == "boot") {
    ret$acc.boot <- acc.boot
  }
  class(ret) <- "accest"
  return(ret)
}

#' ========================================================================
#' wll-01-07-2007: add AUC and margin
#' Print method for "accest": summary of accuracy/AUC/margin, the
#' resampling scheme and the overall confusion matrix. Returns x invisibly.
print.accest <- function(x, digits = 3, ...) {
  #' cat("\nCall:\n", deparse(x$call), "\n")
  cat("\nMethod:\t\t\t", x$method)
  cat("\nAccuracy:\t\t", round(x$acc, digits))
  if (!is.null(x$auc)) {
    cat("\nAUC:\t\t\t", round(x$auc, digits))
  }
  if (!is.null(x$mar)) {
    cat("\nMargin:\t\t\t", round(x$mar, digits))
  }
  cat("\n\nNo. of iteration:\t", x$niter)
  cat("\nSampling:\t\t", x$sampling)
  cat("\nNo. of replications:\t", x$nreps)
  cat("\n\nOverall confusion matrix of training data:\n")
  print(x$conf)
  invisible(x)
}

#' ========================================================================
#' Summary method: same object, re-classed so the richer print method runs.
summary.accest <- function(object, ...) {
  structure(object, class = "summary.accest")
}

#' ========================================================================
#' Print for summary.accest: basic print plus per-iteration accuracies.
print.summary.accest <- function(x, digits = 3, ...) {
  print.accest(x)
  cat("\nAccuracy on each iteration:\n")
  print(round(x$acc.iter, digits))
  invisible(x)
}

#' =======================================================================
#' Plot method: per-iteration accuracy as a line/point plot; needs niter>1.
plot.accest <- function(x, main = NULL, xlab = NULL, ylab = NULL, ...) {
  if (x$niter == 1) {
    stop("Number of iteration (niter) must be greater than 1")
  }
  if (is.null(main)) {
    main <- paste("Performance of `", x$method, "'",
      sep = " ", "(", x$sampling, ")"
    )
  }
  if (is.null(xlab)) xlab <- "Index of niter"
  if (is.null(ylab)) ylab <- "Accuracies"
  plot(x$acc.iter,
    type = "b", main = main, xlab = xlab, ylab = ylab,
    col = "blue", ...
  )
}

#' ========================================================================
#' Generic entry point; dispatches to accest.default or accest.formula.
accest <- function(dat, ...) UseMethod("accest")

#' ========================================================================
#' Formula interface for accest: builds the model matrix (no intercept)
#' and delegates to accest.default. NA handling follows na.action.
accest.formula <- function(formula, data = NULL, ..., subset,
                           na.action = na.omit) {
  call <- match.call()
  if (!inherits(formula, "formula")) {
    stop("method is only for formula objects")
  }
  m <- match.call(expand.dots = FALSE)
  if (identical(class(eval.parent(m$data)), "matrix")) {
    m$data <- as.data.frame(eval.parent(m$data))
  }
  m$... <- NULL
  m$scale <- NULL
  m[[1]] <- as.name("model.frame")
  m$na.action <- na.action
  m <- eval(m, parent.frame())
  Terms <- attr(m, "terms")
  attr(Terms, "intercept") <- 0
  x <- model.matrix(Terms, m)
  y <- model.extract(m, "response")
  attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action")
  ret <- accest.default(x, y, ..., na.action = na.action)
  ret$call <- call
  ret$call[[1]] <- as.name("accest")
  ret$terms <- Terms
  if (!is.null(attr(m, "na.action"))) {
    ret$na.action <- attr(m, "na.action")
  }
  class(ret) <- c("accest.formula", class(ret))
  return(ret)
}

#' =======================================================================
#' Estimates the accuracy of pairwise comparison.
#' History:
#'   10-10-2006: Create
#'   03-12-2006: process fold of cv or scv is greater than number of data
#'               row
#'   10-01-2007: vectorised
#' For every binary class combination chosen by `choices` (via .dat.sel,
#' defined elsewhere), run accest and collect the accuracy of each pair.
binest <- function(dat, cl, choices = NULL, method, pars = valipars(), ...) {
  #' construct the pairwise comparison data set
  dat.sub <- .dat.sel(dat, cl, choices = choices)
  len <- sapply(dat.sub$cl, length)
  #' downgrade cv to loocv if any pair has fewer samples than folds
  if (pars$sampling == "cv" && pars$nreps > min(len)) {
    pars$sampling <- "loocv"
  }
  if (pars$sampling == "loocv") pars$niter <- 1
  n <- nrow(dat.sub$com)
  acc <- sapply(c(1:n), function(x) {
    accest(dat.sub$dat[[x]], dat.sub$cl[[x]],
      method = method, pars = pars, ...
    )$acc
  })
  com <- apply(dat.sub$com, 1, paste, collapse = " ~ ")
  names(acc) <- com

  ret <- list(
    com = dat.sub$com, acc = acc, method = method, niter = pars$niter,
    sampling = pars$sampling
  )
  if (pars$sampling != "loocv") ret$nreps <- pars$nreps
  return(ret)
}

#' =======================================================================
#' wll-29-04-2008: Wrapper function for re-sampling based classification
#' with multiple classifiers
#' Returns a matrix with one row per classifier (acc, auc, mar).
aam.mcl <- function(x, y, method, pars = valipars(), ...) {
  res <- lapply(method, function(m) {
    cat("\nClassifier = :", m, "\n")
    flush.console()
    aam.cl(x, y, method = m, pars, ...)
  })
  names(res) <- method
  res <- do.call(rbind, res)
}

#' ========================================================================
#' wll-29-04-2008: Calculate acc, auc and mar by calling accest
#' AUC/margin are NA when the classifier does not provide them.
aam.cl <- function(x, y, method, pars = valipars(), ...) {
  val <- accest(x, y, method = method, pars = pars, ...)
  acc <- val$acc
  auc <- ifelse(!is.null(val$auc), val$auc, NA)
  mar <- ifelse(!is.null(val$mar), val$mar, NA)
  res <- c(acc = acc, auc = auc, mar = mar)
  return(round(res, digits = 3))
}

#' ========================================================================
#' lwc-05-07-2006: Calculate classification rate.
#' lwc-16-09-2006: Arguments checking.
#' lwc-30-10-2006: Convert arguments as vectors
#' lwc-05-05-2010: Change function name from class.rate
#' lwc-19-05-2-12: Add kappa and change class.error to class.acc.
#' Usage:
#'   a = c(1,2,1,2,1,2,1,2,1,2)
#'   b = c(2,2,2,1,1,2,1,2,1,2)
#'   c = rbind(a,b)
#'   d = c(1,1,1,1,1,1,1,1,1,1)
#'   cl.rate(a,b)
#'   cl.rate(a,d)
#'   cl.rate(a,c)
#' Agreement between observed and predicted labels: accuracy, error,
#' confusion matrix with per-class accuracy, and Cohen's kappa
#' (via classAgreement, e1071-style helper defined elsewhere).
cl.rate <- function(obs, pre) {
  observed <- as.vector(obs)
  predicted <- as.vector(pre)
  if (length(observed) != length(predicted)) {
    stop(" 'observed' and 'predicted' must have the same length.")
  }
  acc <- sum(observed == predicted) / length(observed)
  #' NOTE: If arguments are factor, it will trigger an error when they
  #' have different levels.
  err <- (1 - acc)
  con <- table(observed = observed, predicted = predicted)
  kappa <- classAgreement(con)$kappa #' from e1071
  con <- cbind(con, class.acc = diag(con) / rowSums(con))
  res <- list(acc = acc, err = err, con.mat = con, kappa = kappa)
  return(res)
}

#' =======================================================================
#' lwc-28-04-2010: Classification performance.
#' lwc-19-05-2012: add kappa, con.mat and positive in the returned list.
#' Note: Also see cl.auc and cl.roc for ROC assessment.
#' True positive rate: tpr = TP/P = TP/TP+FN
#' False positive rate: fpr = FP/N = FP/FP+TN
#' Accuracy: acc = P*tpr + N*(1-fpr)
#' References: <NAME>, ROC Graphs: Notes and Practical Considerations
#' for Data Mining Researchers, January 7, 2003.
#' Arguments:
#'   obs Factor or vector of observed class.
#'   pre Factor or vector of predicted class.
#'   pos Which level of obs is treated as "positive"; defaults to the
#'       second factor level.
#' Returns a list: acc, tpr, fpr, sens, spec, con.mat (confusion matrix
#' with per-class accuracy column), kappa and the positive label used.
#' Kappa comes from classAgreement() — defined elsewhere (e1071-style);
#' NOTE(review): confirm availability at package level.
cl.perf <- function(obs, pre, pos = levels(as.factor(obs))[2]) {
  obs <- factor(obs)
  pre <- factor(pre)
  pos <- match.arg(pos, levels(obs))

  obs <- relevel(obs, ref = pos) #' put pos in the 1st position
  pre <- relevel(pre, ref = pos) #' put pos in the 1st position

  #' confusion matrix (built BEFORE relabelling so original labels show)
  con <- table(obs, pre)
  kappa <- classAgreement(con)$kappa
  con <- cbind(con, class.acc = diag(con) / rowSums(con))

  #' change levels as pos(Positive) and neg (Negative)
  levels(obs) <- list("neg" = levels(obs)[2], "pos" = levels(obs)[1])
  levels(pre) <- list("neg" = levels(pre)[2], "pos" = levels(pre)[1])
  #' levels(obs) <- levels(pre) <- c("pos", "neg")

  #' Temp function for number of True or False (positive or negative)
  #' (Not used here)
  tf <- function(x) {
    x.obs <- grepl(x, obs)
    x.pre <- grepl(x, pre)
    Tr <- sum(x.pre & x.obs)
    Fa <- sum(x.pre) - Tr
    list(Tr = Tr, Fa = Fa)
  }
  #' pos.tf <- tf("pos")

  #' counts of true/false positives
  pos.obs <- grepl("pos", obs)
  pos.pre <- grepl("pos", pre)
  TP <- sum(pos.pre & pos.obs)
  Pos <- sum(pos.pre)
  FP <- Pos - TP

  #' neg.tf <- tf("neg")
  #' counts of true/false negatives
  neg.obs <- grepl("neg", obs)
  neg.pre <- grepl("neg", pre)
  TN <- sum(neg.pre & neg.obs)
  Neg <- sum(neg.pre)
  FN <- Neg - TN

  P <- TP + FN
  N <- FP + TN

  tpr <- TP / (TP + FN)
  fpr <- FP / (FP + TN)
  acc <- (TP + TN) / (P + N)
  spec <- 1 - fpr #' Specificity
  sens <- tpr #' Sensitivity or recall

  perf <- list(
    acc = acc, tpr = tpr, fpr = fpr, sens = sens, spec = spec,
    con.mat = con, kappa = kappa, positive = pos
  )
  return(perf)
}

#' =====================================================================
#' lwc-11-03-2007: Area under Receiver Operating Curve (ROC).
#' lwc-05-05-2010: Major modification.
#' lwc-01-05-2012: adjust auc
#' Cutoff-independent AUC of the scores `stat` against two-class `label`.
#' Notes:
#'   1.) AUC varies between 0.5 and 1.0 for sensible models; the higher
#'       the better, so values below 0.5 are folded as 1 - auc.
#'   2.) The AUC equals the probability that a randomly chosen positive
#'       instance is ranked above a randomly chosen negative one
#'       (equivalent to the Wilcoxon rank test).
#'       -- An introduction to ROC analysis by <NAME>
#'   3.) Implementation follows the auROC idea from package limma.
#' Arguments:
#'   stat  numeric decision scores, one per sample.
#'   label two-level class labels.
#'   pos   which level counts as positive (default: second level).
cl.auc <- function(stat, label, pos = levels(as.factor(label))[2]) {
  if (missing(label) || missing(stat)) stop("arguments miss")
  if (length(label) != length(stat)) stop("lengths differ")

  label <- factor(label)
  if (nlevels(label) != 2) stop("'label' must be two categorical data")

  #' map the positive level to "1" and the other to "0"
  pos <- match.arg(pos, levels(label))
  label <- relevel(label, ref = pos)
  levels(label) <- list("0" = levels(label)[2], "1" = levels(label)[1])

  #' walk the samples from highest to lowest score and accumulate the
  #' fraction of positives seen so far; averaging that fraction over the
  #' negatives yields the AUC.
  desc <- order(stat, decreasing = T)
  ind <- as.numeric(levels(label))[as.integer(label)][desc]
  frac.pos <- cumsum(ind) / sum(ind)
  auc <- mean(frac.pos[ind == 0])

  #' fold AUCs below chance level
  auc[auc < 0.5] <- 1 - auc[auc < 0.5]
  return(auc)
}

#' =====================================================================
#' lwc-11-03-2007:Area under Receiver Operating Curve (ROC)
#' Note: 1. Internal function with no documents.
#'       2. It is old version of cl.auc.
#'       3. It is modified from auROC function in package limma.
#' Area under the ROC curve for score vector 'stat' against a 0/1 label
#' vector (1 = positive). Older, stricter sibling of cl.auc: labels must
#' literally be 0/1 and no adjustment to [0.5, 1] is applied.
auc <- function(stat, label) {
  if (missing(label) || missing(stat)) stop("arguments miss")
  if (length(label) != length(stat)) stop("lengths differ")

  if (is.factor(label)) {
    label <- as.numeric(levels(label))[as.integer(label)]
  }
  if (!all(sort(unique(label)) == c(0, 1))) {
    stop("'label' must take values 0 or 1")
  }

  #' rank labels by decreasing score; average cumulative positive
  #' fraction at the negatives (Wilcoxon estimate of the AUC)
  label <- label[order(stat, decreasing = T)]
  tmp <- cumsum(label) / sum(label)
  auc <- mean(tmp[label == 0])
  return(auc)
}

#' =======================================================================
#' lwc-12-03-2007: ROC
#' lwc-05-05-2010: Major modification.
#' lwc-15-05-2012: Major changes:
#' 1.) The cutoff points are sorted stat
#' 2.) The rule is >=, not >.
#' Note: 1. This function is partly taken from ROCfuns.R in package ROC.
#' Note: sensitivity and specificity are estimated as:
#' sensitivity (true positive rate) =
#' Positives correctly classified / Total positives
#' specificity (true negative rate) =
#' Negatives correctly classified / Total negatives
#'
#' Compute per-threshold performance (cutoff, acc, tpr, fpr, sens, spec)
#' plus the cutoff-independent AUC; optionally draw the ROC curve.
#' Returns list(perf = data.frame, auc = numeric, positive = character).
cl.roc <- function(stat, label, pos = levels(as.factor(label))[2],
                   plot = TRUE, ...)
{
  #' decision rule: predict positive (1) when score >= threshold
  roc_rule <- function(thres, x) ifelse(x >= thres, 1, 0)

  if (missing(label) || missing(stat)) stop("arguments miss")
  if (length(label) != length(stat)) stop("lengths differ")

  #' convert label as factor
  label <- factor(label)
  if (nlevels(label) != 2) stop("'label' must be two categorical data")

  #' sort out which level is Positive
  pos <- match.arg(pos, levels(label))
  label <- relevel(label, ref = pos)
  #' Reorder Levels of Factor: positive level becomes "1", the other "0"
  levels(label) <- list("0" = levels(label)[2], "1" = levels(label)[1])

  #' thresholds
  thres <- unique(sort(stat))
  #' prediction based on thresholds
  pred <- lapply(thres, roc_rule, x = stat)

  perf <- lapply(pred, function(x) {
    sens <- mean(x[label == 1])
    spec <- mean(1 - x[label == 0])
    fpr <- 1 - spec #' false positive rate: 1 - spec
    tpr <- sens #' true positive rate: sens
    acc <- sum(label == x) / length(label)
    c(acc = acc, tpr = tpr, fpr = fpr, sens = sens, spec = spec)
  })
  perf <- do.call(rbind, perf)
  perf <- cbind(cutoff = thres, perf)
  perf <- data.frame(perf)

  auc <- cl.auc(stat, label)

  if (plot) { #' plot ROC curve
    main <- "ROC Curve"
    xlab <- "False positive Rate"
    ylab <- "True positive Rate"
    #' xlab <- "1 - Specificity"
    #' ylab <- "Sensitivity"
    plot(perf$fpr, perf$tpr,
      type = "n", xlim = c(0, 1), ylim = c(0, 1),
      main = main, xlab = xlab, ylab = ylab, ...
    )
    points(perf$fpr, perf$tpr, col = "red", type = "b", pch = 19)
    abline(
      h = seq(0, 1, by = .1), v = seq(0, 1, by = .1), lty = 3,
      lwd = 0.5, col = "grey"
    )
    abline(0, 1)
  }

  return(list(perf = perf, auc = auc, positive = pos))
}

#' =======================================================================
#' lwc-15-05-2012: Another version of ROC which call cl.perf.
#' lwc-19-05-2012: minor changes.
#' Note: 1.) The cutoff points are sorted stat
#' 2.) The rule is >=, not >.
#' 3.) call cl.perf to get acc, tpr, fpr, sens and spec.
#'
#' Variant of cl.roc that delegates the per-threshold performance
#' measures to cl.perf (defined elsewhere in this package).
cl.roc.1 <- function(stat, label, pos = levels(as.factor(label))[2],
                     plot = TRUE, ...)
{
  if (missing(label) || missing(stat)) stop("arguments miss")
  if (length(label) != length(stat)) stop("lengths differ")

  label <- factor(label)
  if (nlevels(label) != 2) stop("'label' must be two categorical data")

  #' sort out which level is Positive
  pos <- match.arg(pos, levels(label))
  label <- relevel(label, ref = pos) #' The 1st level is positive

  #' thresholds
  thres <- unique(sort(stat))
  #' prediction based on thresholds: class labels, not 0/1 codes
  pred <- lapply(thres, function(x) {
    tmp <- ifelse(stat >= x, levels(label)[1], levels(label)[2])
    tmp <- factor(tmp, levels = levels(label))
  })

  #' tidy up perf
  perf <- lapply(pred, function(x) {
    cl.perf(label, x, pos = pos)[c("acc", "tpr", "fpr", "sens", "spec")]
  })
  perf <- do.call(rbind, perf)
  perf <- cbind(cutoff = thres, perf)
  perf <- data.frame(perf)

  #' cutoff - independent AUC
  auc <- cl.auc(stat, label)

  if (plot) { #' plot ROC curve
    main <- "ROC Curve"
    xlab <- "False positive Rate"
    ylab <- "True positive Rate"
    #' xlab <- "1 - Specificity"
    #' ylab <- "Sensitivity"
    plot(perf$fpr, perf$tpr,
      type = "n", xlim = c(0, 1), ylim = c(0, 1),
      main = main, xlab = xlab, ylab = ylab, ...
    )
    points(perf$fpr, perf$tpr, col = "red", type = "b", pch = 19)
    abline(
      h = seq(0, 1, by = .1), v = seq(0, 1, by = .1), lty = 3,
      lwd = 0.5, col = "grey"
    )
    abline(0, 1)
  }

  return(list(perf = perf, auc = auc, positive = pos))
}

#' =======================================================================
#' lwc-01-12-06: Wrapper function for single classifier
#' History:
#' 15-09-06: check predict's output
#' 24-03-07: add dat.te and cl.te as NULLs.
#' 30-06-07: Add the posterior, margin and AUC. I doubt using average of
#' margin as an assessment factor like AUC for classification.
#' 02-07-07: Deal with constant variables which have zero SD. Any
#' possible bugs or mistakes in both programming and algorithm?
#' 04-07-07: check validity of auc
#' 06-07-07: Fix a bug
#' 03-10-07: Restore user defined predict function.
#' 14-12-07: method and pred.func can be a function or a character string
#' naming function.
#' 09-10-08: Spot a potential bug in dealing with collinearity
#' 19-05-12: remove names of margin.
#' 22-05-12: minor changes for dots processing for svm
#'
#' Train 'method' on (dat.tr, cl.tr), predict (dat.te, cl.te) -- or the
#' training data itself when no test set is given -- and return error
#' rate, accuracy, predictions, posterior probabilities, margins and
#' (for two-class problems) AUC.
classifier <- function(dat.tr, cl.tr, dat.te = NULL, cl.te = NULL, method,
                       pred.func = predict, ...) {
  #' lwc-14-12-2007: either a function or a character string naming the
  #' function to be pred.func.
  if (is.function(method)) {
    method <- deparse(substitute(method))
  }
  pred.func <- if (is.function(pred.func)) {
    pred.func
  } else if (is.character(pred.func)) {
    get(pred.func)
  } else {
    eval(pred.func)
  }

  #' 05-10-2007: re-write this wrapper to avoid multiple arguments.
  #' knn has no separate train/predict steps, so wrap it: "training"
  #' just stores the data; prediction runs class::knn.
  if (method == "knn") {
    method <- c("knn.wrap")
    knn.wrap <- function(train, cl, ...) list(train = train, cl = cl, ...)
    pred.func <- function(object, newdata, k = 1, ...) {
      #' knn(train=object$train, test=newdata,cl=object$cl, k=k,...)
      knn(train = object$train, test = newdata, cl = object$cl, k = k)
    }
  }

  #' no test set supplied: evaluate on the training data (resubstitution)
  if (is.null(dat.te) || is.null(cl.te)) {
    dat.te <- dat.tr
    cl.te <- cl.tr
  }

  #' Especially for SVM: drop constant columns (zero SD)
  idx <- which(apply(dat.tr, 2, sd) > .Machine$double.eps)
  dat.tr <- dat.tr[, idx, drop = F]
  dat.te <- dat.te[, idx, drop = F]
  #' 09-10-08: Potential bug here. If idx is invalid, dat.tr will be NA. See
  #' preproc.sd for fixing it. It happens in the situation where
  #' the data set is a single column matrix and all values are
  #' same.

  nte <- length(cl.te) #' number of rows in test data
  dat.all <- rbind(dat.te, dat.tr)
  #' cl.all <- factor(c(cl.te,cl.tr))
  cl.all <- factor(c(as.character(cl.te), as.character(cl.tr)))
  #' Note-04-07-2007: This is proper way to merge factors.

  #' lwc-22-05-2012: minor changes
  if (method != "svm") {
    model <- do.call(method, c(list(dat.tr, cl.tr), list(...)))
  } else { #' pre-process only for SVM
    dots <- list(...)
    #' wl-08-11-2021, Mon: Use this one
    if (hasArg("probability")) dots$probability <- NULL
    # if (hasArg(probability)) dots$probability <- NULL
    model <- do.call(method, c(list(dat.tr, cl.tr), probability = T, dots))
    #' Note-30-06-2007: Using probability = T only for SVM.
  }

  #' pred <- pred.func(model, dat.all,...) # predict the entire data set
  pred <- pred.func(model, data.frame(dat.all), ...) #' lwc-22-05-2012:

  if (!is.list(pred)) {
    pred.te <- pred[1:nte]
    if (method == "svm") {
      prob <- attr(predict(model, dat.all, probability = TRUE), "probabilities")
      prob.te <- prob[1:nte, , drop = F] #' test prob
    } else if (method == "randomForest") {
      prob <- predict(model, dat.all, type = "prob")
      prob.te <- prob[1:nte, , drop = F] #' test prob
    } else {
      prob.te <- NULL
    }
  } else {
    if (!is.null(pred$class)) { # for 'lda' and 'qda'
      pred.te <- pred$class[1:nte]
      prob.te <- pred$posterior[1:nte, , drop = F]
    } else {
      stop("predict does not return a list with component 'class'.")
    }
  }

  #' calculate error rate
  err <- sum(cl.te != pred.te) / length(cl.te)
  #' err <- cl.rate(cl.te, pred.te)$err.rate

  #' Sort the prob by the column names. (for AUC calculation purpose)
  #' if (!is.null(prob.te)) {
  #' dfnames <- colnames(prob.te)
  #' dfnames <- sort(dfnames)
  #' prob.te <- prob.te[,dfnames,drop=F]
  #' }
  #' Note-06-07-07: Above code lines have bugs, e.g. if colnames are like
  #' 3,2,10. After sorting, colnames are 10,2,3 not 2,3,10.

  #' FIX: guard the column re-ordering. prob.te is NULL when the
  #' classifier provides no probabilities (e.g. the knn path above), and
  #' NULL[, ...] raises "incorrect number of dimensions".
  if (!is.null(prob.te)) {
    prob.te <- prob.te[, levels(cl.te), drop = F] #' lwc-06-07-07: for AUC purpose
  }

  #' calculate margin
  if (!is.null(prob.te)) {
    margin <- .marg(prob.te, cl.te)
    names(margin) <- NULL #' lwc-19-05-2012:
    if (length(levels(cl.te)) == 2 && length(cl.te) > 1) {
      #' calculate AUC if two-class problem
      cl.tmp <- cl.te
      levels(cl.tmp) <- c(0, 1)
      stat <- prob.te[, 2]
      auc <- cl.auc(stat, cl.tmp)
      #' auc <- auc(stat,cl.tmp)
    } else {
      auc <- NULL
    }
  } else {
    margin <- NULL
    auc <- NULL
  }

  ret <- list(
    err = err, cl = cl.te, pred = pred.te, posterior = prob.te,
    acc = 1 - err, margin = margin, auc = auc
  )
  return(ret)
}

#' ========================================================================
#' lwc-30-06-2007: calculate the margin of a classifier based on the
#' posterior
#' NOTE: 1. This function hacked from package 'randomForest'. For more
#' description, see package 'randomForest'.
#' 2. Internal function
#'
#' Margin of each sample: posterior of the true class minus the maximum
#' posterior among the other classes. 'prob' is a matrix of posteriors
#' whose column names are the class levels; 'observed' the true classes.
.marg <- function(prob, observed) {
  if (missing(observed) || missing(prob)) stop("arguments miss")
  if (length(observed) != nrow(prob)) stop("lengths differ")

  #' normalise rows if values are not already probabilities
  if (any(prob > 1)) {
    prob <- sweep(prob, 1, rowSums(prob), "/")
  }

  observed <- as.factor(observed)
  mat <- data.frame(prob, observed)
  names(mat) <- c(dimnames(prob)[[2]], "observed")
  #' NOTE-lwc: After data.frame() operation, the colnames may be changed (if
  #' the colnames are numbers). The above line is to restore the original
  #' colnames.

  nlev <- length(levels(observed))
  ans <- apply(mat, 1, function(x) {
    pos <- match(x[nlev + 1], names(x))
    t1 <- as.numeric(x[pos])
    t2 <- max(as.numeric(x[-c(pos, nlev + 1)]))
    t1 - t2
  })
  names(ans) <- observed
  ans
}

#' =========================================================================
#' lwc-16-08-2006: Calculate bootstrap, .632 bootstrap and .632 plus
#' bootstrap error rate
#' History:
#' 16-08-2006: truncate r
#' 23-03-2007: major changes in principles
#'
#' 'err' is the bootstrap error rate; 'resub' is the resubstitution
#' result of classifier() (list with err, cl, pred). Returns apparent,
#' bootstrap, .632 and .632+ error rates.
boot.err <- function(err, resub) {
  #' apparent error rate/re-substitution rate ( not training error rate)
  err.ae <- resub$err
  cl <- resub$cl
  pred <- resub$pred

  #' .632 bootstrap error
  err.b632 <- 0.368 * err.ae + 0.632 * err

  #' gamma: no-information error rate; r: relative overfitting rate
  gamma <-
    sum(outer(cl, pred, function(x, y) ifelse(x == y, 0, 1))) / (length(cl)^2)
  r <- (err - err.ae) / (gamma - err.ae)
  r <- ifelse(err > err.ae & gamma > err.ae, r, 0)
  #' lwc-16-08-2006: if r still falls outside of [0,1], truncate it to 0.
  #' if (r > 1 || r < 0) r <- 0

  errprime <- min(err, gamma)
  #' weight <- .632/(1-.368*r)
  #' err.b632p <- (1-weight)*err.ae + weight*err
  err.b632p <- err.b632 +
    (errprime - err.ae) * (0.368 * 0.632 * r) / (1 - 0.368 * r)

  ret <- list(ae = err.ae, boot = err, b632 = err.b632, b632p = err.b632p)
  return(ret)
}

#' =========================================================================
#' Control parameters for validation using in estimation of classification
#' and feature selection.
#' lwc-03-08-2006: commence
#' lwc-21-03-2007: revise
#'
#' Build a "valipars" control object describing the resampling scheme:
#' sampling one of "loocv", "cv", "boot", "rand"; niter outer
#' iterations; nreps folds/repetitions; strat = stratified sampling;
#' div = training fraction (only for "rand").
valipars <- function(sampling = "cv", niter = 10, nreps = 10,
                     strat = FALSE, div = 2 / 3) {
  sampling <- match.arg(sampling, c("loocv", "cv", "boot", "rand"))

  if (sampling == "cv" && nreps < 2) {
    stop("Number of fold (nreps) for cv must be greater than 1.")
  }

  if (sampling == "loocv") {
    res <- list(sampling = sampling, niter = 1)
  } else if (sampling == "rand") {
    res <- list(
      sampling = sampling, niter = niter, nreps = nreps,
      strat = strat, div = div
    )
  } else {
    res <- list(
      sampling = sampling, niter = niter, nreps = nreps,
      strat = strat
    )
  }

  class(res) <- "valipars"
  return(res)
}

#' =======================================================================
#' Generate index for training or feature ranking
#' lwc-27-10-2006: commence
#' lwc-29-10-2006: empty class checking
#' lwc-04-12-2006: support iteration information
#' lwc-13-02-2007: Fix a bug (set.seed(71))
#' lwc-22-03-2007: revise
#'
#' Generate training-sample indices for each iteration of the scheme in
#' 'pars'. Returns a list of length niter ("Iter_1", ...), each itself a
#' list of nreps index vectors. Index sets are re-drawn until every
#' class has >= 2 training and >= 1 test sample.
trainind <- function(cl, pars = valipars()) {
  if (!inherits(pars, "valipars")) {
    stop("pars not of class valipars")
  }
  cl <- factor(cl) #' lwc-17-09-2007: drop the factor levels

  idx.func <- function(cl, pars = valipars()) {
    n <- length(cl)

    if (pars$sampling == "loocv") {
      train.ind <- lapply(1:n, function(i) seq(1, n)[-i])
    } else {
      #' 1) each class must have at least 2 samples in training index
      #' 2) each class must have at least 1 sample in test index
      emp_f <- T
      while (emp_f) {
        emp_f <- !emp_f
        switch(pars$sampling,
          "cv" = {
            train.ind <- cv.idx(cl, pars$nreps, strat = pars$strat)
          },
          "rand" = {
            train.ind <- rand.idx(cl, pars$nreps,
              strat = pars$strat,
              div = pars$div
            )
          },
          "boot" = {
            train.ind <- boot.idx(cl, pars$nreps, strat = pars$strat)
          }
        )
        for (i in 1:length(train.ind)) {
          if (any(table(cl[train.ind[[i]]]) < 2) ||
            any(table(cl[-train.ind[[i]]]) < 1)) {
            emp_f <- !emp_f
            break
          }
        } #' end of for
      } #' end of while
    } #' end of else

    return(train.ind)
  }

  tr.idx <- list()
  for (i in 1:pars$niter) {
    tr.idx[[i]] <- idx.func(cl, pars = pars)
  }
  names(tr.idx) <- paste("Iter_", 1:pars$niter, sep = "")
  return(tr.idx)
}

#' ========================================================================
#' Bootstrap training indices: nreps draws of size n with replacement,
#' optionally stratified within each class of 'x'.
boot.idx <- function(x, nreps, strat = FALSE) {
  n <- length(x)

  if (strat) {
    x <- factor(x) #' drops the levels that do not occur
    idx <- sample(1:n, n, replace = F)
    #' shuffle the original x,
    #' idx <- c(1:n)
    x <- x[idx]

    v <- length(levels(x))
    #' index of each factor
    #' s.idx <- lapply(1:v, function(i) idx[which(x == levels(x)[i])])
    #' lwc-05-10-2010: bug fixed
    s.idx <- lapply(1:v, function(i) which(x == levels(x)[i]))

    train.ind <- lapply(1:nreps, function(y) { #' y is not used.
      tmp <- lapply(s.idx, function(x) sample(x, length(x), replace = T))
      do.call("c", tmp)
    })
    #' shuffle the results
    train.ind <-
      lapply(train.ind, function(x) sample(x, length(x), replace = F))
  } else {
    train.ind <- lapply(1:nreps, function(x) sample(1:n, n, replace = T))
  }

  return(train.ind)
}

#' ========================================================================
#' Randomised-validation (holdout) training indices: nreps draws of
#' trunc(n * div) samples without replacement, optionally stratified.
rand.idx <- function(x, nreps, strat = FALSE, div = 2 / 3) {
  n <- length(x)

  if (strat) {
    x <- factor(x) #' drops the levels that do not occur
    idx <- sample(1:n, n, replace = F)
    #' shuffle the original x,
    #' idx <- c(1:n)
    x <- x[idx]

    v <- length(levels(x))
    #' index of each factor
    #' s.idx <- lapply(1:v, function(i) idx[which(x == levels(x)[i])])
    #' lwc-05-10-2010: bug fixed
    s.idx <- lapply(1:v, function(i) which(x == levels(x)[i]))

    train.ind <- lapply(1:nreps, function(y) { #' y is not used.
      tmp <- lapply(
        s.idx,
        function(x) sample(x, trunc(length(x) * div), replace = F)
      )
      do.call("c", tmp)
    })
    #' shuffle the results
    train.ind <-
      lapply(train.ind, function(x) sample(x, length(x), replace = F))
  } else {
    train.ind <-
      lapply(1:nreps, function(x) sample(1:n, trunc(n * div), replace = F))
  }

  return(train.ind)
}

#' ========================================================================
#' k-fold cross-validation training indices (nreps folds), optionally
#' stratified. Fold construction adapted from package 'ipred'.
cv.idx <- function(x, nreps, strat = FALSE) {
  #' One change has been made to get different results for each calling.

  #' from package ipred: split indices of y into k test subsets
  ssubset <- function(y, k, strat = TRUE) {
    #' if (!is.factor(y)) stop("y is not of class factor")
    #' lwc-14-04-2007
    if (!is.factor(y)) y <- as.factor(y)
    N <- length(y)
    nlevel <- table(y)
    nindx <- list()

    #' lwc-29-10-2006: changed
    indx <- sample(1:N, N, replace = F)
    y <- y[indx]
    #' indx <- 1:N

    outindx <- list()
    if (strat) {
      for (j in 1:length(nlevel)) {
        nindx <- c(nindx, list(indx[which(y == levels(y)[j])]))
      }
      kmat <- kfoldcv(k, N, nlevel)
      for (i in 1:k) {
        sset <- kmat[, i]
        kindx <- c()
        for (j in 1:length(nlevel)) {
          if (i > 1) {
            kindx <- c(kindx, nindx[[j]][(sum(kmat[
              j,
              1:(i - 1)
            ]) + 1):sum(kmat[j, 1:i])])
          } else {
            kindx <- c(kindx, nindx[[j]][1:kmat[j, 1]])
          }
        }
        kindx <- kindx[!is.na(kindx)]
        outindx <- c(outindx, list(kindx))
      }
      return(outindx)
    } else {
      kmat <- kfoldcv(k, N)
      nindx <- indx
      for (i in 1:k) {
        if (i > 1) {
          outindx <- c(
            outindx,
            list(nindx[(sum(kmat[1:(i - 1)]) + 1):sum(kmat[1:i])])
          )
        } else {
          outindx <- c(outindx, list(nindx[1:kmat[1]]))
        }
      }
    }
    return(outindx)
  }

  #' from package ipred: sizes of the k folds for N samples
  kfoldcv <- function(k, N, nlevel = NULL) {
    if (is.null(nlevel)) { # no stratification
      if (k > N) {
        return(c(rep(1, N), rep(0, k - N)))
      }
      fl <- floor(N / k)
      ce <- ceiling(N / k)
      if (fl == ce) {
        return(rep(fl, k))
      } else {
        return(c(
          rep(ce, round((N / k - fl) * k)),
          rep(fl, round((1 - (N / k - fl)) * k))
        ))
      }
    } else { # stratification
      # if (!is.integer(nlevel)) stop("nlevel is not a vector if integers")
      kmat <- matrix(0, ncol = k, nrow = length(nlevel))
      for (i in 1:length(nlevel)) {
        kmat[i, ] <- kfoldcv(k, nlevel[i])
      }
      return(kmat)
    }
  }

  n <- length(x)
  #' get index of test
  test.ind <- ssubset(x, nreps, strat = strat)
  #' get index of training
  train.ind <- lapply(1:nreps, function(i) seq(1, n)[-test.ind[[i]]])
  #' shuffle the results
  train.ind <-
    lapply(train.ind, function(x) sample(x, length(x), replace = F))

  return(train.ind)
}

#' 1) accest.default
#' 2) print.accest
#' 3) summary.accest
#' 4) print.summary.accest
#' 5) plot.accest
#' 6) accest
#' 7) accest.formula
#' 8) binest
#' 9) aam.mcl
#' 10) aam.cl
#' 11) cl.rate
#' 12) cl.perf
#' 13) cl.auc
#' 14) auc
#' 15) cl.roc
#' 16) cl.roc.1
#' 17) classifier
#' 18) .marg
#' 19) boot.err
#' 20) valipars
#' 21) trainind
#' 22) boot.idx
#' 23) rand.idx
#' 24) cv.idx
<file_sep>/man/feat.rank.re.Rd
% wll-15-03-2007:
%
\name{feat.rank.re}
\alias{feat.rank.re}
\title{
  Feature Ranking with Resampling Method
}
% ----------------------------------------------------------------------------
\description{
  Feature selection with resampling method.
}
% ----------------------------------------------------------------------------
\usage{
feat.rank.re(x, y, method, pars = valipars(), tr.idx = NULL, \dots)
}
% ----------------------------------------------------------------------------
\arguments{
  \item{x}{
    A matrix or data frame containing the explanatory variables.
  }
  \item{y}{
    A factor specifying the class for each observation.
  }
  \item{method}{
    Feature selection method to be used. For each method used in this
    function, the output must be a list including two components,
    \code{fs.rank} (rank scores of features) and \code{fs.order}
    (feature orders in descending order).
  }
  \item{pars}{
    A list of resampling scheme method such as
    \emph{Leave-one-out cross-validation},
    \emph{Cross-validation}, \emph{Bootstrap} and
    \emph{Randomised validation (holdout)}.
    See \code{\link{valipars}} for details.
  }
  \item{tr.idx}{
    User defined index of training samples. Can be generated by
    \code{trainind}.
} \item{\dots}{Additional parameters to \code{method}.} } % ---------------------------------------------------------------------------- \value{ A list with components: \item{method}{Feature selection method used.} \item{fs.rank}{A vector of final feature ranking list.} \item{fs.order}{A vector of final feature order from best to worst.} \item{rank.list}{Feature rank lists of all computation.} \item{order.list}{Feature order lists of all computation.} \item{pars}{Resampling parameters.} \item{tr.idx}{Index of training samples.} \item{all}{All results come from re-sampling.} } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{valipars}}, \code{\link{feat.freq}}, \code{\link{frankvali}} } % ---------------------------------------------------------------------------- \examples{ ## prepare data set data(abr1) cls <- factor(abr1$fact$class) dat <- abr1$pos ## dat <- abr1$pos[,110:1930] ## fill zeros with NAs dat <- mv.zene(dat) ## missing values summary mv <- mv.stats(dat, grp=cls) ## mv ## View the missing value pattern ## filter missing value variables ## dim(dat) dat <- dat[,mv$mv.var < 0.15] ## dim(dat) ## fill NAs with mean dat <- mv.fill(dat,method="mean") ## log transformation dat <- preproc(dat, method="log10") ## select class "1" and "2" for feature ranking ind <- which(cls==1 | cls==2) x <- dat[ind,,drop=FALSE] y <- cls[ind, drop=TRUE] ## feature selection pars <- valipars(sampling="boot",niter=2,nreps=5) tr.idx <- trainind(y,pars=pars) z <- feat.rank.re(x,y,method="fs.auc",pars = pars) names(z) } \keyword{classif} <file_sep>/man/feat.mfs.Rd % wll-12-12-2007: % lwc-16-02-2010: % \name{feat.mfs} \alias{feat.mfs} \alias{feat.mfs.stab} \alias{feat.mfs.stats} \title{ Multiple Feature Selection } \description{ Multiple feature selection with or without resampling procedures. 
} \usage{ feat.mfs(x,y,method,pars = valipars(),is.resam = TRUE, \dots) feat.mfs.stab(fs.res,rank.cutoff = 20,freq.cutoff = 0.5) feat.mfs.stats(fs.stats,cumu.plot=FALSE, main="Stats Plot", ylab="Values", xlab="Index of variable", \dots) } \arguments{ \item{x}{ A matrix or data frame containing the explanatory variables.} \item{y}{ A factor specifying the class for each observation. } \item{method}{ Multiple feature selection/ranking method to be used. } \item{pars}{ A list of resampling scheme. See \code{\link{valipars}} for details. } \item{is.resam}{ A logical value indicating whether the resampling should be applied. } \item{fs.res}{ A list obtained by running \code{feat.mfs} .} \item{rank.cutoff}{Cutoff of top features for frequency calculating.} \item{freq.cutoff}{ Cutoff of feature frequency. } \item{fs.stats}{A matrix of feature statistics or values outputted by \code{feat.mfs} } \item{cumu.plot}{ A logical value indicating the cumulative scores should be plotted. } \item{main,xlab,ylab}{Plot parameters} \item{\dots}{Additional parameters.} } % ------------------------------------------------------------------------- \value{ \code{feat.mfs} returns a list with components: \item{fs.order}{A data frame of feature order from best to worst.} \item{fs.rank}{A matrix of feature ranking scores.} \item{fs.stats}{A matrix of feature statistics or values.} \item{all}{A list of output of \code{\link{feat.rank.re}} for each feature selection method.} \code{feat.mfs.stab} returns a list with components: \item{fs.freq}{Feature frequencies larger than \code{freq.cutoff}. } \item{fs.subs}{Feature with frequencies larger than \code{freq.cutoff}. 
  }
  \item{fs.stab}{Stability rate of feature ranking.}
  \item{fs.cons}{A matrix of feature consensus table based on feature
    frequency.}

  \code{feat.mfs.stats} returns a list with components:
  \item{stats.tab}{Statistical values with their corresponding names.}
  \item{stats.long}{Long-format of statistical values for plotting.}
  \item{stats.p}{ An object of class "trellis".}
}
\details{
  \code{feat.mfs.stab} summarises multiple feature selection only when
  resampling strategy is employed (i.e. \code{is.resam} is \code{TRUE}
  when calling \code{feat.mfs}). It obtains these results based on
  \code{feat.mfs}'s returned value called \code{all}.

  \code{feat.mfs.stats} handles the statistical values or scores. Its
  purpose is to provide guidance in selecting the best number of
  features by spotting the elbow point. This method should work in
  conjunction with plotting of p-values and their corresponding adjusted
  values such as FDR and Bonferroni in the multiple hypothesis test.
}
\note{
  The feature order can be computed directly from the overall statistics
  \code{fs.stats}. It is, however, slightly different from
  \code{fs.order} obtained by rank aggregation when resampling is
  employed.

  The \code{fs.cons} and \code{fs.freq} are computed based on
  \code{fs.order}.
} \author{ <NAME> } \seealso{ \code{\link{feat.rank.re}}, \code{\link{feat.freq}} } % ------------------------------------------------------------------------- \examples{ \dontrun{ library(lattice) data(abr1) dat <- preproc(abr1$pos[,200:400], method="log10") cls <- factor(abr1$fact$class) tmp <- dat.sel(dat, cls, choices=c("1","2")) x <- tmp[[1]]$dat y <- tmp[[1]]$cls fs.method <- c("fs.anova","fs.rf","fs.rfe") fs.pars <- valipars(sampling="cv",niter=10,nreps=5) fs <- feat.mfs(x, y, fs.method, fs.pars) ## with resampling names(fs) ## frequency, consensus and stabilities of feature selection fs.stab <- feat.mfs.stab(fs) print(fs.stab$fs.cons,digits=2,na.print="") ## plot feature selection frequency freq <- fs.stab$fs.freq dotplot(freq$fs.anova, type="o", main="Feature Selection Frequencies") barchart(freq$fs.anova) ## rank aggregation fs.agg <- feat.agg(fs$fs.rank) ## stats table and plotting fs.stats <- fs$fs.stats tmp <- feat.mfs.stats(fs.stats, cumu.plot = TRUE) tmp$stats.p fs.tab <- tmp$stats.tab ## convert to matrix fs.tab <- list2df(un.list(fs.tab)) ## without resampling fs.1 <- feat.mfs(x, y, method=fs.method, is.resam = FALSE) } } \keyword{classif} <file_sep>/man/mv.util.Rd % lwc-19-10-2011: commence \name{mv.util} \alias{mv.stats} \alias{mv.fill} \alias{mv.zene} \title{ Missing Value Utilities } \description{ Functions to handle missing values of data set. } \usage{ mv.stats(dat,grp=NULL,\dots) mv.fill(dat,method="mean",ze_ne = FALSE) mv.zene(dat) } % -------------------------------------------------------------------- \arguments{ \item{dat}{ A data frame or matrix of data set. } \item{grp}{ A factor or vector of class. } \item{method}{Univariate imputation method for missing value. For details, see examples below. } \item{ze_ne}{ A logical value indicating whether the zeros or negatives should be treated as missing values. } \item{\dots}{ Additional parameters to \code{mv.stats} for plotting using \pkg{lattice}. 
  }
}
% ----------------------------------------------------------------------
\value{
  \code{mv.fill} returns an imputed data frame.

  \code{mv.zene} returns an NA-filled data frame.

  \code{mv.stats} returns a list including the components:
  \itemize{
    \item \code{mv.overall}: Overall missing value rate.
    \item \code{mv.var}: Missing value rate per variable (column).
    \item \code{mv.grp}: A matrix of missing value rate for different
      groups if argument \code{grp} is given.
    \item \code{mv.grp.plot}: An object of class \code{trellis} for
      plotting of \code{mv.grp} if argument \code{grp} is given.
  }
}
% ----------------------------------------------------------------------------
\author{
  <NAME>
}
% ----------------------------------------------------------------------
\examples{
data(abr1)
dat <- abr1$pos[,1970:1980]
cls <- factor(abr1$fact$class)

## fill zeros with NAs
dat <- mv.zene(dat)

## missing values summary
mv <- mv.stats(dat, grp=cls)
plot(mv$mv.grp.plot)

## fill NAs with mean
dat.mean <- mv.fill(dat,method="mean")

## fill NAs with median
dat.median <- mv.fill(dat,method="median")

## -----------------------------------------------------------------------
## fill NAs with user-defined methods: two examples given here.

## a.) Random imputation function:
rand <- function(x,...) sample(x[!is.na(x)], sum(is.na(x)), replace=TRUE)

## test this function:
(tmp <- dat[,1])        ## a vector with NAs
## get the randomised values for NAs
rand(tmp)
## fill NAs with method "rand"
dat.rand <- mv.fill(dat,method="rand")

## b.) "Low" imputation function:
"low" <- function(x, ...) {
  max(mean(x,...)
- 3 * sd(x,...), min(x, ...)/2) } ## fill NAs with method "low" dat.low <- mv.fill(dat, method="low") ## summary of imputed data set df.summ(dat.mean) } \keyword{util} <file_sep>/README.md # The mt R package # ## Overview ## This package provides functions for metabolomics data analysis: data preprocessing, orthogonal signal correction, PCA analysis, PCA-DA analysis, PLS-DA analysis, classification, feature selection, correlation analysis, data visualisation and re-sampling strategies. ## Installation from CRAN ```r install.packages("mt") ``` ## Installation from github ## ```r library(devtools) install_github("wanchanglin/mt") ``` ## Usage ## See the help pages of the package for details. <file_sep>/man/pval.util.Rd % lwc-21-03-2007 \name{pval.util} \alias{pval.test} \alias{pval.reject} \title{ P-values Utilities } \description{ Functions to handle p-values of data set. } \usage{ pval.test(x,y, method="oneway.test",\dots) pval.reject(adjp,alpha) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A data frame or matrix of data set. } \item{y}{ A factor or vector of class. } \item{method}{Hypothesis test such as \code{t.test} and \code{wilcox.test}.} \item{adjp}{A matrix-like p-values of simultaneously testing.} \item{alpha}{A vector of cutoff of p-values or Type I error rate.} \item{\dots}{ Arguments to pass. } } % ---------------------------------------------------------------------------- \value{ \code{pval.test} returns a vector of p-values. \code{pval.reject} returns a matrix indicating rejected number according to cutoff. 
} % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------- \examples{ library(lattice) ## Example for pval.test and pval.reject ## prepare data set data(abr1) cls <- factor(abr1$fact$class) dat <- abr1$pos[,200:500] dat <- preproc(dat, method="log") ## select class "1" and "2" for feature ranking ind <- grepl("1|2", cls) dat <- dat[ind,,drop=FALSE] cls <- cls[ind, drop=TRUE] ## univariate p-values and its adjusted p-values pval <- sort(pval.test(dat, cls, method="t.test")) ## adjust p-values pval.ad <- sapply(c("fdr","bonferroni","BY"), function(y){ p.adjust(pval, method=y) }) pval.ad <- cbind(raw=pval, pval.ad) pval.reject(pval.ad,c(0.005, 0.01, 0.05)) ## plot the all p-values tmp <- cbind(pval.ad, idx=1:nrow(pval.ad)) tmp <- data.frame(tmp) # pval_long <- melt(tmp, id="idx") pval_long <- data.frame(idx = tmp$idx, stack(tmp, select = -idx)) pval_long <- pval_long[c("idx", "ind", "values")] names(pval_long) <- c("idx", "variable", "value") pval.p <- xyplot(value~idx, data=pval_long, groups=variable, par.settings = list(superpose.line = list(lty=c(1:7))), as.table = TRUE, type="l", par.strip.text = list(cex=0.65), ylim=c(-0.005, 1.0), ylab="P-values", xlab="Index of variables", main="p-values", auto.key = list(lines=TRUE, points = FALSE,space="right"), panel = function(x, y,...) { panel.xyplot(x, y, ...) panel.abline(h = 0.05, col = "red",lty =2) }) pval.p } \keyword{util} <file_sep>/man/boxplot.frankvali.Rd % lwc-13-11-2006: First draft % \name{boxplot.frankvali} \alias{boxplot.frankvali} \title{ Boxplot Method for Class 'frankvali' } \description{ Boxplot method for error rate of each feature subset. } \usage{ \method{boxplot}{frankvali}(x, \dots) } % ---------------------------------------------------------------------------- \arguments{ \item{x}{ An object of class \code{frankvali}. 
}
  \item{\dots}{ Additional arguments to the plot, such as \code{main},
    \code{xlab} and \code{ylab}. }
}
% ----------------------------------------------------------------------------
\details{
  This function is a method for the generic function \code{boxplot()} for
  class \code{frankvali}. It plots the error rate of each feature subset.
}
\value{
  Returns boxplot of class \code{frankvali}.
}
% ----------------------------------------------------------------------------
\author{
  <NAME>
}
\seealso{
  \code{\link{frankvali}}
}
\examples{
data(abr1)
dat <- abr1$pos[,110:500]
x   <- preproc(dat, method="log10")
y   <- factor(abr1$fact$class)

dat <- dat.sel(x, y, choices=c("1","2"))
x.1 <- dat[[1]]$dat
y.1 <- dat[[1]]$cls

pars <- valipars(sampling="cv",niter=2,nreps=4)
res  <- frankvali(x.1,y.1,fs.method = "fs.rfe",fs.len = "power2",
                  cl.method = "knn",pars = pars)
res
summary(res)
boxplot(res)
}
\keyword{plot}
<file_sep>/man/list.util.Rd
% lwc-15-02-2010:
%
\name{list.util}
\alias{list2df}
\alias{un.list}
\alias{shrink.list}
\title{
  List Manipulation Utilities
}
\description{
  Functions to handle manipulation of list.
}
\usage{
list2df(x)
un.list(x, y="")
shrink.list(x)
}
% --------------------------------------------------------------------
\arguments{
  \item{x}{A list to be manipulated. }
  \item{y}{A character or string of separator.}
}
\value{
  \code{list2df} returns a data frame.

  \code{un.list} returns a list.

  \code{shrink.list} returns a list.
}
\details{
  \code{list2df} converts a list with components of vector to a data
  frame. Shorter vectors will be filled with NA. It is useful to convert
  ragged vectors into a data frame which can be written to an Excel file.

  \code{un.list} collapses higher-depths list to 1-depth list. This
  function uses recursive programming skill to tackle any depths of
  list.

  \code{shrink.list} removes all NULL or NA entries from a list.
}
\seealso{
  \code{\link{feat.mfs}}
}
% ----------------------------------------------------------------------------
\author{
  <NAME>
}
% ----------------------------------------------------------------------
\examples{
## See examples of function feat.mfs for the usages of list2df and un.list.
a <- list(x=1, y=NA, z=NULL)
b <- list(x=1, y=NA)
c <- list(x=1, z=NULL)
shrink.list(a)
shrink.list(b)
shrink.list(c)
}
\keyword{util}
<file_sep>/R/mt_fs.R
#' =======================================================================
#' Feature frequency and stability of feature ranking.
#' History:
#' 01-02-2007: commence
#' 04-02-2007: feature ranking frequency and overlap rate
#' 10-02-2007: add the stability
#' 09-07-2007: output all frequency
#' Arguments:
#' x - a matrix or data frame of feature order
#' rank.cutoff - top feature order cut-off
#' freq.cutoff - feature frequency cut-off
#' References:
#' 1.) <NAME>, et al., (2006), Reliable gene signatures for microarray
#' classification: assessment of stability and performance. Bioinformatics,
#' vol.22, no.19, pages 2356 - 2363.
#' 2.) <NAME>.; <NAME>. & <NAME>. Prediction of cancer outcome
#' with microarrays: a multiple random validation strategy. Lancet, 365,
#' 488-492
#'
#' Frequency of each feature among the top 'rank.cutoff' positions of
#' the ranking lists in the columns of 'x', plus a stability score
#' (mean frequency). Returns list(freq.all, freq, rank.cutoff,
#' freq.cutoff, stability).
#' NOTE(review): assumes nrow(x) >= rank.cutoff; fewer rows would yield
#' NA entries from the 1:rank.cutoff subset -- confirm with callers.
feat.freq <- function(x, rank.cutoff = 50, freq.cutoff = 0.5) {
  x <- as.matrix(x)
  #' keep only the top-ranked features of every list
  x <- x[1:rank.cutoff, , drop = F]

  #' count occurrences across lists and normalise by number of lists
  tmp <- table(x)
  tmp <- sort(tmp, decreasing = T) / ncol(x)
  fre <- as.vector(tmp) #' strip off a character 'x'
  names(fre) <- names(tmp)

  #' overlap.rate <- sum(fre==1)/rank.cutoff

  res <- list(
    freq.all = fre,
    freq = fre[fre >= freq.cutoff],
    rank.cutoff = rank.cutoff,
    freq.cutoff = freq.cutoff,
    stability = sum(fre) / length(fre)
    #' overlap.fs = if (overlap.rate) names(fre[fre==1]) else NULL,
  )
  return(res)
}

#' ========================================================================
#' lwc-16-02-2007: Calculate the consensus of feature selection by different
#' methods
#' Internal function.
#' Arguments: #' freq - a list consisting of feature frequency more than a threshold feat.cons <- function(freq, disp = TRUE) { #' lwc-18-02-2007: If only retrieve the names, the following code line #' is enough. #' fs.names <- unique(unlist(lapply(freq, names))) fs.names <- lapply(freq, function(x) names(x)) fs.names <- do.call("c", fs.names) fs.names <- sort(table(fs.names), decreasing = T) mat <- matrix( data = NA, nrow = length(fs.names), ncol = length(freq), dimnames = list(names(fs.names), names(freq)) ) fs.tab <- sapply(names(freq), function(x) { dname <- names(freq[[x]]) if (disp) mat[dname, x] <- freq[[x]] else mat[dname, x] <- 1 #' mat[dname,x] <- ifelse(disp, freq[[x]],1) #' do not work return(mat[, x]) }) #' print(fs.tab, na.print="") return(fs.tab) } #' ======================================================================== #' wll-04-07-2007: Multiple feature selectors with resampling procedures. #' Will give rank table based on the statistics and feature #' consensus table. #' wll-12-12-2007: add output of fs.freq and R doc #' wll-29-04-2008: Wrapper function for multiple feature selection with or #' without re-sampling. #' wll-29-05-2008: Add no-resampling code; #' wll-21-10-2009: Integrate re-sampling and no-resampling #' lwc-15-02-2010: drop fs.tab. #' lwc-25-02-2010: minor changes #' Arguments: #' x - data matrix or data frame #' y - class labels (factor) #' method - a set of feature selections methods #' pars - validation control parameters. #' is.resam - Boolean indicator of re-sampling. feat.mfs <- function(x, y, method, pars = valipars(), is.resam = TRUE, ...) { res <- lapply(method, function(m) { cat("\nFeature Selector = :", m, "\n") flush.console() if (is.resam) { feat.rank.re(x, y, method = m, pars = pars, ...) } else { #' model <- if (is.function(m)) m #' else if (is.character(m)) get(m) #' else eval(m) model <- get(m) model(x, y, ...) 
} }) names(res) <- method fs.rank <- sapply(res, function(x) x$fs.rank) fs.order <- sapply(res, function(x) x$fs.order) fs.order <- as.data.frame(fs.order, stringsAsFactors = F) fs.stats <- if (is.resam) { sapply(res, function(x) x$fs.stats) } else { sapply(res, function(x) x$stats) } feat.res <- list( fs.order = fs.order, fs.rank = fs.rank, fs.stats = fs.stats, all = res ) return(feat.res) } #' ====================================================================== #' lwc-25-02-2010: Calculate frequency and consensus from results of #' feat.mfs' with 'is.resam=TRUE'. #' wll-05-12-2015: Should give an error checking here. feat.mfs.stab <- function(fs.res, rank.cutoff = 20, freq.cutoff = 0.5) { order.list <- lapply(fs.res$all, function(x) x$order.list) freq.all <- lapply(order.list, function(x) { feat.freq(x, rank.cutoff = rank.cutoff, freq.cutoff = freq.cutoff) }) fs.freq <- lapply(freq.all, function(x) x$freq) fs.subs <- lapply(freq.all, function(x) names(x$freq)) fs.stab <- lapply(freq.all, function(x) x$stability) fs.cons <- feat.cons(fs.freq) #' print(fs.cons,digits=2,na.print="") fs <- list( fs.freq = fs.freq, fs.subs = fs.subs, fs.stab = fs.stab, fs.cons = fs.cons ) return(fs) } #' ======================================================================= #' lwc-03-09-2010: Plot the stats values of multiple feature selection. #' lwc-06-09-2010: add fs.tab #' wll-05-12-2015: should use this function in data analysis #' Note: #' Arguments: #' fs.stats - Stats value of features #' cumu.plot - A logical value indicating the cumulative scores should be #' plotted. feat.mfs.stats <- function(fs.stats, cumu.plot = FALSE, main = "Stats Plot", ylab = "Values", xlab = "Index of variable", ...) { fs.stats <- as.matrix(fs.stats) nam <- colnames(fs.stats) fs.stats <- lapply(nam, function(x) { #' x = nam[1] val <- fs.stats[, x] #' if stats is data frame, no names for val. 
val <- sort(val, decreasing = T, na.last = T) }) names(fs.stats) <- nam #' Get feautes tab based on stats fs.tab <- lapply(fs.stats, function(x) { list(fs = names(x), val = x) }) #' fs.tab <- list2df(un.list(fs.tab)) #' Note-09-03-2010: If you use cumulative scores, you can easily calculate #' the numbers, such as fix at 80%. if (cumu.plot) { st <- lapply(fs.stats, function(x) cumsum(x / sum(x, na.rm = T))) } else { st <- fs.stats } #' reshape data for plotting st <- do.call(cbind, st) st <- cbind(st, idx = 1:nrow(st)) st <- data.frame(st, stringsAsFactors = F) #' wl-06-11-2021, Sat: use base R function to get long format st_long <- with(st, data.frame(idx = st$idx, stack(st, select = -idx))) # st_long <- data.frame(idx = st$idx,stack(st,select = -idx)) st_long <- st_long[c("idx", "ind", "values")] names(st_long) <- c("idx", "variable", "value") #' st_l <- reshape::melt(st, id = "idx") st.p <- xyplot(value ~ idx | variable, data = st_long, as.table = T, type = c("g", "l"), scales = list(cex = .75, relation = "free"), par.strip.text = list(cex = 0.65), ylab = ylab, xlab = xlab, main = main, ... ) st.p res <- list(stats.tab = fs.tab, stats.long = st_long, stats.p = st.p) return(res) } #' ======================================================================= #' wll-06-11-2008: Use Borda count to get the final feature order #' Note: Previous name is fs.agg feat.agg <- function(fs.rank.list) { fs.score <- apply(fs.rank.list, 1, sum) fs.order <- order(fs.score, decreasing = F) #' order from best to worst. fs.rank <- order(fs.order, decreasing = F) #' feature rank score. 
names(fs.rank) <- rownames(fs.rank.list) temp <- names(fs.rank[fs.order]) if (!is.null(temp)) fs.order <- temp return(list(fs.order = fs.order, fs.rank = fs.rank)) } #' ====================================================================== #' wll-20-03-2007: resampling-based on feature ranking/selection #' wll-23-07-2008: some change in loops handling feat.rank.re <- function(x, y, method, pars = valipars(), tr.idx = NULL, ...) { #' validity checking if (missing(x) || missing(y)) { stop("data set or class are missing") } if (length(dim(x)) != 2) { stop("'x' must be a matrix or data frame") } y <- as.factor(y) #' some classifier need it as factor, such as SVM. if (nrow(x) != length(y)) stop("x and y don't match.") if (length(unique(y)) < 2) { stop("Classification needs at least two classes.") } if (any(is.na(x)) || any(is.na(y))) { stop("NA is not permitted in data set or class labels.") } n <- nrow(x) #' number of samples p <- ncol(x) #' size of feature #' construct index of train data if (is.null(tr.idx)) { if (pars$sampling == "cv" && pars$nreps > n) { pars$sampling <- "loocv" pars$niter <- 1 } if (pars$sampling == "cv" && pars$nreps < 2) { stop("Number of fold (nreps) for cv must greater than 1") } tr.idx <- trainind(y, pars = pars) } else { pars$sampling <- c("user") } pars$niter <- length(tr.idx) pars$nreps <- length(tr.idx[[1]]) #' feature selection with re-sampling cat("Iter (", pars$niter, "):", sep = "") res.all <- lapply(1:pars$niter, function(i) { cat(" ", i, sep = "") flush.console() train.ind <- tr.idx[[i]] res <- lapply(1:pars$nreps, function(j) { x.tr <- x[train.ind[[j]], , drop = F] y.tr <- y[train.ind[[j]]] do.call(method, c(list(x = x.tr, y = y.tr), list(...))) }) names(res) <- paste("Reps", 1:pars$nreps, sep = "_") res }) cat("\n") names(res.all) <- paste("Iter", 1:pars$niter, sep = "_") rank.list <- lapply(res.all, function(x) as.data.frame(sapply(x, function(y) y$fs.rank))) order.list <- lapply(res.all, function(x) as.data.frame(sapply(x, 
function(y) y$fs.order))) stats.list <- lapply(res.all, function(x) as.data.frame(sapply(x, function(y) y$stats))) rank.list <- do.call("cbind", rank.list) order.list <- do.call("cbind", order.list) stats.list <- do.call("cbind", stats.list) fs.stats <- apply(stats.list, 1, mean) #' Use Borda count to get the final feature order fs.score <- apply(rank.list, 1, sum) fs.order <- order(fs.score, decreasing = F) #' feature order from best to worst. fs.rank <- order(fs.order, decreasing = F) #' feature rank score. names(fs.rank) <- rownames(rank.list) temp <- names(fs.rank[fs.order]) if (!is.null(temp)) { fs.order <- noquote(temp) } #' lwc-16-02-2010: Should we remove noquote? res <- list( method = method, fs.order = fs.order, #' feature order fs.rank = fs.rank, #' feature rank fs.stats = fs.stats, #' means of stats rank.list = rank.list, #' full feature rank list order.list = order.list, #' full feature order list pars = pars, #' resampling parameters tr.idx = tr.idx, #' index of training samples. all = res.all ) #' all results of re-sampling return(res) } #' ======================================================================= #' wll-31-10-2007: feature selection using VIP of PLS. #' NOTE: This function supports multiple response, Y (i.e. dummy matrix for #' discriminant). The Mahalanobis distance of VIP is computed as the values #' for selection of feature/variables. #' References: #' 1. <NAME> et. al., PLS-regression: a basic tool of chemometrics. #' Chemometrics and Intelligent Laboratory Systems 58(2001), 109-130. #' 2. <NAME>, et.al., Pattern recognition of gas chromatography mass #' spectrometry of human volatiles in sweat to distinguish the sex of #' subjects and determine potential discriminatory marker peaks. #' Chemometrics and Intelligent Laboratory Systems 87(2007), 161-172. #' 3. <NAME> and <NAME>, Performance of some variable #' selection methods when multicollinearity is present, Chemometrics and #' Intelligent Laboratory Systems 78(2005), 103-112. 
fs.plsvip <- function(x, y, ncomp = 10, ...) { if (!is.data.frame(x)) x <- as.data.frame(x) if (length(y) != nrow(x)) { stop("x and y is not consistent.") } val <- plsc(x, y, pls = "oscorespls", ncomp = ncomp) #' NOTE: Only NIPLS supports VIP values. pls <- val$pls.out #' calculate SS y.lo <- (unclass(pls$Yloadings))^2 x.sc <- colSums(pls$scores^2) x.sc <- matrix(x.sc, nrow = nrow(y.lo), ncol = ncol(y.lo), byrow = T) SS <- y.lo * x.sc #' not matrix product %*%. #' calculate normalised squared weight W <- (unclass(pls$loading.weights))^2 sumW <- colSums(W) sumW <- matrix(sumW, nrow = nrow(W), ncol = ncol(W), byrow = T) W <- W / sumW SSW <- W %*% t(SS) sumSS <- apply(SS, 1, sum) sumSS <- matrix(sumSS, nrow = nrow(SSW), ncol = ncol(SSW), byrow = T) vip <- sqrt(nrow(SSW) * (SSW / sumSS)) #' vip <- rowSums(abs(vip)) #' Mahalanobis distances val <- sqrt(mahalanobis(vip, colMeans(vip), cov(vip), inverted = T)) #' feature rank and feature order fs.order <- order(val, decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(val) nam <- names(val[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = val) return(res) } #' ======================================================================= #' wll-31-10-2007: feature selection using VIP of PLS. #' NOTE: This function supports multiple response, Y (i.e. dummy matrix for #' discriminant). The final VIP is the means of absolute value of VIP. fs.plsvip.1 <- function(x, y, ncomp = 10, ...) { if (!is.data.frame(x)) x <- as.data.frame(x) if (length(y) != nrow(x)) { stop("x and y is not consistent.") } val <- plsc(x, y, pls = "oscorespls", ncomp = ncomp) #' NOTE: Only NIPLS supports VIP values. pls <- val$pls.out #' calculate SS y.lo <- (unclass(pls$Yloadings))^2 x.sc <- colSums(pls$scores^2) x.sc <- matrix(x.sc, nrow = nrow(y.lo), ncol = ncol(y.lo), byrow = T) SS <- y.lo * x.sc #' not matrix product %*%. 
#' calculate normalised squared weight W <- (unclass(pls$loading.weights))^2 sumW <- colSums(W) sumW <- matrix(sumW, nrow = nrow(W), ncol = ncol(W), byrow = T) W <- W / sumW SSW <- W %*% t(SS) sumSS <- apply(SS, 1, sum) sumSS <- matrix(sumSS, nrow = nrow(SSW), ncol = ncol(SSW), byrow = T) vip <- sqrt(nrow(SSW) * (SSW / sumSS)) val <- rowMeans(abs(vip)) #' Mahalanobis distances #' val <- sqrt(mahalanobis(vip, colMeans(vip), cov(vip), inverted=T)) #' feature rank and feature order fs.order <- order(val, decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(val) nam <- names(val[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = val) return(res) } #' ======================================================================= #' wll-29-10-2007: feature selection using VIP of PLS. #' NOTE: To calculate VIP, two conditions needs to satisfy: #' 1.) PLS algorithm is NIPLS; #' 2.) Y must be single vector, not multiple vector, i.e matrix. Hence #' for classification, the coding of Y as a single vector is not #' efficient, especially for the multi-class problem. For two-class #' problem, coding of 0 and 1 or 1 and -1 may be OK for a single y #' vector. fs.plsvip.2 <- function(x, y, ncomp = 10, ...) { if (!is.data.frame(x)) x <- as.data.frame(x) if (length(y) != nrow(x)) { stop("x and y is not consistent.") } #' convert to numeric, especially for factor. y <- as.numeric(y) #' NOTE: need to consider a nice way to convert pls <- oscorespls.fit(as.matrix(x), y, ncomp = ncomp) #' NOTE: Only NIPLS supports VIP values. 
#' VIP values (taken from http://mevik.net/work/software/pls.html) SS <- c(pls$Yloadings)^2 * colSums(pls$scores^2) Wnorm2 <- colSums(pls$loading.weights^2) SSW <- sweep(pls$loading.weights^2, 2, SS / Wnorm2, "*") val <- sqrt(nrow(SSW) * apply(SSW, 1, cumsum) / cumsum(SS)) val <- val[ncomp, ] #' extract VIP values for ncomp components #' feature rank and feature order fs.order <- order(val, decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(val) nam <- names(val[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = val) return(res) } #' ========================================================================= #' wll-29-10-2007: feature selection using regression coefficient of PLS. #' wll-19-11-2015: add 'pls:::' in the front of 'coef.mvr' since it 'hides' #' in the new version of 'pls' #' NOTE: I try to use robust estimation of center and covariant matrix by #' cov.rob in package MASS. But it seems the collinearity problem to #' appear. Therefore, the simple Mahalanobis distance is used. #' NOTES: 1.) Mahalanobis distance and leverage are often used to detect #' outliers especially in the development of linear regression models. #' A point that has a greater Mahalanobis distance from the rest of #' the sample population of points is said to have higher leverage #' since it has a greater influence on the slope or coefficients of #' the regression equation. (From #' http://en.wikipedia.org/wiki/Mahalanobis_distance) fs.pls <- function(x, y, pls = "simpls", ncomp = 10, ...) { if (!is.data.frame(x)) x <- as.data.frame(x) if (length(y) != nrow(x)) { stop("x and y is not consistent.") } val <- plsc(x, y, pls = pls, ncomp = ncomp, ...) 
#' wl-08-11-2021, Mon: Use this one coe <- drop(coef(val$pls.out, ncomp = val$ncomp)) # coe <- drop(pls:::coef.mvr(val$pls.out, ncomp = val$ncomp)) #' lwc-14-06-2010: After running plsc, ncomp may change; hence ncomp here #' use val$ncomp #' Mahalanobis distances val <- sqrt(mahalanobis(coe, colMeans(coe), cov(coe), inverted = T)) #' val <- sapply(as.data.frame(t(coe)), function(x) sqrt(sum(x^2))) #' val <- sapply(as.data.frame(t(coe)), function(x) sum(abs(x))) #' feature rank and feature order fs.order <- order(val, decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(val) nam <- names(val[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = val) return(res) } #' ====================================================================== #' wll-30-10-2007: feature selection using loadings of PCA. #' lwc-22-09-2011: a bug fixed. #' NOTE: 1. To check the eignenvalue, use screeplot(). #' 2. If it combines with other supervised methods such as fs.rf and #' fs.anova as the input methods for feat.mfs and feat.mfs.1, the #' 'thres' should be given explicitly in case of conflicting with 'y'. #' e.g., fs.m <- c("fs.anova", "fs.rf", "fs.pca") feat.mfs.1(dat, cls, #' method=fs.m, is.resam=F, thres=0.8) fs.pca <- function(x, thres = 0.8, ...) { x <- as.matrix(x) obj <- prcomp(x, ...) 
vars <- obj$sdev^2 vars <- vars / sum(vars) #' Proportion of Variance cumvars <- cumsum(vars) #' Cumulative Proportion names(cumvars) <- colnames(obj$rotation) id <- which(cumvars >= thres)[1] if (id == 1) id <- 2 #' lwc-22-09-2011: lo <- obj$rotation[, 1:id] #' selected loadings #' Mahalanobis distances #' rob <- cov.rob(lo, method="mve") #' c("mve", "mcd", "classical") #' val <- sqrt(mahalanobis(lo, rob$center, rob$cov,tol = 1e-7)) val <- sqrt(mahalanobis(lo, colMeans(lo), cov(lo), inverted = T)) #' val <- sapply(as.data.frame(t(lo)), function(x) sqrt(sum(x^2))) #' val <- sapply(as.data.frame(t(lo)), function(x) sum(abs(x))) #' feature rank and feature order fs.order <- order(val, decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(val) nam <- names(val[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = val) return(res) } #' ======================================================================= #' lwc-12-04-2007: feature selection using ratio of between-group #' to within-group sums of squares (BW). #' References: <NAME>, <NAME>, <NAME>. Comparison of #' discrimination methods for the classification of tumors using gene #' expression data. J Amer Statist Assoc 2002, 97:7. #' NOTE: Someone claims that BW ratio for multiclass classification is a #' modification of the F-ratio statistics for one-way ANOVA. fs.bw <- function(x, y, ...) 
{ if (!is.data.frame(x)) x <- as.data.frame(x) if (length(y) != nrow(x)) { stop("x and y is not consistent.") } bw <- sapply(x, function(z) { #' z <- x[,1] #' for debug mn.all <- mean(z) mn.grp <- tapply(z, y, mean) tmp.1 <- tmp.2 <- 0 for (i in 1:length(z)) { cls <- y[i] #' which class tmp.1 <- tmp.1 + (mn.grp[[cls]] - mn.all)^2 tmp.2 <- tmp.2 + (z[i] - mn.grp[[cls]])^2 } tmp.1 / tmp.2 }) fs.order <- order(bw, decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(bw) nam <- names(bw[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = bw) return(res) } #' ========================================================================= #' lwc-06-04-07: Feature selection using RELIEF #' wll-06-04-07: According to the original algorithm, a random sample should #' be drawn in each computation. But result of each run will be different. #' So I change the idea and each sample will be used to update the weight. #' wll-15-10-07: 1. Extend to ReliefF, in which main ideas are that there #' are k (k>=1, default as 10) nearest hits/misses and all the #' hits and misses are averaged. 2. Add the user defined #' number of instances to sample. Default is all instances will #' be used. #' References: #' 1.) <NAME>. and <NAME>. (1992). The Feature Selection Problem : #' Traditional Methods and a new algorithm. Proc. Tenth National Conference #' on Artificial Intelligence, MIT Press, 129-134. #' 2.) <NAME>., <NAME>., and <NAME>. (1997). Overcoming #' the myopia of induction learning algorithms with RELIEFF. Applied #' Intelligence Vol7, 1, 39-55. #' 3.) <NAME>, Estimating Attributes: Analysis and Extensions of #' RELIEF, European Conference on Machine Learning, Ed. <NAME> #' and <NAME>, 1994, 171-182, Springer #' 4.) <NAME> and <NAME>, Theoretical and Empirical #' Analysis of ReliefF and RReliefF, Machine Learning, 53, 23<U+FFFD>C69, 2003 fs.relief <- function(x, y, m = NULL, k = 10, ...) 
{ #' Find the nearest neighbors from a matrix nearest <- function(x, mat, k = 10) { #' Euclidean distance dis <- sapply(as.data.frame(t(mat)), function(y) sqrt(sum((x - y)^2))) k <- min(k, length(dis)) #' wll-21-03-2008: fix a bug spotted by <NAME>. #' ind <- which.min(dis) ind <- sort.list(dis)[1:k] return(mat[ind, , drop = F]) } if (!is.matrix(x)) x <- as.matrix(x) if (!is.factor(y)) y <- as.factor(y) if (length(y) != nrow(x)) { stop("x and y is not consistent.") } n <- nrow(x) p <- ncol(x) gp <- levels(y) prio <- table(y) / n #' Computing the prior #' Calculating the range of each feature. range = Max - Min rng <- sapply(as.data.frame(x), function(x) diff(range(x))) if (is.null(m)) { m <- n } else { m <- min(m, n) } idx <- sample(1:n, m, replace = F) weight <- rep(0, p) for (i in idx) { #' split x by group dat <- split.data.frame(x[-i, , drop = F], y[-i]) #' find nearest neighbours near <- lapply(dat, function(z) nearest(x[i, ], z, k = k)) hit <- gp[gp == y[i]] miss <- gp[gp != y[i]] delta <- rep(0, p) for (j in 1:p) { diff.hit <- -mean(abs(x[i, ][j] - near[[hit]][, j, drop = T])) diff.miss <- lapply(miss, function(z) { prio[z] * mean(abs(x[i, ][j] - near[[z]][, j, drop = T])) }) diff.miss <- do.call("sum", diff.miss) diff.miss <- diff.miss / (1 - prio[hit]) delta[j] <- (1 / m) * ((diff.hit + diff.miss) / rng[j]) } #' updat weight weight <- weight + delta } names(weight) <- colnames(x) fs.order <- order(weight, decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(weight) nam <- names(weight[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = weight) return(res) } #' ======================================================================== #' RFE-SVM feature selection #' History: #' lwc-02-11-2006: #' lwc-15-11-2006: fix fs.len as "power2" #' lwc-12-01-2007: re-write #' lwc-17-01-2007: sequence of number of features in decreasing order #' lwc-18-01-2008: dots argument has a problem. 
Bu I think it is problem of #' svm, not dots' problem. So I have to strip off ... here. #' Does svm treat ... and list(...) differently? fs.rfe <- function(x, y, fs.len = "power2", ...) { #' lwc-16-01-2007: avoid multiple actual arguments in calling svm. #' dots <- list(...) #' if(hasArg(kernel)) dots$kernel <- NULL #' LWC-24-10-2006: Calculates the primal variables w which are stored in #' warray wts.svm <- function(x) { #' warray[k,l,] is the weight vector for the binary pb class k against #' class l ncl <- length(x$labels) classk <- rep(1:ncl, x$nSV) p <- dim(x$SV)[2] #' array of the weight vectors warray <- array(0, dim <- c(ncl, ncl, p)) #' loop to use the coefs for (s in 1:dim(x$SV)[1]) { for (co in 1:(ncl - 1)) { #' find the two class problem k <- classk[s] l <- ((1:ncl)[-k])[co] warray[k, l, ] <- warray[k, l, ] + x$coefs[s, co] * x$SV[s, ] warray[l, k, ] <- warray[l, k, ] + x$coefs[s, co] * x$SV[s, ] } } #' return twice the sum of the absolute value of primal variables w wts <- apply(abs(warray), 3, sum) return(wts) } y <- as.factor(y) #' 31-03-2007: must be factor if for classification. p <- ncol(x) fs.order <- seq(1, p) #' get feature lengths for SVM-RFE computation. len <- get.fs.len(p, fs.len = fs.len) len <- sort(len, decreasing = T) #' must be decreasing order for SVM-RFE nlen <- length(len) for (i in 1:nlen) { #' extract index of feature for this length. sel <- fs.order[1:len[i]] #' call SVM with linear kernel. model <- svm(x[, sel, drop = F], y, kernel = "linear") #' model <- svm(x[,sel,drop=F], y, kernel = "linear",dots) #' calculate the weights wts <- wts.svm(model) #' sort the feature based on the weights ord <- order(wts, decreasing = TRUE) #' update the feature set fs.order[1:len[i]] <- sel[ord] } fs.rank <- order(fs.order) names(fs.rank) <- colnames(x) nam <- colnames(x)[fs.order] if (!is.null(nam)) fs.order <- nam #' wll-05-07-2007: add stats for consistent with other methods. #' No other purpose. 
stats <- length(fs.rank) - fs.rank res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = stats) return(res) } #' ======================================================================== #' SNR for feature selection #' wll-20-03-2007: SNR is only for two-class classification. #' wll-29-10-2008: is similar to Fisher criterion rate: (m1-m2)^2/(v1+v2) fs.snr <- function(x, y, ...) { y <- as.factor(y) if (length(levels(y)) != 2) { stop("'y' must have two classes") } g.mn <- sapply(data.frame(x), function(x) tapply(x, y, mean)) g.sd <- sapply(data.frame(x), function(x) tapply(x, y, sd)) snr <- abs(g.mn[1, ] - g.mn[2, ]) / (g.sd[1, ] + g.sd[2, ]) fs.order <- order(abs(snr), decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(snr) nam <- names(snr[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = abs(snr)) return(res) } #' ======================================================================== #' AUC for feature selection #' wll-20-03-2007: AUC is only for two-class classification. fs.auc <- function(x, y, ...) 
{ y <- as.factor(y) if (length(levels(y)) != 2) { stop("'y' must have two classes") } levels(y) <- c(0, 1) #' change levels as 1,0 y <- as.numeric(levels(y))[as.integer(y)] auc <- sapply(as.data.frame(x), function(x) { y <- y[order(x, decreasing = TRUE)] tmp <- cumsum(y) / sum(y) mean(tmp[y == 0]) }) #' library(limma) #' auc <- sapply(as.data.frame(x),function(z) auROC(y,z)) #' library(verification) #' auc <- sapply(as.data.frame(x),function(z) roc.area(y,z)$A) #' library(ROCR) #' auc <- sapply(as.data.frame(x),function(z) #' as.numeric(performance(prediction(z,y),measure="auc")@y.values)) auc[auc < 0.5] <- 1 - auc[auc < 0.5] fs.order <- order(abs(auc), decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(auc) nam <- names(auc[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = abs(auc)) return(res) } #' ======================================================================== #' randomForest for feature selection #' lwc-20-03-2007:Commence #' wll-08-12-2015:Use scaled important scores. #' Note-08-12-2015: #' 1.) If there are large quantity of zeros in the important score which in #' turn lead to ties of rank list, the results of reampling in which #' rank aggregation is used are not reasonable. #' 2.) Random Forest is random method, which leads to different results for #' different runs even if the random seed has been set by set.seed(). #' 3.) The application of 'fs.rf' should be limited. fs.rf <- function(x, y, ...) { tmp <- randomForest(x, y, importance = T, ...) 
meas <- tmp$importance[, ncol(tmp$importance) - 1] meas[meas <= 0] <- 0 #' Or use the following two lines if (F) { meas <- importance(tmp, type = 1, scale = TRUE) meas <- meas[, 1] } fs.order <- order(meas, decreasing = T, na.last = T) fs.rank <- order(fs.order) names(fs.rank) <- names(meas) nam <- names(meas[fs.order]) if (!is.null(nam)) fs.order <- nam res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = meas) return(res) } #' ======================================================================== #' wll-29-10-2008: Another version of RF for feature selection based on #' successively eliminating the least important variables. fs.rf.1 <- function(x, y, fs.len = "power2", ...) { y <- as.factor(y) p <- ncol(x) fs.order <- seq(1, p) #' initialisation #' get feature lengths len <- get.fs.len(p, fs.len = fs.len) len <- sort(len, decreasing = T) #' must be decreasing order nlen <- length(len) for (i in 1:nlen) { #' extract index of feature for this length. sel <- fs.order[1:len[i]] #' call randomForest rf <- randomForest(x[, sel, drop = F], y, importance = T, keep.forest = FALSE, ... ) imp <- importance(rf, type = 1, scale = TRUE) imp <- imp[, 1] #' sort the feature based on the scaled important scores ord <- order(imp, decreasing = T, na.last = T) #' update the feature set fs.order[1:len[i]] <- sel[ord] } fs.rank <- order(fs.order) names(fs.rank) <- colnames(x) nam <- colnames(x)[fs.order] if (!is.null(nam)) fs.order <- nam #' Add stats for consistent with other methods. No other purpose. stats <- length(fs.rank) - fs.rank res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = stats) return(res) } #' ========================================================================= #' Welch test for feature selection #' lwc-05-01-2007: commence #' lwc-16-03-2007: minor change. Note that do not use sort with na.last=T #' lwc-19-05-2008: Change oneway.test as t.test with dot arguments. So this #' method supports paired t.test (Welch). And also strip off #' noquote. 
#' wll-17-06-2008: change data.frame as as.data.frame in case of data frame
#'                 names changed. (e.g., names of a data frame is
#'                 numeric-like characteristic)
#' Rank features by the absolute Welch (unequal-variance) t-statistic.
#' Two-class only (t.test with a two-level 'y').  Returns a list with
#' fs.order (names best to worst), fs.rank (rank of each feature),
#' stats (|t|) and pval (t-test p-values).
fs.welch <- function(x, y, ...) {
  #' one t-test per column; row 1 = statistic, row 2 = p-value
  tmp <- sapply(as.data.frame(x), function(x) {
    tmp <- t.test(x ~ y, var.equal = F, ...)
    #' tmp <- oneway.test(x ~ y,var.equal=F)
    c(tmp$statistic, tmp$p.value)
  })
  stats <- tmp[1, ]
  pval <- tmp[2, ]

  #' larger |t| = better feature; NAs pushed to the end
  fs.order <- order(abs(stats), decreasing = T, na.last = T)
  fs.rank <- order(fs.order)
  names(fs.rank) <- names(stats)
  nam <- names(stats[fs.order])
  if (!is.null(nam)) fs.order <- nam
  #' fs.order <- noquote(nam)

  res <- list(
    fs.order = fs.order, fs.rank = fs.rank, stats = abs(stats),
    pval = pval
  )
  return(res)
}

#' =========================================================================
#' wll-19-05-2008: Welch test for feature selection
#' NOTE: This function selects features based on the p-values rather than
#' absolute values of statistics. And also supports additional arguments
#' passing, such as paired test or not, and the alternative hypothesis.
#' Same test as fs.welch but ordered by ascending p-value.
fs.welch.1 <- function(x, y, ...) {
  tmp <- sapply(as.data.frame(x), function(x) {
    tmp <- t.test(x ~ y, var.equal = F, ...)
    c(tmp$statistic, tmp$p.value)
  })
  stats <- tmp[1, ]
  pval <- tmp[2, ]

  #' smaller p-value = better feature
  fs.order <- order(pval, decreasing = F, na.last = T)
  fs.rank <- order(fs.order)
  names(fs.rank) <- names(pval)
  nam <- names(pval[fs.order])
  if (!is.null(nam)) fs.order <- nam

  res <- list(
    fs.order = fs.order, fs.rank = fs.rank, pval = pval,
    stats = stats
  )
  return(res)
}

#' =========================================================================
#' Wilcoxon test for feature selection
#' lwc-21-06-2010: commence
#' Note: No doc and not export
#' Rank features by the Wilcoxon rank-sum statistic (two-class 'y').
fs.wilcox <- function(x, y, ...) {
  tmp <- sapply(as.data.frame(x), function(x) {
    tmp <- wilcox.test(x ~ y, ...)
    c(tmp$statistic, tmp$p.value)
  })
  stats <- tmp[1, ]
  pval <- tmp[2, ]

  fs.order <- order(abs(stats), decreasing = T, na.last = T)
  fs.rank <- order(fs.order)
  names(fs.rank) <- names(stats)
  nam <- names(stats[fs.order])
  if (!is.null(nam)) fs.order <- nam

  res <- list(fs.order = fs.order, fs.rank = fs.rank, stats = abs(stats),
              pval = pval)
  return(res)
}

#' =========================================================================
#' ANOVA for feature selection
#' lwc-05-01-2007: commence
#' lwc-16-03-2007: minor change
#' Rank features by the one-way ANOVA F-statistic (equal-variance);
#' handles two or more classes in 'y'.
fs.anova <- function(x, y, ...) {
  tmp <- sapply(as.data.frame(x), function(x) {
    tmp <- oneway.test(x ~ y, var.equal = T)
    c(tmp$statistic, tmp$p.value)
  })
  stats <- tmp[1, ]
  pval <- tmp[2, ]

  fs.order <- order(abs(stats), decreasing = T, na.last = T)
  fs.rank <- order(fs.order)
  names(fs.rank) <- names(stats)
  nam <- names(stats[fs.order])
  if (!is.null(nam)) fs.order <- nam

  res <- list(
    fs.order = fs.order, fs.rank = fs.rank, stats = abs(stats),
    pval = pval
  )
  return(res)
}

#' =========================================================================
#' Kruskal-Wallis test for feature selection
#' lwc-05-01-2007: commence
#' lwc-16-03-2007: minor change
#' Non-parametric analogue of fs.anova: rank features by the
#' Kruskal-Wallis statistic; handles two or more classes in 'y'.
fs.kruskal <- function(x, y, ...) {
  tmp <- sapply(as.data.frame(x), function(x) {
    tmp <- kruskal.test(x ~ y)
    c(tmp$statistic, tmp$p.value)
  })
  stats <- tmp[1, ]
  pval <- tmp[2, ]

  fs.order <- order(abs(stats), decreasing = T, na.last = T)
  fs.rank <- order(fs.order)
  names(fs.rank) <- names(stats)
  nam <- names(stats[fs.order])
  if (!is.null(nam)) fs.order <- nam

  res <- list(
    fs.order = fs.order, fs.rank = fs.rank, stats = abs(stats),
    pval = pval
  )
  return(res)
}

#' =========================================================================
#' Feature ranking validation by error estimation.
#' History:
#' 08-11-2006: commence
#' 09-11-2006: output different errors.
#' 26-11-2006: Borda count for selecting final feature order
#' 10-01-2007: Add user-defined data partitioning
#' 12-10-2007: Add fs.order as a argument.
#' This allows the user to
#' estimate error using a feature order calculated somewhere
#' else. Actually this function replaces rankvali (deprecated).
#'
#' Validate a feature ranking by resampled error estimation: for each
#' resampling split, rank features on the training part (or use the
#' user-supplied 'fs.order') and estimate classification error for nested
#' feature subsets of sizes given by 'fs.len'.
#' Arguments:
#'   dat       - data matrix or data frame (rows = samples)
#'   cl        - class labels, a factor matching nrow(dat)
#'   cl.method - classifier used for error estimation (passed to frank.err)
#'   fs.method - feature ranking method (ignored when 'fs.order' is given)
#'   fs.order  - optional user-defined feature order; when supplied, no
#'               ranking is performed and fs.method is reported as
#'               "User defined"
#'   fs.len    - scheme for the sequence of feature-subset sizes
#'   pars      - resampling control from valipars()
#'   tr.idx    - optional user-supplied training-index list
#' Returns an object of class "frankvali"; when the ranking was computed
#' here (fs.order not supplied), it also carries 'fs.list', the per-iteration
#' rank matrices.
frankvali.default <- function(dat, cl, cl.method = "svm", fs.method = "fs.auc",
                              fs.order = NULL, fs.len = "power2",
                              pars = valipars(), tr.idx = NULL, ...) {
  #' validity checking
  if (missing(dat) || missing(cl)) {
    stop("data set or class are missing")
  }
  if (length(dim(dat)) != 2) {
    stop("'dat' must be a matrix or data frame")
  }
  if (!is.factor(cl)) {
    stop("cl must be a factor.")
  }
  if (nrow(dat) != length(cl)) stop("dat and cl don't match.")
  if (length(unique(cl)) < 2) {
    stop("Classification needs at least two classes.")
  }
  if (any(is.na(dat)) || any(is.na(cl))) {
    stop("NA is not permitted in data set or class labels.")
  }

  #' BUG FIX: remember NOW whether the caller supplied a fixed feature order.
  #' 'fs.order' is overwritten by the Borda-count block below, so the
  #' original trailing test 'if (is.null(fs.order)) ret$fs.list <- fs.list'
  #' could never be true and 'fs.list' was never attached to the result.
  fs.user <- !is.null(fs.order)

  dat <- as.matrix(dat)
  rownames(dat) <- NULL # strip off the row names

  n <- nrow(dat) #' number of samples
  p <- ncol(dat) #' size of feature

  len <- get.fs.len(p, fs.len = fs.len)
  nlen <- length(len)

  #' construct index of train data
  if (is.null(tr.idx)) {
    if (pars$sampling == "cv" && pars$nreps > n) {
      pars$sampling <- "loocv"
    }
    tr.idx <- trainind(cl, pars = pars)
  }
  pars$niter <- length(tr.idx)
  pars$nreps <- length(tr.idx[[1]])

  err.all <- list()
  fs.list <- list()
  for (i in 1:pars$niter) {
    train.ind <- tr.idx[[i]]
    res <- list()
    #' generic loop for loocv, cv, scv, random and bootstrap.
    for (j in 1:length(train.ind)) {
      dat.tr <- dat[train.ind[[j]], , drop = F]
      cl.tr <- cl[train.ind[[j]]]
      dat.te <- dat[-train.ind[[j]], , drop = F]
      cl.te <- cl[-train.ind[[j]]]
      #' Error estimation of feature selection with fs.order or with rank
      #' method.
      res[[j]] <- frank.err(dat.tr, cl.tr, dat.te, cl.te,
        cl.method = cl.method,
        fs.method = fs.method, fs.order = fs.order,
        fs.len = fs.len, ...
      )
    } #' end of j

    #' feature ranking list
    fs.list[[i]] <- sapply(res, function(x) cbind(x$fs.rank))
    rownames(fs.list[[i]]) <- colnames(dat)

    #' error estimation
    err.all[[i]] <- t(sapply(res, function(x) cbind(x$error)))
    colnames(err.all[[i]]) <- len
  } #' End of i

  names(err.all) <- paste("Iter_", seq(1, pars$niter), sep = "")
  names(fs.list) <- paste("Iter_", seq(1, pars$niter), sep = "")

  err.iter <- t(sapply(err.all, function(x) apply(x, 2, mean)))
  err.avg <- apply(err.iter, 2, mean)

  if (!fs.user) { #' final feature ranking
    #' Use Borda count to get the final feature order
    fs.mat <- do.call("cbind", fs.list)
    fs.score <- apply(fs.mat, 1, sum)
    fs.order <- order(fs.score, decreasing = F) #' fs order from best to worst.
    fs.rank <- order(fs.order, decreasing = F) #' fs rank score.
    names(fs.rank) <- rownames(fs.mat)
    temp <- names(fs.rank[fs.order])
    if (!is.null(temp)) {
      fs.order <- noquote(temp)
    }
  } else {
    fs.rank <- order2rank(fs.order)
    fs.method <- "User defined"
  }

  ret <- list(
    fs.method = fs.method,
    cl.method = cl.method,
    fs.len = len, #' computational levels
    err.avg = err.avg, #' average error
    err.iter = err.iter, #' error matrix on each iteration
    err.all = err.all, #' all error matrix
    fs.order = fs.order, #' final feature order
    fs.rank = fs.rank, #' final feature rank
    sampling = switch(pars$sampling,
      "cv" = "cross validation",
      "loocv" = "leave-one-out cross-validation",
      "boot" = "bootstrap",
      "random" = "randomised validation (holdout)"
    ),
    niter = pars$niter, #' number of iteration
    nreps = pars$nreps
  )
  if (!fs.user) {
    ret$fs.list <- fs.list #' feature list of all computation
  }

  class(ret) <- "frankvali"
  return(ret)
}

#' ========================================================================
frankvali <- function(dat, ...)
UseMethod("frankvali") #' ======================================================================== #' lwc-12-11-2006: frankvali.formula <- function(formula, data = NULL, ..., subset, na.action = na.omit) { call <- match.call() if (!inherits(formula, "formula")) { stop("method is only for formula objects") } m <- match.call(expand.dots = FALSE) if (identical(class(eval.parent(m$data)), "matrix")) { m$data <- as.data.frame(eval.parent(m$data)) } m$... <- NULL m[[1]] <- as.name("model.frame") m$na.action <- na.action m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action") ret <- frankvali.default(x, y, ..., na.action = na.action) ret$call <- call ret$call[[1]] <- as.name("frankvali") ret$terms <- Terms if (!is.null(attr(m, "na.action"))) { ret$na.action <- attr(m, "na.action") } class(ret) <- c("frankvali.formula", class(ret)) return(ret) } #' ======================================================================= #' lwc-13-11-2006: boxplot the error rate on each iteration or computation. boxplot.frankvali <- function(x, ...) { col <- "lightgray" xlab <- "Numbers of Feature" ylab <- "Error Rate" ylim <- c(0, 1.0) if (x$niter > 1) { main <- "Error rate on each iteration" #' tmp <- data.frame(x$err.iter) #' why does data.frame change names? tmp <- as.data.frame(x$err.iter) #' wll-1706-2008: Is it OK? colnames(tmp) <- colnames(x$err.iter) } else { main <- "Error rate on each computation" tmp <- as.data.frame(x$err.all) #' tmp <- data.frame(x$err.all) colnames(tmp) <- colnames(x$err.all) } boxplot(tmp, main = main, col = col, xlab = xlab, ylab = ylab, ylim = ylim) } #' ======================================================================== print.frankvali <- function(x, digits = 3, ...) 
{ cat("\nFeature selection method:\t\t", x$fs.method) cat("\nClassification method:\t\t", x$cl.method) cat("\nSampling:\t\t", x$sampling) cat("\nNo. of iteration.:\t", x$niter) cat("\nNo. of replications:\t", x$nreps) cat("\nFeature numbers:\t", x$fs.len) cat("\nAverage error:\t\t", round(x$err.avg, digits)) cat( "\nFeature order (top 10):\t", x$fs.order[1:min(10, length(x$fs.order))] ) #' best to worst cat("\n") invisible(x) } #' ======================================================================== summary.frankvali <- function(object, ...) { structure(object, class = "summary.frankvali") } #' ======================================================================== print.summary.frankvali <- function(x, ...) { print.frankvali(x) cat("\nError of each iteration:\n") print(x$err.iter) #' cat("\nError of all computation:\n") #' print(x$err.all) if (!is.null(x$fs.list)) { cat("\nFeature ranking List:\n") print(x$fs.list) } } #' ========================================================================= #' Error estimation of feature ranking on a single data set. #' History: #' 08-11-2006: commence #' 10-11-2006: generalise cl.method. #' 10-01-2007: minor modification #' 24-09-2007: Generalise fs.method #' 12-10-2007: Add fs.order as a argument. frank.err <- function(dat.tr, cl.tr, dat.te, cl.te, cl.method = "svm", fs.method = "fs.auc", fs.order = NULL, fs.len = "power2", ...) 
{ if (missing(dat.tr) || missing(cl.tr) || missing(dat.te) || missing(cl.te)) { stop(" training and test data missing") } #' feature ranking if (is.null(fs.order)) { tmp <- do.call(fs.method, c(list(x = dat.tr, y = cl.tr), list(...))) fs.order <- tmp$fs.order fs.rank <- tmp$fs.rank } else { fs.rank <- order2rank(fs.order) fs.method <- "User defined" } #' generate feature length for error estimation p <- ncol(dat.tr) len <- get.fs.len(p, fs.len = fs.len) nlen <- length(len) #' error estimation error <- numeric(length = nlen) names(error) <- len #' names(error) <- paste("Len_", len, sep="") for (i in 1:nlen) { #' feature selection sel <- fs.order[1:len[i]] error[i] <- classifier(dat.tr[, sel, drop = F], cl.tr, dat.te[, sel, drop = F], cl.te, method = cl.method, ... )$err } ret <- list( cl.method = cl.method, fs.len = len, error = error, fs.method = fs.method, fs.order = fs.order, fs.rank = fs.rank ) return(ret) } #' ========================================================================= #' wll-29-04-2008: Wrapper function for validation of feature selection by #' classification. #' wll-29-10-2008: Give a logical value to validate all fs or not. #' lwc-07-10-2011: use get.fs.len again but remove the last one. #' Note: 1. Similar but more complete function is frankvali #' 2. should change cl.method as method fs.cl <- function(dat, cl, fs.order = colnames(dat), fs.len = 1:ncol(dat), cl.method = "svm", pars = valipars(), all.fs = FALSE, ...) { len <- get.fs.len(ncol(dat), fs.len = fs.len) if (!all.fs) { len <- len[1:(length(len) - 1)] #' remove the last one } nlen <- length(len) res <- sapply(1:nlen, function(i) { id <- fs.order[1:len[i]] #' extract index of selected features #' cat("\n--Feature Length = :",i,"\n"); flush.console() res <- aam.cl(dat[, id, drop = F], cl, method = cl.method, pars = pars, ...) 
}) res <- t(res) rownames(res) <- len res } #' ========================================================================= #' lwc-27-06-2011: Wrapper function for validation of feature selection by #' classification. #' Note: This function evaluate all features given by user either in #' individual feature or aggregated features. fs.cl.1 <- function(dat, cl, fs.order = colnames(dat), cl.method = "svm", pars = valipars(), agg_f = FALSE, ...) { len <- length(fs.order) if (agg_f) { res <- sapply(1:len, function(i) { id <- fs.order[1:i] #' aggregation of features res <- aam.cl(dat[, id, drop = F], cl, method = cl.method, pars = pars, ...) }) } else { res <- sapply(1:len, function(i) { id <- fs.order[i] #' individual feature res <- aam.cl(dat[, id, drop = F], cl, method = cl.method, pars = pars, ...) }) } res <- t(res) rownames(res) <- 1:len res } #' ========================================================================= #' lwc-27-06-2011: Wrapper function for validation of feature selection by #' classification. #' lwc-19-05-2012: replace aam.cl with accest in order to get more results. #' lwc-22-10-2012: To get aam, call perf.aam. #' lwc-21-01-2014: To get other outcome such as SE and CI, need to provide #' extra code scripts. Refer to frankvali. 
Usages: #' usages #' data(abr1) #' dat <- abr1$pos #' x <- preproc(dat[, 110:500], method = "log10") #' y <- factor(abr1$fact$class) #' dat <- dat.sel(x, y, choices = c("1", "2")) #' x.1 <- dat[[1]]$dat #' y.1 <- dat[[1]]$cls #' pars <- valipars(sampling = "cv", niter = 4, nreps = 4) #' #' #' multi-classes #' fs <- fs.rf(x, y) #' ord <- fs$fs.order[1:50] #' res <- fs.cl.2(x, y, #' fs.order = ord, cl.method = "svm", pars = pars, #' agg_f = TRUE #' ) #' perf.aam(res) #' #' #' two-classes #' fs <- fs.rf(x.1, y.1) #' ord <- fs$fs.order[1:50] #' res.1 <- fs.cl.2(x.1, y.1, #' fs.order = ord, cl.method = "svm", pars = pars, #' agg_f = TRUE #' ) #' perf.aam(res.1) #' fs.cl.2 <- function(dat, cl, fs.order = colnames(dat), cl.method = "svm", pars = valipars(), agg_f = FALSE, ...) { len <- length(fs.order) if (agg_f) { res <- lapply(1:len, function(i) { id <- fs.order[1:i] #' aggregation of features res <- accest(dat[, id, drop = F], cl, method = cl.method, pars = pars, ... ) #' res <- aam.cl(dat[,id, drop=F],cl, method=cl.method, pars=pars,...) }) } else { res <- lapply(1:len, function(i) { id <- fs.order[i] #' individual feature res <- accest(dat[, id, drop = F], cl, method = cl.method, pars = pars, ... ) #' res <- aam.cl(dat[,id, drop=F],cl, method=cl.method, pars=pars,...) }) } #' res <- t(res) #' rownames(res) <- 1:len names(res) <- 1:len res } #' ======================================================================= #' lwc-21-01-2014: get average of acc, auc and mar from outcome of fs.cl.2 perf.aam <- function(res) { tmp <- sapply(res, function(x) { #' x = res[[1]] acc <- x$acc auc <- ifelse(!is.null(x$auc), x$auc, NA) mar <- ifelse(!is.null(x$mar), x$mar, NA) res <- c(acc = acc, auc = auc, mar = mar) }) return(t(tmp)) } #' ======================================================================= #' lwc-24-05-2012: Wrapper function for perf.aam. 
#' Apply 'perf.aam' to every element of a list of classification results
#' (e.g. the outcome of running fs.cl.2 on several data sets).
#' Returns a list of matrices, one per element of 'res'.
#' NOTE(review): the original assigned the result to a local variable
#' named 'perf' (shadowing the function) and relied on the invisible
#' value of that assignment as the return value; return it directly
#' instead so callers see the result.
perf <- function(res) {
  lapply(res, perf.aam)
}
#' ======================================================================
#' Generate a sequence of feature number
#' History:
#' 25-10-2006: commence
#' 31-10-2006: add user defined sequence
#' 15-11-2006: fix a bug in returning a decreasing vector
#' Usages:
#' get.fs.len(10,fs.len=c(1,5,3,11.2,7.8,23,1,0))
#' get.fs.len(200,fs.len="half")
#'
#' Build an increasing vector of feature-subset sizes for error
#' estimation.
#' p:      total number of features available.
#' fs.len: either a numeric vector of candidate lengths (values are
#'         truncated to integers, restricted to 1..p, and p itself is
#'         always included), or one of the strings:
#'         "full"   - every length p, p-1, ..., 1;
#'         "half"   - successive halving p, p/2, p/4, ..., 1;
#'         "power2" - powers of two up to p (largest entry replaced
#'                    by p itself).
#' Returns a vector of unique lengths sorted in increasing order.
get.fs.len <- function(p, fs.len = c("power2")) {
  if (!is.character(fs.len)) {
    #' user-supplied numeric sequence: coerce, clip to (0, p], add p
    fs.len <- as.vector(fs.len)
    fs.len <- as.integer(fs.len)
    fs.len <- fs.len[fs.len <= p & fs.len > 0]
    fs.len <- c(p, fs.len)
    x <- unique(fs.len)
  } else {
    fs.len <- match.arg(fs.len, c("full", "half", "power2"))
    if (fs.len == "full") {
      x <- seq(p, 1)
    } else if (fs.len == "half") {
      #' repeatedly halve (truncating) until reaching 1
      x <- tmp <- p
      while (tmp > 1) {
        tmp <- trunc(tmp / 2)
        x <- c(x, tmp)
      }
    } else { #' "power2"
      n <- ceiling(log2(p))
      x <- 2^(n:0)
      x[1] <- p #' cap the top entry at p (2^n may exceed p)
    }
  }
  #' x <- sort(x,decreasing = T)   #' must be decreasing order for SVM-RFE
  x <- sort(x, decreasing = F)
  return(x)
}
#' ======================================================================
#' lwc-02-02-2007: convert feature rank to feature order
#' NOTE: The vector of fs.rank should have variable names.
#' Usages:
#' load("D:\R_lwc\data\R-W2-GC\31_01_2007\class_rfrankvali_auto.RData")
#' fs.rank.list <- do.call("cbind",fs$rfe$fs.list)
#' tmp <- mt:::rank2order(fs.rank.list[,1])
#' fs.order.list <- sapply(1:ncol(fs.rank.list),
#'                         function(x) mt:::rank2order(fs.rank.list[,x]))
#' Internal function.
#'
#' Convert a named feature-rank vector (1 = best) into a feature-order
#' vector; when 'fs.rank' carries names, the names (best to worst) are
#' returned, otherwise the numeric order indices.
rank2order <- function(fs.rank) {
  fs.order <- order(fs.rank)
  tmp <- names(fs.rank[fs.order])
  if (!is.null(tmp)) fs.order <- tmp
  return(fs.order)
}
#' =======================================================================
#' wll-12-03-2007: convert feature order to feature rank
#' Internal function.
order2rank <- function(fs.order) { fs.rank <- order(fs.order) names(fs.rank) <- fs.order[fs.rank] return(fs.rank) } #' 1) feat.freq #' 2) feat.cons #' 3) feat.mfs #' 4) feat.mfs.stab #' 5) feat.mfs.stats #' 6) feat.agg #' 7) feat.rank.re #' 8) fs.plsvip #' 9) fs.plsvip.1 #' 10) fs.plsvip.2 #' 11) fs.pls #' 12) fs.pca #' 13) fs.bw #' 14) fs.relief #' 15) fs.rfe #' 16) fs.snr #' 17) fs.auc #' 18) fs.rf #' 19) fs.rf.1 #' 20) fs.welch #' 21) fs.welch.1 #' 22) fs.wilcox #' 23) fs.anova #' 24) fs.kruskal #' 25) frankvali.default #' 26) frankvali #' 27) frankvali.formula #' 28) boxplot.frankvali #' 29) print.frankvali #' 30) summary.frankvali #' 31) print.summary.frankvali #' 32) frank.err #' 33) fs.cl #' 34) fs.cl.1 #' 35) fs.cl.2 #' 36) perf.aam #' 37) perf #' 38) get.fs.len #' 39) rank2order #' 40) order2rank <file_sep>/man/plsc.Rd % wll-23-05-2007: commence % wll-03-10-2007: add plsc stuff % \name{plsc} \alias{plsc} \alias{plsc.default} \alias{plsc.formula} \alias{print.plsc} \alias{summary.plsc} \alias{print.summary.plsc} \alias{plslda} \alias{plslda.default} \alias{plslda.formula} \alias{print.plslda} \alias{summary.plslda} \alias{print.summary.plslda} \title{ Classification with PLSDA } \description{ Classification with partial least squares (PLS) or PLS plus linear discriminant analysis (LDA). } \usage{ plsc(x, \dots) plslda(x, \dots) \method{plsc}{default}(x, y, pls="simpls",ncomp=10, tune=FALSE,\dots) \method{plsc}{formula}(formula, data = NULL, \dots, subset, na.action = na.omit) \method{plslda}{default}(x, y, pls="simpls",ncomp=10, tune=FALSE,\dots) \method{plslda}{formula}(formula, data = NULL, \dots, subset, na.action = na.omit) } % ---------------------------------------------------------------------------- \arguments{ \item{formula}{ A formula of the form \code{groups ~ x1 + x2 + \dots} That is, the response is the grouping factor and the right hand side specifies the (non-factor) discriminators. 
}
\item{data}{
Data frame from which variables specified in \code{formula} are
preferentially to be taken.
}
\item{x}{
A matrix or data frame containing the explanatory variables if no
formula is given as the principal argument.
}
\item{y}{
A factor specifying the class for each observation if no formula
principal argument is given.
}
\item{pls}{
A method for calculating PLS scores and loadings. The following methods
are supported:
\itemize{
\item \code{simpls:} SIMPLS algorithm.
\item \code{kernelpls:} kernel algorithm.
\item \code{oscorespls:} orthogonal scores algorithm.
}
For details, see \code{\link[pls]{simpls.fit}},
\code{\link[pls]{kernelpls.fit}} and \code{\link[pls]{oscorespls.fit}}
in package \pkg{pls}.
}
\item{ncomp}{
The number of components to be used in the classification.
}
\item{tune}{
A logical value indicating whether the best number of components should
be tuned.
}
\item{\dots}{
Arguments passed to or from other methods.
}
\item{subset}{
An index vector specifying the cases to be used in the training sample.
}
\item{na.action}{
A function to specify the action to be taken if \code{NA}s are found.
The default action is \code{na.omit}, which leads to rejection of cases
with missing values on any required variable. An alternative is
\code{na.fail}, which causes an error if \code{NA} cases are found.
}
}
% ----------------------------------------------------------------------------
\details{
\code{plsc} implements PLS for classification. In detail, the categorical
response vector \code{y} is converted into a numeric matrix for
regression by PLS and the output of PLS is converted to posteriors by the
\code{softmax} method. The classification results are obtained based on
the posteriors. \code{plslda} combines PLS and LDA for classification, in
which, PLS is for dimension reduction and LDA is for classification based
on the data transformed by PLS.
Three PLS functions,\code{\link[pls]{simpls.fit}}, \code{\link[pls]{kernelpls.fit}} and \code{\link[pls]{oscorespls.fit}}, are implemented in package \pkg{pls}. } % ---------------------------------------------------------------------------- \value{ An object of class \code{plsc} or \code{plslda} containing the following components: \item{x}{ A matrix of the latent components or scores from PLS. } \item{cl}{ The observed class labels of training data. } \item{pred}{ The predicted class labels of training data. } \item{conf}{ The confusion matrix based on training data. } \item{acc}{ The accuracy rate of training data. } \item{posterior}{ The posterior probabilities for the predicted classes. } \item{ncomp}{ The number of latent component used for classification. } \item{pls.method}{ The PLS algorithm used. } \item{pls.out}{ The output of PLS. } \item{lda.out}{ The output of LDA used only by \code{plslda}. } \item{call}{ The (matched) function call. } } % ---------------------------------------------------------------------------- \note{ Two functions may be called giving either a formula and optional data frame, or a matrix and grouping factor as the first two arguments. } % ---------------------------------------------------------------------------- \references{ <NAME>. and <NAME>. (1989) \emph{Multivariate calibration.} <NAME> & Sons. 
} % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{ \code{\link[pls]{kernelpls.fit}}, \code{\link[pls]{simpls.fit}}, \code{\link[pls]{oscorespls.fit}}, \code{\link{predict.plsc}}, \code{\link{plot.plsc}}, \code{\link{tune.func}} } % ---------------------------------------------------------------------------- \examples{ library(pls) data(abr1) cl <- factor(abr1$fact$class) dat <- preproc(abr1$pos , y=cl, method=c("log10"),add=1)[,110:500] ## divide data as training and test data idx <- sample(1:nrow(dat), round((2/3)*nrow(dat)), replace=FALSE) ## construct train and test data train.dat <- dat[idx,] train.t <- cl[idx] test.dat <- dat[-idx,] test.t <- cl[-idx] ## apply plsc and plslda (res <- plsc(train.dat,train.t, ncomp = 20, tune = FALSE)) ## Estimate the mean squared error of prediction (MSEP), root mean squared error ## of prediction (RMSEP) and R^2 (coefficient of multiple determination) for ## fitted PLSR model MSEP(res$pls.out) RMSEP(res$pls.out) R2(res$pls.out) (res.1 <- plslda(train.dat,train.t, ncomp = 20, tune = FALSE)) ## Estimate the mean squared error of prediction (MSEP), root mean squared error ## of prediction (RMSEP) and R^2 (coefficient of multiple determination) for ## fitted PLSR model MSEP(res.1$pls.out) RMSEP(res.1$pls.out) R2(res.1$pls.out) \dontrun{ ## with function of tuning component numbers (z.plsc <- plsc(train.dat,train.t, ncomp = 20, tune = TRUE)) (z.plslda <- plslda(train.dat,train.t, ncomp = 20, tune = TRUE)) ## check nomp tuning results z.plsc$ncomp plot(z.plsc$acc.tune) z.plslda$ncomp plot(z.plslda$acc.tune) ## plot plot(z.plsc,dimen=c(1,2,3),main = "Training data",abbrev = TRUE) plot(z.plslda,dimen=c(1,2,3),main = "Training data",abbrev = TRUE) ## predict test data pred.plsc <- predict(z.plsc, test.dat)$class pred.plslda <- predict(z.plslda, test.dat)$class ## classification rate and confusion 
matrix
cl.rate(test.t, pred.plsc)
cl.rate(test.t, pred.plslda)
}
}
\keyword{classif}
<file_sep>/man/fs.rf.Rd
%% lwc-21-03-2007: commence
%% lwc-16-02-2010: add fs.rf.1
%% lwc-18-10-2011: re-write examples. Will use these examples for all
%% feature selection.
%% wll-08-12-2015: add codes for comparison of 'fs.rf' and 'fs.rf.1'.
\name{fs.rf}
\alias{fs.rf}
\alias{fs.rf.1}
\title{
Feature Selection Using Random Forests (RF)
}
\description{
Feature selection using Random Forests (RF).
}
\usage{
fs.rf(x,y,\dots)
fs.rf.1(x,y,fs.len="power2",\dots)
}
% --------------------------------------------------------------------
\arguments{
\item{x}{
A data frame or matrix of data set.
}
\item{y}{
A factor or vector of class.
}
\item{fs.len}{
Method or numeric sequence for feature lengths. For details, see
\code{\link{get.fs.len}}
}
\item{\dots}{
Arguments to pass to \code{randomForest}.
}
}
\value{
A list with components:
\item{fs.rank}{A vector of feature ranking scores.}
\item{fs.order}{A vector of feature order from best to worst.}
\item{stats}{A vector of measurements. For \code{fs.rf}, it is the Random
Forest importance score. For \code{fs.rf.1}, it is a dummy variable
(currently ignored).
}
}
\details{
\code{fs.rf.1} selects features based on successively eliminating the
least important variables.
} \author{ <NAME> } % ---------------------------------------------------------------------- \examples{ data(abr1) cls <- factor(abr1$fact$class) dat <- abr1$pos ## fill zeros with NAs dat <- mv.zene(dat) ## missing values summary mv <- mv.stats(dat, grp=cls) mv ## View the missing value pattern ## filter missing value variables dat <- dat[,mv$mv.var < 0.15] ## fill NAs with mean dat <- mv.fill(dat,method="mean") ## log transformation dat <- preproc(dat, method="log10") ## select class "1" and "2" for feature ranking ind <- grepl("1|2", cls) mat <- dat[ind,,drop=FALSE] mat <- as.matrix(mat) grp <- cls[ind, drop=TRUE] ## apply random forests for feature selection/ranking res <- fs.rf(mat,grp) res.1 <- fs.rf.1(mat,grp) ## compare the results fs <- cbind(fs.rf=res$fs.order, fs.rf.1=res.1$fs.order) ## plot the important score of 'fs.rf' (not 'fs.rf.1') score <- res$stats score <- sort(score, decreasing = TRUE) plot(score) } \keyword{classif} <file_sep>/R/mt_pcalda.R #' ========================================================================= #' wll-02-10-2007: tune the best number of components tune.pcalda <- function(x, y, ncomp = NULL, tune.pars, ...) { n <- nrow(x) g <- length(levels(y)) if (is.null(ncomp)) { ncomp <- max(g, n - g) } else { if (ncomp < 1 || ncomp > max(g, n - g)) { ncomp <- max(g, n - g) } } if (missing(tune.pars)) { tune.pars <- valipars(sampling = "rand", niter = 1, nreps = 4) } cat("ncomp tune (", ncomp, "):", sep = "") res <- sapply(1:ncomp, function(i) { cat(" ", i, sep = "") flush.console() accest(x, y, pars = tune.pars, method = "pcalda", ncomp = i, tune = F, ... )$acc }) cat("\n") list(ncomp = which.max(res), acc.tune = res) } #' ========================================================================= #' wll-02-10-2007: Tune the best number of components #' NOTE: This is a debug version of tune.pcalda, which can take user defined #' range of ncomp. tune.pcalda.1 <- function(x, y, ncomp = NULL, tune.pars, ...) 
{ n <- nrow(x) g <- length(levels(y)) if (is.null(ncomp)) { ncomp <- 1:max(g, n - g) } else { if (max(ncomp) < 1 || max(ncomp) > max(g, n - g)) { ncomp <- 1:max(g, n - g) } } if (missing(tune.pars)) { tune.pars <- valipars(sampling = "rand", niter = 1, nreps = 4) } #' res <- sapply(1:ncomp, function(i) { #' accest(x, y, pars = tune.pars, method = "pcalda", ncomp=i, tune=F,...)$acc #' }) cat("ncomp tune (", max(ncomp), "):", sep = "") func <- function(i) { cat(" ", i, sep = "") flush.console() accest( dat = x, cl = y, pars = tune.pars, method = "pcalda", ncomp = i, tune = F, ... )$acc } res <- sapply(ncomp, FUN = func) names(res) <- ncomp cat("\n") list(ncomp = which.max(res), acc.tune = res) } #' ========================================================================= #' wll-02-10-2007: tune the best number of components #' NOTE: This is a test version of tune ncomp, which uses LDA directly and #' runs fast than tune.pcalda. tune.pcalda.2 <- function(x, y, ncomp = NULL, center = TRUE, scale. = FALSE, tune.pars, ...) { n <- nrow(x) g <- length(levels(y)) if (is.null(ncomp)) { ncomp <- max(g, n - g) } else { if (ncomp < 1 || ncomp > max(g, n - g)) { ncomp <- max(g, n - g) } } pca.out <- prcomp(x, center = center, scale. = scale., ...) ncomp <- min(ncomp, length(pca.out$center)) if (missing(tune.pars)) { tune.pars <- valipars(sampling = "rand", niter = 1, nreps = 10) } cat("ncomp tune (", ncomp, "):", sep = "") res <- sapply(1:ncomp, function(i) { cat(" ", i, sep = "") flush.console() acc <- accest(pca.out$x[, 1:i, drop = F], y, pars = tune.pars, method = "lda" )$acc }) cat("\n") list(ncomp = which.max(res), acc.tune = res) } #' ========================================================================= #' PCA+LDA for classification #' History: #' wll-22-06-2007: commence #' wll-01-07-2007: try number of PCs as n - g. Over-fitting. #' wll-24-01-2008: strip off constant PCs within group pcalda.default <- function(x, y, center = TRUE, scale. 
= FALSE, ncomp = NULL, tune = FALSE, ...) { #' arguments validity checking if (missing(x) || missing(y)) { stop("data set or class are missing") } x <- as.matrix(x) if (nrow(x) != length(y)) stop("x and y don't match.") y <- as.factor(y) if (any(table(y) == 0)) stop("Can't have empty classes in y.") #' lwc-NOTE: Or simple apply: y <- factor(y), which will drop factor levels if (length(unique(y)) < 2) { stop("Classification needs at least two classes.") } if (any(is.na(x)) || any(is.na(y))) { stop("NA is not permitted in data set or class labels.") } n <- nrow(x) g <- length(levels(y)) #' The singularity problem of the within-class scatter matrix is overcome #' if number of retained PCs varies at least g to at most n-g. Here g and #' n is the number of classes and training data, respectively. #' if(is.null(ncomp)) ncomp <- max(g,n - g) #' NOTE-wll: Too good for training, which means overfits the training data. #' if(is.null(ncomp)) ncomp <- max(g,round(n/2)) #' Check the number of components if (is.null(ncomp)) { ncomp <- if (tune) max(g, n - g) else max(g, round(n / 2)) } else { if (ncomp < 1 || ncomp > max(g, n - g)) { ncomp <- max(g, n - g) } } #' find the best number of components if (tune) { val <- tune.pcalda(x, y, ncomp, ...) ncomp <- val$ncomp } #' dimension reduction by PCA pca.out <- prcomp(x, center = center, scale. = scale., ...) ncomp <- min(ncomp, length(pca.out$center)) x.tmp <- pca.out$x[, 1:ncomp, drop = F] #' stip off PCs constant within groups x.tmp <- preproc.const(x.tmp, y) ncomp <- ncol(x.tmp) #' NOTE-28-01-2008: If the variables being stript off is not in the end #' of columns, they positions should be sorted somewhere. But this #' situation is rare in PCs. Refer to predict.pcalda where the ncomp is #' used. 
  #' --- tail of pcalda.default: LDA on the retained PC scores ---
  lda.out <- lda(x.tmp, y)
  pred <- predict(lda.out, x.tmp)
  #' training-set confusion matrix and accuracy (percent, 2 dp)
  conf <- table(y, pred$class)
  acc <- round(sum(diag(conf)) * 100 / n, 2)
  #' project the centred PC scores onto the LDA discriminant directions
  x <- scale(x.tmp, center = colMeans(lda.out$means), scale = FALSE) %*% lda.out$scaling
  res <- list(
    x = x, cl = y, pred = pred$class, posterior = pred$posterior,
    conf = conf, acc = acc, ncomp = ncomp, pca.out = pca.out,
    lda.out = lda.out
  )
  if (tune) res$acc.tune <- val$acc.tune
  res$call <- match.call()
  res$call[[1]] <- as.name("pcalda")
  class(res) <- "pcalda"
  return(res)
}

#' =========================================================================
#' Predict method for "pcalda" objects.
#' object:  a fitted "pcalda" model.
#' newdata: matrix/data frame of new observations (a bare vector is
#'          treated as a single row); must have as many columns as the
#'          original training data. When missing, the training-set
#'          predictions stored in the object are returned.
#' Returns the result of predict.lda on the PCA-projected data
#' (components: class, posterior, x).
predict.pcalda <- function(object, newdata, ...) {
  if (!inherits(object, "pcalda")) stop("object not of class \"pcalda\"")
  if (missing(newdata)) {
    res <- list(class = object$pred, posterior = object$posterior, x = object$x)
    return(res)
  }
  if (is.null(dim(newdata))) {
    dim(newdata) <- c(1, length(newdata))
  } #' a row vector
  newdata <- as.matrix(newdata)
  if (ncol(newdata) != length(object$pca.out$center)) {
    stop("wrong number of variables")
  }

  #' rotated data (projection) by PCA, truncated to the ncomp kept at fit
  x <- predict(object$pca.out, newdata)
  x <- x[, 1:object$ncomp, drop = F]

  #' predict using LDA
  pred <- predict(object$lda.out, x)

  #' list(class=pred$class, posterior = pred$posterior, x = pred$x)
  return(pred)
}

#' ========================================================================
#' wll-22-06-2007: print method for pcalda
#' wll-15-01-2008: add ratio(svd values)
#' Prints the call, number of components, training confusion matrix,
#' the LDA svd ratios (between-/within-group s.d.) and training accuracy.
#' NOTE(review): "Accurary" below is a typo in a user-facing string;
#' fixing it would change printed output, so it is left as-is here.
print.pcalda <- function(x, ...) {
  cat("\nCall:\n", deparse(x$call), "\n")
  cat("\nNumber of components considered:", x$ncomp)
  cat("\nConfusion matrix of training data:\n")
  print(x$conf)
  cat("\nRatio of between- and within-group s.d.:\n")
  df <- colnames(x$x)
  ratio <- x$lda.out$svd
  names(ratio) <- df
  print(ratio)
  cat("\nAccurary rate of training data:\n")
  print(x$acc)
  invisible(x)
}

#' ========================================================================
#' lwc-22-06-2007: summary method for pcalda
summary.pcalda <- function(object, ...)
{ structure(object, class = "summary.pcalda") } #' ======================================================================== #' lwc-22-06-2007: summary method for pcalda print.summary.pcalda <- function(x, ...) { print.pcalda(x) lev <- levels(x$cl) cat("\nNumber of Classes: ", length(lev), "\n\n") cat("Levels:", if (is.numeric(lev)) "(as integer)", "\n", lev) cat("\n\n") } #' ======================================================================== #' wll-13-12-2007: plot method for pcalda using lattice. #' wll-11-10-2008: Definition of svd in lda documents: #' svd is the singular values, which give the ratio of the between- and #' within-group standard deviations on the linear discriminant variables. #' Their squares are the canonical F-statistics. #' wll-15-01-2008: add svd listed in the plot. plot.pcalda <- function(x, dimen, ...) { ld.names <- function(object, comps) { labs <- paste("LD", 1:length(object$means), sep = "") if (missing(comps)) { comps <- seq(along = labs) } else { labs <- labs[comps] } svd <- object$svd svd.p <- 100 * svd / sum(svd) #' svd.p <- 100 * svd^2/sum(svd^2) #' wll-11-01-2008: check lda's print method: Proportion of trace svd <- svd[comps] svd.p <- svd.p[comps] #' labs <- paste(labs, " (", format(svd.p, digits = 2, trim = TRUE), #' " %)", sep = "") labs <- paste(labs, " (", format(svd, digits = 2, trim = TRUE), ", ", format(svd.p, digits = 2, trim = TRUE), "%)", sep = "" ) return(labs) } if (missing(dimen)) { dimen <- seq(along = colnames(x$x)) } else { #' check validity if (!all(dimen %in% c(1:ncol(x$x)))) { stop("dimen is not valid") } } dfn <- ld.names(x$lda.out, dimen) y <- x$cl x <- data.frame(x$x[, dimen, drop = FALSE]) names(x) <- dfn #' call group plot p <- grpplot(x, y, plot = "pairs", ...) p } #' ========================================================================= #' lwc-22-06-2007: plot method for pcalda. It plot LDA scores. plot.pcalda.1 <- function(x, panel = panel.pcalda, cex = 0.7, dimen, abbrev = FALSE, ...) 
{ panel.pcalda <- function(x, y, ...) { text(x, y, as.character(g.nlda), cex = tcex, col = unclass(g), ...) } ld.names <- function(object, comps) { labs <- paste("LD", 1:length(object$means), sep = "") if (missing(comps)) { comps <- seq(along = labs) } else { labs <- labs[comps] } svd <- object$svd svd <- 100 * svd^2 / sum(svd^2) evar <- svd[comps] labs <- paste(labs, " (", format(evar, digits = 2, trim = TRUE), " %)", sep = "" ) return(labs) } xval <- x$x g <- x$cl if (abbrev) levels(g) <- abbreviate(levels(g), abbrev) assign("g.nlda", g) assign("tcex", cex) if (missing(dimen)) { dimen <- seq(along = colnames(xval)) } else { #' check validity if (!all(dimen %in% c(1:ncol(xval)))) { stop("dimen is not valid") } } xval <- xval[, dimen, drop = FALSE] varlab <- ld.names(x$lda.out, dimen) nDimen <- length(dimen) if (nDimen <= 2) { if (nDimen == 1) { #' One component ldahist(xval[, 1], g, ...) #' ldahist(xval, g, xlab=varlab,...) } else { #' Second component versus first xlab <- varlab[1] ylab <- varlab[2] eqscplot(xval, xlab = xlab, ylab = ylab, type = "n", ...) panel(xval[, 1], xval[, 2], ...) } } else { #' Pairwise scatterplots of several components pairs(xval, labels = varlab, panel = panel, ...) } invisible(NULL) } #' ========================================================================= pcalda <- function(x, ...) UseMethod("pcalda") #' ========================================================================= pcalda.formula <- function(formula, data = NULL, ..., subset, na.action = na.omit) { call <- match.call() if (!inherits(formula, "formula")) { stop("method is only for formula objects") } m <- match.call(expand.dots = FALSE) if (identical(class(eval.parent(m$data)), "matrix")) { m$data <- as.data.frame(eval.parent(m$data)) } m$... 
<- NULL m[[1]] <- as.name("model.frame") m$na.action <- na.action m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action") ret <- pcalda.default(x, y, ..., na.action = na.action) ret$call <- call ret$call[[1]] <- as.name("pcalda") ret$terms <- Terms if (!is.null(attr(m, "na.action"))) { ret$na.action <- attr(m, "na.action") } class(ret) <- c("pcalda.formula", class(ret)) return(ret) } #' 1) tune.pcalda #' 2) tune.pcalda.1 #' 3) tune.pcalda.2 #' 4) pcalda.default #' 5) predict.pcalda #' 6) print.pcalda #' 7) summary.pcalda #' 8) print.summary.pcalda #' 9) plot.pcalda #' 10) plot.pcalda.1 #' 11) pcalda #' 12) pcalda.formula <file_sep>/man/abr1.Rd % lwc-12-10-2006: First draft % lwc-20-02-2007: minor changes \name{abr1} \alias{abr1} \title{abr1 Data} \usage{ data(abr1) } \description{ An FIE-MS data. } \details{ \code{abr1} is an FIE-MS data matrices developed from analysis of samples representing a time course of pathogen attack in a model plant species (Brachypodium distachyon). The data was developed in a single batch with all samples randomised using a Thermo LTQ linear ion trap. Both positive and negative ion mode are given (\code{abr1$pos} and \code{abr1$neg}). 
} \value{ A list with the following elements: \item{fact}{A data frame containing experimental meta-data.} \item{pos}{A data frame for positive data with 120 observations and 2000 variables.} \item{neg}{A data frame for negative data with 120 observations and 2000 variables.} } \examples{ # Load data set data(abr1) # Select data set dat <- abr1$neg # number of observations and variables dim(dat) # Transform data dat.log <- preproc(dat, method = "log") dat.sqrt <- preproc(dat, method = "sqrt") dat.asinh <- preproc(dat, method = "asinh") op <- par(mfrow=c(2,2), pch=16) matplot(t(dat),main="Original",type="l",col="blue", ylab="Intensity") matplot(t(dat.log),main="Log",type="l",col="green", ylab="Intensity") matplot(t(dat.sqrt),main="Sqrt",type="l",col="red", ylab="Intensity") matplot(t(dat.asinh),main="ArcSinh)",type="l",col="black", ylab="Intensity") par(op) mtext("Data set", line=2.5, font=3, cex=1.5) } \keyword{datasets} <file_sep>/man/boxplot.maccest.Rd % lwc-30-01-2007: First draft % \name{boxplot.maccest} \alias{boxplot.maccest} \title{ Boxplot Method for Class 'maccest' } \description{ Boxplot method for the accuracy rate of each classifier. } \usage{ \method{boxplot}{maccest}(x, \dots) } % ---------------------------------------------------------------------------- \arguments{ \item{x}{ An object of class \code{maccest}. } \item{\dots}{ Additional arguments to the plot, such as \code{main}, \code{xlab} and \code{ylab}. } } % ---------------------------------------------------------------------------- \details{ This function is a method for the generic function \code{boxplot()} for class \code{maccest}. It plots the accurary rate for each classifier. } \value{ Returns boxplot of class \code{maccest}. 
} % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{maccest}}, \code{\link{plot.maccest}} } \examples{ # Iris data data(iris) x <- subset(iris, select = -Species) y <- iris$Species method <- c("randomForest","svm","knn") pars <- valipars(sampling="cv", niter = 2, nreps=5) tr.idx <- trainind(y, pars=pars) res <- maccest(x, y, method=method, pars=pars, comp="anova",kernel="linear") res boxplot(res) } \keyword{plot} <file_sep>/man/pca.outlier.Rd %% lwc-01-09-2010 %% wll-29-11-2015: give more information about ellipse and outlier \name{pca.outlier} \alias{pca.outlier} \alias{pca.outlier.1} \title{ Outlier detection by PCA } \description{ Outlier detection by the Mahalanobis distances of PC1 and PC2. Also plot PC1 and PC2 with its confidence ellipse. } \usage{ pca.outlier(x, center = TRUE, scale=TRUE,conf.level = 0.975,...) pca.outlier.1(x, center = TRUE, scale=TRUE, conf.level = 0.975, group=NULL, main = "PCA", cex=0.7,...) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A data frame or matrix. } \item{center}{ A logical value indicating whether the variables should be shifted to be zero centred before PCA analysis takes place. } \item{scale}{ A logical value indicating whether the variables should be scaled to have unit variance before PCA analysis takes place. } \item{conf.level}{ The confidence level for controlling the cutoff of the Mahalanobis distances. } \item{group}{ A string character or factor indicating group information of row of \code{x}. It is used only for plotting. } \item{main}{An overall title for PCA plot.} \item{cex}{ A numerical value giving the amount by which plotting text and symbols should be magnified relative to the default. 
} \item{\dots}{Further arguments for plotting} } % ---------------------------------------------------------------------------- \value{ A list with components: \item{plot}{plot object of class \code{"trellis"} by \code{pca.outlier} only. } \item{outlier}{Outliers detected.} \item{conf.level}{Confidence level used.} \item{mah.dist}{Mahalanobis distances of each data sample.} \item{cutoff}{ Cutoff of Mahalanobis distances used for outlier detection.} } \note{ Examples of \code{\link{panel.elli}} and \code{\link{panel.outl}} give more general information about ellipses and outliers. If you ONLY want to plot outliers based on PCA in a general way, for example, outliers in different groups or in conditional panel, you can write an wrapper function combining with \code{\link{pca.comp}}, \code{\link{panel.elli}} and \code{\link{panel.outl}}. It is quite similiar to the implementation of \code{\link{pca.plot.wrap}}. } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{\code{\link{pcaplot}}, \code{\link{grpplot}}, \code{\link{panel.outl}},\code{\link{panel.elli}}, \code{\link{pca.plot.wrap}} } % ---------------------------------------------------------------------- \examples{ data(iris) ## call lattice version pca.outlier(iris[,1:4], adj=-0.5) ## plot group pca.outlier(iris[,1:4], adj=-0.5,groups=iris[,5]) ## more information about groups pca.outlier(iris[,1:4],groups=iris[,5],adj = -0.5, xlim=c(-5, 5), auto.key = list(x = .05, y = .9, corner = c(0, 0)), par.settings = list(superpose.symbol=list(pch=rep(1:25)))) ## call basic graphic version pca.outlier.1(iris[,1:4]) ## plot group pca.outlier.1(iris[,1:4], group=iris[,5]) } \keyword{plot} <file_sep>/man/plot.maccest.Rd % lwc-30-01-2007: First draft % \name{plot.maccest} \alias{plot.maccest} \title{ Plot Method for Class 'maccest' } \description{ Plot accuracy rate with standard derivation of 
each classifier. } \usage{ \method{plot}{maccest}(x, main = NULL, xlab = NULL, ylab = NULL, \dots) } % ---------------------------------------------------------------------------- \arguments{ \item{x}{ An object of class \code{maccest}. } \item{main}{ An overall title for the plot. } \item{xlab}{ A title for the x axis. } \item{ylab}{ A title for the y axis. } \item{\dots}{ Additional arguments to the plot. } } % ---------------------------------------------------------------------------- \details{ This function is a method for the generic function \code{plot()} for class \code{maccest}. It plots the accuracy rate with standard derivation against the classifiers. } \value{ Returns plot of class \code{maccest}. } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{maccest}}, \code{\link{boxplot.maccest}} } \examples{ # Iris data data(iris) x <- subset(iris, select = -Species) y <- iris$Species method <- c("randomForest","svm","pcalda","knn") pars <- valipars(sampling="boot", niter = 10, nreps=4) res <- maccest(x, y, method=method, pars=pars, comp="anova",kernel="linear") res plot(res) } \keyword{plot} <file_sep>/man/plot.pcalda.Rd % wll-02-07-2007: % \name{plot.pcalda} \alias{plot.pcalda} \title{ Plot Method for Class 'pcalda' } \description{ Plot linear discriminants of \code{pcalda}. } \usage{ \method{plot}{pcalda}(x, dimen, \dots) } % ---------------------------------------------------------------------------- \arguments{ \item{x}{An object of class \code{pcalda}. } \item{dimen}{ The index of linear discriminants to be used for the plot. } \item{\dots}{ Further arguments. See corresponding entry in \code{\link{xyplot}} for non-trivial details. One argument is \code{ep}: an integer for plotting ellipse. \code{1} and \code{2} for plotting overall and group ellipse, respectively. Otherwise, none. For details, see \code{\link{panel.elli.1}}. 
} } % ---------------------------------------------------------------------------- \details{ This function is a method for the generic function \code{plot()} for class \code{pcalda}. If the length of \code{dimen} is greater than 2, a pairs plot is used. If the length of \code{dimen} is equal to 2, a scatter plot is drawn. Otherwise, the dot plot is drawn for the single component. } \value{An object of class \code{"trellis"}.} % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{pcalda}}, \code{\link{predict.pcalda}}, \code{\link{lda.plot.wrap}},\code{\link{panel.elli.1}}. } \examples{ data(abr1) cl <- factor(abr1$fact$class) dat <- abr1$pos model <- pcalda(dat,cl) ## Second component versus first plot(model,dimen=c(1,2),main = "Training data",ep=2) ## Pairwise scatterplots of several components plot(model,main = "Training data",ep=1) ## The first component plot(model,dimen=c(1),main = "Training data") } \keyword{plot} <file_sep>/man/data.visualisation.Rd % lwc-20-10-2010: % lwc-10-06-2015: add mds.plot.wrap % lwc-15-07-2015: more explaination for 'ep'. \name{pca.plot.wrap} \alias{pca.plot.wrap} \alias{mds.plot.wrap} \alias{lda.plot.wrap} \alias{lda.plot.wrap.1} \alias{pls.plot.wrap} \title{Grouped Data Visualisation by PCA, MDS, PCADA and PLSDA} \description{ Grouped data visualisation by PCA, MDS, PCADA and PLSDA. } \usage{ pca.plot.wrap(data.list,title="plotting",\dots) mds.plot.wrap(data.list,method="euclidean",title="plotting",\dots) pca.plot.wrap(data.list,title="plotting",\dots) lda.plot.wrap.1(data.list,title="plotting",\dots) pls.plot.wrap(data.list,title="plotting",\dots) } \arguments{ \item{data.list}{ A two-layer list structure, in which the second layer include a data frame called \code{dat} and a factor of class label called \code{cls}. Noticed that names of the first layer of \code{data.list} should be given. \code{data.list} can be produced by \code{\link{dat.sel}}. 
} \item{method}{ The distance measure to be used. This must be one of "euclidean", "maximum", "manhattan", "canberra", "binary" or "minkowski". Any unambiguous substring can be given. It is only for \code{mds.plot.wrap}. } \item{title}{A part of title string for plotting.} \item{\dots}{Further arguments to \code{lattice}. See corresponding entry in \code{\link{xyplot}} for non-trivial details of \code{lattice}. One argument is \code{ep}: an integer flag for ellipse. \code{1} and \code{2} for plotting overall and group ellipse, respectively. Otherwise, none. For details, see \code{\link{panel.elli.1}}. } } \value{ \code{mds.plot.wrap} returns a handle for MDS plot. All other four functions return a list with components: the first one is an object of class \code{"trellis"} for data visualisation; the second one is also an object of class \code{"trellis"} but plotting the corresponding variables, PCs (principal components), LDs (linear discrimniants) and LCs (latent components); and the third one is a matrix of these variables. } \note{ There is a slight differences between \code{lda.plot.wrap.1} and \code{lda.plot.wrap}. The former plots the two-class grouped data, which has one linear discriminant (LD1), with strip plot. The later plots the two-class data by LD1 vs LD2 which is identical to LD1. Hence \code{lda.plot.wrap} is more general and can be applied to fusion of two and more class data sets. } %% ----------------------------------------------------------------------- \author{ <NAME> } %% ----------------------------------------------------------------------- \seealso{ \code{\link{pcaplot}}, \code{\link{mdsplot}}, \code{\link{plot.pcalda}}, \code{\link{plot.plsc}}, \code{\link{dat.sel}}, \code{\link{grpplot}}, \code{\link{panel.elli.1}}. 
} %% ------------------------------------------------------------------------ \examples{ data(iris) x <- subset(iris, select = -Species) y <- iris$Species ## generate data list by dat.sel iris.pw <- dat.sel(x,y,choices=NULL) names(iris.pw) pca.p <- pca.plot.wrap(iris.pw, ep=2) pca.p[[1]] ## visualised by PCA pca.p[[2]] ## plot PCA variables pca.p[[3]] ## matrix of PCA variables mds.p <- mds.plot.wrap(iris.pw) mds.p pls.p <- pls.plot.wrap(iris.pw) pls.p[[1]] pls.p[[2]] pls.p[[3]] lda.p <- lda.plot.wrap.1(iris.pw) lda.p[[1]] lda.p[[2]] lda.p[[3]] lda.plot.wrap(iris.pw)$lda.p ## only plot iris data ph <- pca.plot.wrap(list(list(dat=x, cls=y)))$pca.p ## Not given data names ph update(ph, strip=FALSE) ## strip is an argument of lattice tmp <- list(iris.dat=list(dat=x, cls=y)) pca.plot.wrap(tmp)$pca.p pca.plot.wrap(tmp,strip=FALSE)$pca.p pls.plot.wrap(tmp,strip=FALSE)$pls.p lda.plot.wrap(tmp,strip=FALSE)$lda.p data(abr1) cls <- factor(abr1$fact$class) dat <- preproc(abr1$pos, method="log") ## pair-wise data set dat.pw <- dat.sel(dat, cls,choices=c("2","3","4")) ## add mult-class idx <- grep("2|3|4",cls) cls.234 <- factor(cls[idx]) dat.234 <- dat[idx,,drop = FALSE] ## combine all dat.tmp <- c(dat.pw, "2~3~4"=list(list(dat=dat.234,cls=cls.234)), all=list(list(dat=dat, cls=cls))) ## PCA ph <- pca.plot.wrap(dat.tmp, title="abr1", par.strip.text = list(cex=0.75), scales=list(cex =.75,relation="free"), ep=2) ## See function grpplot for usage of ep. 
#' =======================================================================
#' Orthogonal signal correction (OSC) -- default method.
#'
#' x      : data matrix/frame (rows = samples, columns = variables).
#' y      : class labels (coerced to factor).
#' method : OSC algorithm, one of "wold", "sjoblom" or "wise".
#' center : column-centre the data before correction?
#' osc.ncomp, pls.ncomp : number of OSC / PLS components.
#' tol, iter            : inner-loop convergence tolerance and cap.
#'
#' Returns an object of class "osc" produced by the selected algorithm,
#' augmented with the matched call and the settings used.
osc.default <- function(x, y, method = "wold", center = TRUE, osc.ncomp = 4,
                        pls.ncomp = 10, tol = 1e-3, iter = 20, ...) {
  #' ---- argument validation (check order preserved) ----
  if (missing(x) || missing(y)) {
    stop("data set or class are missing")
  }
  if (nrow(x) != length(y)) stop("x and y don't match.")
  if (length(unique(y)) < 2) {
    stop("Classification needs at least two classes.")
  }
  if (any(is.na(x)) || any(is.na(y))) {
    stop("NA is not permitted in data set or class labels.")
  }
  method <- match.arg(method, c("wold", "sjoblom", "wise"))

  #' ---- coercion and clamping of the PLS component count ----
  x <- as.matrix(x)
  y <- as.factor(y)
  n.obs <- nrow(x)
  n.var <- ncol(x)
  ncomp.max <- min(n.obs - 1, n.var)
  if (pls.ncomp < 1 || pls.ncomp > ncomp.max) {
    pls.ncomp <- ncomp.max
  }

  #' ---- dispatch to the requested OSC algorithm ----
  algo <- switch(method,
    wold = osc_wold,
    sjoblom = osc_sjoblom,
    wise = osc_wise
  )
  res <- algo(x, y,
    center = center, osc.ncomp = osc.ncomp,
    pls.ncomp = pls.ncomp, tol = tol, iter = iter, ...
  )

  #' ---- attach call / settings metadata and set the S3 class ----
  res$call <- match.call()
  res$call[[1]] <- as.name("osc")
  res$center <- center
  res$osc.ncomp <- osc.ncomp
  res$pls.ncomp <- pls.ncomp
  res$method <- method
  class(res) <- "osc"
  res
}
#' ========================================================================
#' Predict method for class "osc": apply a fitted orthogonal signal
#' correction to new data.
#'
#' object  : a fitted "osc" object (uses $x, $w, $p and $center).
#' newdata : matrix/data frame (or a single observation as a plain vector)
#'           with the same number of variables as the training data.  If
#'           omitted, the OSC-corrected training data stored in the object
#'           is returned directly.
#'
#' Returns a list with:
#'   x  : the OSC-corrected new data.
#'   Q2 : percentage of variation remaining in newdata after correction.
predict.osc <- function(object, newdata, ...) {
  #' if(!inherits(object, "osc")) stop("object not of class \"osc\"")
  if (missing(newdata)) {
    return(object$x)
  }
  if (is.null(dim(newdata))) {
    dim(newdata) <- c(1, length(newdata))
  } #' a row vector
  newdata <- as.matrix(newdata)
  if (ncol(newdata) != ncol(object$x)) stop("wrong number of variables")

  if (object$center) {
    #' NOTE(review): centring uses newdata's own column means; the training
    #' column means are not stored in the object, so train/test centring
    #' can differ -- confirm this is the intended behaviour.
    newdata <- sweep(newdata, 2, colMeans(newdata), "-")
  } #' column-wise center

  #' Fix: was 'if (F)' -- 'F' is a shadowable variable, use the literal
  #' FALSE.  The disabled branch is the reference implementation from
  #' Fearn, On OSC, Chemom. Intell. Lab. Syst. 50(2000):47-52, kept for
  #' documentation only.
  if (FALSE) {
    t <- newdata %*% object$w
    p <- t(newdata) %*% t %*% ginv(t(t) %*% t)
    x <- newdata - t %*% t(p)
  } else {
    #' deflate newdata by the stored OSC weights and loadings
    x <- newdata - newdata %*% object$w %*% t(object$p)
  }

  #' calculate the removed variance of X
  Q2 <- sum(x^2) / sum(newdata^2) * 100
  return(list(x = x, Q2 = Q2))
}

#' ========================================================================
#' Print method for class "osc": report algorithm, call, remaining
#' variance R2 (%) and the mean score/label angle (degrees).
print.osc <- function(x, ...) {
  alg <- switch(x$method,
    wold = "Wold et al approach",
    sjoblom = "Sjoblom et al approach",
    wise = "Wise and Gallagher approach",
    stop("Unknown approach.")
  )
  cat("Orthogonal signal correction (OSC), fitted with the", alg, ".")
  cat("\nCall:\n", deparse(x$call), "\n")
  cat("\nR2 (percentage):", x$R2)
  cat("\nAngle (degree):\t", x$angle)
  cat("\n")
  invisible(x)
}

#' ========================================================================
#' Summary method for class "osc": re-tag the object so the more verbose
#' print.summary.osc is dispatched.
summary.osc <- function(object, ...) {
  structure(object, class = "summary.osc")
}

#' =======================================================================
#' Print method for "summary.osc": print.osc output plus component counts
#' and the dimensions of the stored matrices.
print.summary.osc <- function(x, ...) {
  print.osc(x)
  cat("\nNumber of OSC components:\t", x$osc.ncomp)
  cat("\nNumber of PLS components:\t", x$pls.ncomp)
  cat("\nData dimension:\t\t\t", dim(x$x))
  cat("\nWeight dimension:\t\t", dim(x$w))
  cat("\nLoading dimension:\t\t", dim(x$p))
  cat("\nScore dimension:\t\t", dim(x$t))
  cat("\n")
}
#' =======================================================================
#' Generic for orthogonal signal correction; dispatches on the class of
#' 'x' (formula -> osc.formula, matrix/data frame -> osc.default).
osc <- function(x, ...)
  UseMethod("osc")

#' =======================================================================
#' Formula interface for OSC.  Builds the model frame from 'formula' and
#' 'data', extracts the predictor matrix and the response, then delegates
#' the actual computation to osc.default().
osc.formula <- function(formula, data = NULL, ..., subset,
                        na.action = na.omit) {
  call <- match.call()
  if (!inherits(formula, "formula")) {
    stop("method is only for formula objects")
  }
  m <- match.call(expand.dots = FALSE)
  #' a matrix 'data' cannot be handled by model.frame(); coerce it first
  if (identical(class(eval.parent(m$data)), "matrix")) {
    m$data <- as.data.frame(eval.parent(m$data))
  }
  m$... <- NULL                      #' drop extra args not meant for model.frame
  m[[1]] <- as.name("model.frame")   #' re-dispatch the captured call as model.frame(...)
  m$na.action <- na.action
  m <- eval(m, parent.frame())
  Terms <- attr(m, "terms")
  attr(Terms, "intercept") <- 0      #' no intercept column in the design matrix
  x <- model.matrix(Terms, m)
  y <- model.extract(m, "response")
  #' propagate NA-handling bookkeeping to both x and y
  attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action")
  ret <- osc.default(x, y, ..., na.action = na.action)
  #' record the user-visible call and terms for print/predict methods
  ret$call <- call
  ret$call[[1]] <- as.name("osc")
  ret$terms <- Terms
  if (!is.null(attr(m, "na.action"))) {
    ret$na.action <- attr(m, "na.action")
  }
  class(ret) <- c("osc.formula", class(ret))
  return(ret)
}
#' ========================================================================
#' wll-04-06-2007: Wold algorithm for OSC
#' Removes from 'x' the systematic variation orthogonal to the class
#' indicator matrix derived from 'y'.  For each of 'osc.ncomp' correction
#' components: start from PC1, iterate (orthogonalise the score to y,
#' re-estimate the weight vector via a 'pls.ncomp'-component PLS fit)
#' until the score converges, then deflate x by that component.  Returns
#' the corrected data plus the weights/loadings/scores used by
#' predict.osc, the remaining variance R2 (%) and the mean angle
#' (degrees) between the scores and y.
#' NOTE(review): depends on class.ind (nnet), ginv (MASS) and
#' simpls.fit (pls) being in scope -- confirm package imports.
osc_wold <- function(x, y, center = TRUE, osc.ncomp = 4, pls.ncomp = 10,
                     tol = 1e-3, iter = 20, ...) {
  if (center) x <- sweep(x, 2, colMeans(x), "-") #' column-wise centre
  x.ori <- x
  y <- class.ind(y) #' convert class labels to numeric indicator matrix
  np <- nw <- nt <- list()
  for (i in 1:osc.ncomp) {
    pc <- prcomp(x, scale = F) #' unscaled PCA ('F' kept as in original)
    t <- pc$x[, 1] #' PC1 as initial score
    dif <- 1
    k <- 0
    while (dif > tol & k < iter) {
      k <- k + 1
      #' Orthogonalize t to y
      tnew <- t - (y %*% ginv(t(y) %*% y) %*% t(y) %*% t)
      #' calculate weight vector using PLS
      pls <- simpls.fit(x, tnew, ncomp = pls.ncomp, ...)
      w <- pls$coefficients[, , ncomp = pls.ncomp, drop = FALSE]
      w <- w / sqrt(sum(w^2)) #' normalise weight vector to unit length
      tnew <- x %*% w
      #' Check for convergence (relative change of the score vector)
      dif <- sqrt(sum((tnew - t)^2) / sum(tnew^2))
      t <- tnew
    }
    p <- t(x) %*% t %*% ginv(t(t) %*% t) #' loading for this component
    x <- x - t %*% t(p)                  #' deflate x by the component
    nw[[i]] <- w
    np[[i]] <- p
    nt[[i]] <- t
  }
  nw <- do.call(cbind, nw)
  np <- do.call(cbind, np)
  nt <- do.call(cbind, nt)
  #' OSC-correct the original data set
  x <- x.ori - x.ori %*% nw %*% t(np)
  #' Calculate the fraction of the variation in X (the removed/remaining
  #' variance of X, as a percentage)
  R2 <- sum(x^2) / sum(x.ori^2) * 100
  #' R2 <- var(as.vector(x^2))/var(as.vector(x.ori^2))
  #' Calculate the angle which assesses that the t vectors are orthogonal
  #' to y (mean angle in degrees; 90 means perfectly orthogonal)
  angle <- t(nt) %*% y
  norm <- ginv(sqrt(apply(nt^2, 2, sum) * sum(y^2)))
  angle <- t(angle) %*% t(norm)
  angle <- mean(acos(angle) * 180 / pi)
  res <- list(x = x, R2 = R2, angle = angle, w = nw, p = np, t = nt,
              center = center)
  return(res)
}
#' ========================================================================
#' wll-03-06-2007: Sjoblom algorithm
#' wll-03-06-2007: Fix an algorithm misunderstanding in updating weights.
#' wll-03-06-2007: Orthogonalize t to y in the last step.  This improves
#'                 the performance of OSC.
#' Sjoblom variant of OSC: same overall scheme as osc_wold, but the
#' weights inside the convergence loop come from a least-squares
#' projection of x onto the orthogonalised score; the PLS fit is done
#' once per component after convergence, and the final score is
#' re-orthogonalised to y before deflation.
#' NOTE(review): depends on class.ind (nnet), ginv (MASS) and
#' simpls.fit (pls) being in scope -- confirm package imports.
osc_sjoblom <- function(x, y, center = TRUE, osc.ncomp = 4, pls.ncomp = 10,
                        tol = 1e-3, iter = 20, ...) {
  if (center) x <- sweep(x, 2, colMeans(x), "-") #' column-wise centre
  x.ori <- x
  y <- class.ind(y) #' convert class labels to numeric indicator matrix
  np <- nw <- nt <- list()
  for (i in 1:osc.ncomp) {
    pc <- prcomp(x, scale = F) #' unscaled PCA ('F' kept as in original)
    t <- pc$x[, 1] #' PC1 as initial score
    dif <- 1
    k <- 0
    while (dif > tol & k < iter) {
      k <- k + 1
      #' Orthogonalize t to y (by tnew = t - y*inv(y'*y)*y'*t).
      tnew <- t - (y %*% ginv(t(y) %*% y) %*% t(y) %*% t)
      #' Update weights and scores
      w <- t(x) %*% tnew %*% ginv(t(tnew) %*% tnew)
      #' w <- t(x) %*% tnew   #' NOTE: not this one!
      w <- w / sqrt(sum(w^2)) #' normalise weight vector to unit length
      tnew <- x %*% w
      #' Check for convergence (relative change of the score vector)
      dif <- sqrt(sum((tnew - t)^2) / sum(tnew^2))
      t <- tnew
    }
    #' fit PLS model
    pls <- simpls.fit(x, tnew, ncomp = pls.ncomp, ...)
    #' extract the coefficients as weights in OSC
    w <- pls$coefficients[, , ncomp = pls.ncomp, drop = FALSE]
    #' Update scores, loadings and corrected data
    t <- x %*% w
    #' Orthogonalize t to y
    t <- t - y %*% ginv(t(y) %*% y) %*% t(y) %*% t
    p <- t(x) %*% t %*% ginv(t(t) %*% t)
    x <- x - t %*% t(p) #' deflate x by the component
    nw[[i]] <- w
    np[[i]] <- p
    nt[[i]] <- t
  }
  nw <- do.call(cbind, nw)
  np <- do.call(cbind, np)
  nt <- do.call(cbind, nt)
  #' OSC-correct the original data set
  x <- x.ori - x.ori %*% nw %*% t(np)
  #' Calculate the fraction of the variation in X (the removed/remaining
  #' variance of X, as a percentage)
  R2 <- sum(x^2) / sum(x.ori^2) * 100
  #' R2 <- var(as.vector(x^2))/var(as.vector(x.ori^2))
  #' Calculate the angle which assesses that the t vectors are orthogonal
  #' to y (mean angle in degrees; 90 means perfectly orthogonal)
  angle <- t(nt) %*% y
  norm <- ginv(sqrt(apply(nt^2, 2, sum) * sum(y^2)))
  angle <- t(angle) %*% t(norm)
  angle <- mean(acos(angle) * 180 / pi)
  res <- list(
    x = x, R2 = R2, angle = angle, w = nw, p = np, t = nt,
    center = center
  )
  return(res)
}
#' ========================================================================
#' wll-03-06-2007: Wise algorithm
#' Wise and Gallagher variant of OSC: iterates scores AND loadings in the
#' inner loop (score from loading, orthogonalise to y, new loading from
#' score), then fits a PLS model whose component count is capped by the
#' numerical rank of x, re-orthogonalises and deflates as in the other
#' variants.
#' NOTE(review): the inner loop tests 'dif > 1e-5' rather than the 'tol'
#' argument, so 'tol' is effectively ignored here -- possibly
#' unintentional; confirm before changing.
#' NOTE(review): depends on class.ind (nnet), ginv (MASS) and
#' simpls.fit (pls) being in scope -- confirm package imports.
osc_wise <- function(x, y, center = TRUE, osc.ncomp = 4, pls.ncomp = 10,
                     tol = 1e-3, iter = 20, ...) {
  if (center) x <- sweep(x, 2, colMeans(x), "-") #' column-wise centre
  x.ori <- x
  y <- class.ind(y) #' convert class labels to numeric indicator matrix
  np <- nw <- nt <- list()
  for (i in 1:osc.ncomp) {
    pc <- prcomp(x, scale = F)  #' unscaled PCA ('F' kept as in original)
    told <- pc$x[, 1]           #' initial score
    p <- pc$rotation[, 1]       #' initial loadings
    dif <- 1
    k <- 0
    while (dif > 1e-5 & k < iter) { #' hard-coded threshold; see NOTE above
      k <- k + 1
      #' Calculate scores from loads (by t = x*p/(p'*p) ).
      t <- (x %*% p) %*% solve(t(p) %*% p)
      #' Othogonalize t to y (by tnew = t - y*inv(y'*y)*y'*t).
      tnew <- t - (y %*% solve(t(y) %*% y) %*% t(y) %*% t)
      #' Compute a new loading (by pnew = x'*tnew/(tnew'*tnew) ).
      pnew <- (t(x) %*% tnew) %*% solve(t(tnew) %*% tnew)
      #' Check for convergence (relative change of the score vector)
      dif <- sqrt(sum((tnew - told)^2) / sum(tnew^2))
      told <- tnew
      p <- pnew
    }
    #' fit PLS model; cap ncomp by the numerical rank of x
    nc <- min(pls.ncomp, qr(x, tol = 1e-9)$rank)
    pls <- simpls.fit(x, tnew, ncomp = nc, ...)
    w <- pls$coefficients[, , ncomp = nc, drop = FALSE]
    w <- w / sqrt(sum(w^2)) #' normalise weight vector to unit length
    #' pls <- plsr(tnew~x,ncomp=ncomp,method="simpls")
    #' w <- coef.mvr(pls,ncomp=nc)
    #' Calculate new scores vector
    t <- x %*% w
    #' Othogonalize t to y
    t <- t - y %*% ginv(t(y) %*% y) %*% t(y) %*% t
    #' Compute new p
    p <- t(x) %*% t %*% ginv(t(t) %*% t)
    #' Remove orthogonal signal from x
    x <- x - t %*% t(p)
    nw[[i]] <- w
    np[[i]] <- p
    nt[[i]] <- t
  }
  nw <- do.call(cbind, nw)
  np <- do.call(cbind, np)
  nt <- do.call(cbind, nt)
  #' OSC-correct the original data set
  x <- x.ori - x.ori %*% nw %*% t(np)
  #' Calculate the fraction of the variation in X (the removed/remaining
  #' variance of X, as a percentage)
  R2 <- sum(x^2) / sum(x.ori^2) * 100
  #' R2 <- var(as.vector(x^2))/var(as.vector(x.ori^2))
  #' Calculate the angle which assesses that the t vectors are orthogonal
  #' to y (mean angle in degrees; 90 means perfectly orthogonal)
  angle <- t(nt) %*% y
  norm <- ginv(sqrt(apply(nt^2, 2, sum) * sum(y^2)))
  angle <- t(angle) %*% t(norm)
  angle <- mean(acos(angle) * 180 / pi)
  res <- list(
    x = x, R2 = R2, angle = angle, w = nw, p = np, t = nt,
    center = center
  )
  return(res)
}
It can be a numeric vector as user-defined feature lengths, or methods: \itemize{ \item \code{full}. The feature lengths are \code{p, \dots, 2, 1}. This is an exhaustive method. If \code{p} is too large, it will consume a lot of time and hence it is not practical. \item \code{half}. The feature lengths are the sequence of \code{p, p/2, p/2/2, \dots, 1}. \item \code{power2}. The feature lengths are the sequence of \code{p, 2^(log2(p)-1), \dots, 2^1, 2^0}. } } } % ---------------------------------------------------------------------------- \value{ An descending order numeric vector of feature lengths. } \details{ The generation of feature length is used in the validation of feature subsets by classification. The feature length decide the lengths of feature subset starting from top of the full feature order list. } \note{ The last length of feature returned is always \code{p}. } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{fs.rfe}}, \code{\link{frank.err}}, \code{\link{frankvali}} } % ---------------------------------------------------------------------- \examples{ data(abr1) dat <- abr1$pos ## number of featres p <- ncol(dat) ## predefined feature lengths. The returned will be descending order ## vector with the first one is p. (vec <- get.fs.len(p, fs.len=c(1,2,3,4,5,6,7,8,9,10))) ## use all features as feature lengths (vec.full <- get.fs.len(p, fs.len="full")) ## use "half" (vec.half <- get.fs.len(p, fs.len="half")) ## use "power2" (vec.power2 <- get.fs.len(p, fs.len="power2")) } \keyword{manip} <file_sep>/man/tune.func.Rd % wll-03-10-2007: add plsc stuff % \name{tune.func} \alias{tune.func} \alias{tune.plsc} \alias{tune.plslda} \alias{tune.pcalda} \title{Functions for Tuning Appropriate Number of Components} \description{ Tune appropriate number of components (\code{ncomp}) for \code{plsc}, \code{plslda} or \code{pcalda}. 
} \usage{ tune.plsc(x,y, pls="simpls",ncomp=10, tune.pars,\dots) tune.plslda(x,y, pls="simpls",ncomp=10, tune.pars,\dots) tune.pcalda(x,y, ncomp=NULL, tune.pars,\dots) } \arguments{ \item{x}{ A matrix or data frame containing the explanatory variables if no formula is given as the principal argument. } \item{y}{ A factor specifying the class for each observation if no formula principal argument is given. } \item{pls}{ A method for calculating PLS scores and loadings. The following methods are supported: \itemize{ \item \code{simpls:} SIMPLS algorithm. \item \code{kernelpls:} kernel algorithm. \item \code{oscorespls:} orthogonal scores algorithm. } For details, see \code{\link[pls]{simpls.fit}}, \code{\link[pls]{kernelpls.fit}} and \code{\link[pls]{oscorespls.fit}} in package \pkg{pls}. } \item{ncomp}{ The number of components to be used in the classification. } \item{tune.pars}{ A list of parameters using by the resampling method. See \code{\link{valipars}} for details. } \item{\dots}{Further parameters passed to \code{tune}.} } \value{ A list including: \item{ncomp}{The best number of components.} \item{acc.tune}{Accuracy rate of components.} } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{plsc}}, \code{\link{plslda}}, \code{\link{pcalda}},\code{\link{valipars}} } % ---------------------------------------------------------------------------- \examples{ \dontrun{ data(abr1) cl <- factor(abr1$fact$class) dat <- preproc(abr1$pos , y=cl, method=c("log10"),add=1)[,110:500] ## divide data as training and test data idx <- sample(1:nrow(dat), round((2/3)*nrow(dat)), replace=FALSE) ## construct train and test data train.dat <- dat[idx,] train.t <- cl[idx] test.dat <- dat[-idx,] test.t <- cl[-idx] ## tune the best number of components ncomp.plsc <- tune.plsc(dat,cl, pls="simpls",ncomp=20) ncomp.plslda <- tune.plslda(dat,cl, pls="simpls",ncomp=20) ncomp.pcalda <- tune.pcalda(dat,cl, ncomp=60) ## model 
fit (z.plsc <- plsc(train.dat,train.t, ncomp=ncomp.plsc$ncomp)) (z.plslda <- plslda(train.dat,train.t, ncomp=ncomp.plslda$ncomp)) (z.pcalda <- pcalda(train.dat,train.t, ncomp=ncomp.pcalda$ncomp)) ## or indirect use tune function in model fit z.plsc <- plsc(train.dat,train.t, ncomp=20, tune=TRUE) z.plslda <- plslda(train.dat,train.t, ncomp=20, tune=TRUE) z.pcalda <- pcalda(train.dat,train.t, ncomp=60, tune=TRUE) ## predict test data pred.plsc <- predict(z.plsc, test.dat)$class pred.plslda <- predict(z.plslda, test.dat)$class pred.pcalda <- predict(z.pcalda, test.dat)$class ## classification rate and confusion matrix cl.rate(test.t, pred.plsc) cl.rate(test.t, pred.plslda) cl.rate(test.t, pred.pcalda) } } \keyword{models} <file_sep>/man/predict.plsc.Rd % wll-23-05-2007: % wll-03-10-2007: add plsc stuff % \name{predict.plsc} \alias{predict.plsc} \alias{predict.plslda} \title{ Predict Method for Class 'plsc' or 'plslda' } \description{ Prediction of test data using \code{plsc} or \code{plslda}. } \usage{ \method{predict}{plsc}(object, newdata,\dots) \method{predict}{plslda}(object, newdata,\dots) } % ---------------------------------------------------------------------------- \arguments{ \item{object}{ Object of class \code{plsc} or \code{plslda}. } \item{newdata}{ A matrix or data frame of cases to be classified. } \item{\dots}{ Arguments based from or to other methods. } } % ---------------------------------------------------------------------------- \details{ Two functions are methods for the generic function \code{predict()} for class \code{plsc} or \code{plslda}. If \code{newdata} is omitted, the results of training data in \code{plsc} or \code{plslda} object will be returned. } % ---------------------------------------------------------------------------- \value{ A list with components: \item{class}{ The predicted class (a factor). } \item{posterior}{ The posterior probabilities for the predicted classes. 
} \item{x}{ The rotated test data by the projection matrix of PLS. } } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{ \code{\link{plsc}}, \code{\link{plot.plsc}},\code{\link{plslda}}, \code{\link{plot.plslda}} } % ---------------------------------------------------------------------------- \examples{ data(iris3) tr <- sample(1:50, 25) train <- rbind(iris3[tr,,1], iris3[tr,,2], iris3[tr,,3]) test <- rbind(iris3[-tr,,1], iris3[-tr,,2], iris3[-tr,,3]) cl <- factor(c(rep("s",25), rep("c",25), rep("v",25))) ## model fit using plsc and plslda without tuning of ncomp (z.plsc <- plsc(train, cl)) (z.plslda <- plslda(train, cl)) ## predict for test data pred.plsc <- predict(z.plsc, test) pred.plslda <- predict(z.plslda, test) ## plot the projected test data. grpplot(pred.plsc$x, pred.plsc$class, main="PLSC: Iris") grpplot(pred.plslda$x, pred.plslda$class, main="PLSLDA: Iris") } \keyword{classif} <file_sep>/man/feat.freq.Rd %% lwc-15-03-2007: %% wll-04-12-2015:tidy up \name{feat.freq} \alias{feat.freq} \title{Frequency and Stability of Feature Selection } \description{Frequency and stability of feature selection. } \usage{ feat.freq(x,rank.cutoff=50,freq.cutoff=0.5) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A matrix or data frame of feature orders. } \item{rank.cutoff}{A numeric value for cutoff of top features.} \item{freq.cutoff}{A numeric value for cutoff of feature frequency.} } % ------------------------------------------------------------------------- \value{ A list with components: \item{freq.all}{Feature frequencies. } \item{freq}{Feature frequencies larger than \code{freq.cutoff}. } \item{stability}{Stability rate of feature ranking.} \item{rank.cutoff}{ Top feature order cut-off used. } \item{freq.cutoff}{ Feature frequency cut-off used. 
} } \references{ <NAME>., et al., (2006) Reliable gene signatures for microarray classification: assessment of stability and performance. \emph{Bioinformatics}, vol.22, no.19, 2356 - 2363. <NAME>., et al., (2005) Prediction of cancer outcome with microarrays: a multiple random validation strategy. \emph{Lancet}, vol.365, 488 - 492. } \author{ <NAME> } \seealso{ \code{\link{feat.rank.re}} } % ---------------------------------------------------------------------- \examples{ ## prepare data set data(abr1) cls <- factor(abr1$fact$class) dat <- abr1$pos ## dat <- abr1$pos[,110:1930] ## fill zeros with NAs dat <- mv.zene(dat) ## missing values summary mv <- mv.stats(dat, grp=cls) mv ## View the missing value pattern ## filter missing value variables ## dim(dat) dat <- dat[,mv$mv.var < 0.15] ## dim(dat) ## fill NAs with mean dat <- mv.fill(dat,method="mean") ## log transformation dat <- preproc(dat, method="log10") ## select class "1" and "2" for feature ranking ind <- grepl("1|2", cls) mat <- dat[ind,,drop=FALSE] mat <- as.matrix(mat) grp <- cls[ind, drop=TRUE] ## use resampling method of bootstrap pars <- valipars(sampling="boot",niter=10, nreps=5) z <- feat.rank.re(mat,grp,method="fs.plsvip",pars = pars) ## compute the frequency and stability of feature selection freq <- feat.freq(z$order.list,rank.cutoff=50,freq.cutoff=0.5) } \keyword{classif} <file_sep>/man/stats.util.Rd %% lwc-09-01-2014: commence %% wll-01-12-2015: add an argument for 'fc' \name{stats.util} \alias{stats.mat} \alias{stats.vec} \title{ Statistical Summary Utilities for Two-Classes Data } \description{ Functions to summarise two-group data. } \usage{ stats.vec(x,y, method="mean",test.method = "wilcox.test",fc=TRUE,\dots) stats.mat(x,y, method="mean",test.method = "wilcox.test", padj.method= "fdr",fc=TRUE,\dots) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A data frame or matrix of data set for \code{stats.mat} or data vector for \code{stats.vec}. 
}
\item{y}{ A factor or vector of class. Two classes only. }
\item{method}{method for group center such as \code{\link{mean}} or
\code{\link{median}}. }
\item{test.method}{method for p-values from parametric test such as
\code{\link{t.test}} or non-parametric test such as
\code{\link{wilcox.test}}. }
\item{padj.method}{method for p-values correction. Can be one in
\code{\link{p.adjust.methods}}: "holm", "hochberg", "hommel",
"bonferroni", "BH", "BY", "fdr" and "none". }
\item{fc}{a flag for fold-change.}
\item{\dots}{ Additional parameters. }
}

% ----------------------------------------------------------------------
\value{
\code{stats.vec} returns a vector of center, group center, fold change,
log2 fold change, AUC and p-value.

\code{stats.mat}, which is a wrapper function of \code{stats.vec},
returns a data frame of center, group center, fold change, log2
fold-change, AUC, p-value and adjusted p-values.
}
\note{
If \code{x} has negative values, the results of fold-change and log2
fold-change are not reliable.
}
% ----------------------------------------------------------------------------
\author{
<NAME>
}
% ----------------------------------------------------------------------
\examples{
data(iris)
sel <- c("setosa", "versicolor")

grp <- iris[,5]
idx <- match(iris[,5],sel,nomatch = 0) > 0

cls <- factor(iris[idx,5])
dat <- iris[idx,1:4]

## stats for the single vector
stats.vec(dat[[1]],cls, method = "median",test.method = "wilcox.test")

## use matrix format
tab <- stats.mat(dat,cls, method = "median",test.method = "wilcox.test",
                 padj.method = "BH")
sapply(tab, class)

}
\keyword{util}
<file_sep>/man/df.util.Rd
% lwc-11-10-2011: commence
\name{df.util}
\alias{df.summ}
\alias{vec.summ}
\alias{vec.summ.1}
\title{
Summary Utilities
}
\description{
Functions to summarise data.
}
\usage{
df.summ(dat, method=vec.summ,\dots)

vec.summ(x)

vec.summ.1(x)
}
% --------------------------------------------------------------------
\arguments{
  \item{dat}{ A data frame or matrix of data set. }
  \item{x}{ A vector value. }
  \item{method}{Summary method such as \code{vec.summ} and \code{vec.summ.1}.
  For user-defined methods, see examples below. }
  \item{\dots}{ Additional parameters to \code{method} function. }
}
% ----------------------------------------------------------------------
\value{
\code{df.summ} returns a summarised data frame.

\code{vec.summ} returns a vector of number of variables (excluding
NAs), minimum, mean, median, maximum and standard deviation.

\code{vec.summ.1} returns a vector of number of variables (excluding
NAs), mean, median, 95\% confidence interval of median, IQR and
standard deviation.
}
% ----------------------------------------------------------------------------
\author{
<NAME>
}
% ----------------------------------------------------------------------
\examples{
data(abr1)
dat <- (abr1$pos)[,110:150]
cls <- factor(abr1$fact$class)

## sort out missing value
dat <- mv.zene(dat)

## summary of an individual column
vec.summ(dat[,2])
vec.summ.1(dat[,2])

## summary of data frame
summ <- df.summ(dat)  ## default: vec.summ
summ.1 <- df.summ(dat, method=vec.summ.1)

## summary by groups
by(dat, list(cls=cls), df.summ)

## User-defined summary function:
vec.segment <- function(x, bar=c("SD", "SE", "CI"))
{
  bar <- match.arg(bar)
  centre <- mean(x, na.rm = TRUE)

  if (bar == "SD") {
    stderr <- sd(x, na.rm = TRUE)  ## Standard deviation (SD)
    lower <- centre - stderr
    upper <- centre + stderr
  } else if (bar == "SE") {  ## Standard error(SE) of mean
    stderr <- sd(x, na.rm = TRUE)/sqrt(sum(!is.na(x)))
    ## stderr <- sqrt(var(x, na.rm = TRUE)/length(x[complete.cases(x)]))
    lower <- centre - stderr
    upper <- centre + stderr
  } else if (bar == "CI") {  ## Confidence interval (CI), here 95%.
conf <- t.test(x)$conf.int lower <- conf[1] upper <- conf[2] } else { stop("'method' invalid") } res <- c(lower=lower, centre=centre, upper=upper) return(res) } ## test it vec.segment(dat[,2]) summ.2 <- df.summ(dat, method=vec.segment, bar="SE") ## ---------------------------------------------------------- #' iris data df.summ(iris) #' Group summary ## library(plyr) ## ddply(iris, .(Species), df.summ) ## (tmp <- dlply(iris, .(Species), df.summ, method=vec.segment)) ##do.call("rbind", tmp) #' or you can use summarise to get the group summary for single variable: ## ddply(iris, .(Species), summarise, ## mean=mean(Sepal.Length), std=sd(Sepal.Length)) } \keyword{util} <file_sep>/man/cor.util.Rd % lwc-16-02-2010: commence % lwc-02-09-2010: add corrgram % lwc-14-09-2010: add cor.heat.gram. \name{cor.util} \alias{cor.cut} \alias{cor.hcl} \alias{cor.heat} \alias{corrgram.circle} \alias{corrgram.ellipse} \alias{cor.heat.gram} \alias{hm.cols} \title{ Correlation Analysis Utilities } \description{ Functions to handle correlation analysis on data set. 
} \usage{ cor.cut(mat,cutoff=0.75,abs.f = FALSE, use = "pairwise.complete.obs", method = "pearson",\dots) cor.hcl(mat, cutoff=0.75, use = "pairwise.complete.obs", method = "pearson",fig.f=TRUE, hang=-1, horiz = FALSE, main = "Cluster Dendrogram", ylab = ifelse(!horiz, "1 - correlation",""), xlab = ifelse(horiz, "1 - correlation",""),\dots) cor.heat(mat, use = "pairwise.complete.obs", method = "pearson", dend = c("right", "top", "none"),\dots) corrgram.circle(co, col.regions = colorRampPalette(c("red", "white", "blue")), scales = list(x = list(rot = 90)), \dots) corrgram.ellipse(co,label=FALSE, col.regions = colorRampPalette(c("red", "white", "blue")), scales = list(x = list(rot = 90)), \dots) cor.heat.gram(mat.1, mat.2, use = "pairwise.complete.obs", method = "pearson", main="Heatmap of correlation", cex=0.75, \dots) hm.cols(low = "green", high = "red", n = 123) } % -------------------------------------------------------------------- \arguments{ \item{mat, mat.1, mat.2}{ A data frame or matrix. It should be noticed that \code{mat.1} and \code{mat.2} must have the same number of row. } \item{cutoff}{A scalar value of threshold.} \item{abs.f}{ Logical flag indicating whether the absolute values should be used. } \item{fig.f}{ Logical flag indicating whether the dendrogram of correlation matrix should be plotted. } \item{hang}{The fraction of the plot height by which labels should hang below the rest of the plot. A negative value will cause the labels to hang down from 0. See \code{\link{plot.hclust}}.} \item{horiz}{Logical indicating if the dendrogram should be drawn \emph{horizontally} or not.} \item{main, xlab, ylab}{Graphical parameters, see \code{\link{plot.default}}.} \item{dend}{Character string indicating whether to draw 'right', 'top' or 'none' dendrograms}. \item{use}{Argument for \code{\link{cor}}. An optional character string giving a method for computing covariances in the presence of missing values. 
This must be (an abbreviation of) one of the strings
\code{"everything"}, \code{"all.obs"}, \code{"complete.obs"},
\code{"na.or.complete"}, or \code{"pairwise.complete.obs"}.}
\item{method}{Argument for \code{\link{cor}}. A character string
indicating which correlation coefficient (or covariance) is to be
computed. One of \code{"pearson"}, \code{"kendall"}, or
\code{"spearman"}, can be abbreviated.}
\item{co}{Correlation matrix}
\item{label}{ A logical value indicating whether the correlation
coefficient should be plotted. }
\item{\dots}{ Additional parameters to \pkg{lattice}. }
\item{col.regions}{Color vector to be used}
\item{scales}{A list determining how the x- and y-axes (tick marks and
labels) are drawn. For more details, see \code{\link{xyplot}}. }
\item{cex}{A numeric multiplier to control character sizes for axis
labels.}
\item{low}{ Colour for low value}
\item{high}{ Colour for high value}
\item{n}{The number of colors (>= 1) to be in the palette}
}
% ----------------------------------------------------------------------
\value{
\code{cor.cut} returns a data frame with three columns, in which the
first and second columns are variable names and their correlation
(larger than cutoff) is given in the third column.

\code{cor.hcl} returns a list with components of each cluster group
and all correlation coefficients.

\code{cor.heat} returns an object of class "trellis".

\code{corrgram.circle} returns an object of class "trellis".

\code{corrgram.ellipse} returns an object of class "trellis".

\code{cor.heat.gram} returns a list including the components:
\itemize{
  \item \code{cor.heat}: An object of class "trellis" for correlation
  heatmap ordered by the hierarchical clustering.
  \item \code{cor.gram}: An object of class "trellis" for corrgrams
  with circle ordered by the hierarchical clustering.
  \item \code{cor.short}: A matrix of correlation coefficient in short
  format.
  \item \code{cor.long}: A matrix of correlation coefficient in long
  format.
} } \details{ \code{cor.cut} returns the pairs with correlation coefficient larger than \code{cutoff}. \code{cor.hcl} computes hierarchical cluster analysis based on correlation coefficient. For other graphical parameters, see \code{\link{plot.dendrogram}}. \code{cor.heat} display correlation heatmap using \pkg{lattice}. \code{corrgram.circle} and \code{corrgram.ellipse} display corrgrams with circle and ellipse. The functions are modified from codes given in Deepayan Sarkar's \code{Lattice: Multivariate Data Visualization with R, 13.3.3 Corrgrams as customized level plots, pp:238-241}. \code{cor.heat.gram} handles the correlation of two data sets which have the same row number. The best application is correlation between MS data (metabolites) and meta/clinical data. \code{hm.cols} creates a vector of n contiguous colors for heat map. } % ---------------------------------------------------------------------------- \references{ <NAME> (2002). \emph{Corrgrams: Exploratory displays for correlation matrices}. The American Statistician, 56, 316--324. <NAME>, <NAME> (1996). \emph{A graphical display of large correlation matrices}. The American Statistician, 50, 178--180. <NAME> (2008). \emph{Lattice: Multivariate Data Visualization with R}. Springer. 
} % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------- \examples{ data(iris) cor.cut(iris[,1:4],cutoff=0.8, use="pairwise.complete.obs") cor.hcl(iris[,1:4],cutoff=0.75,fig.f = TRUE) ph <- cor.heat(iris[,1:4], dend="top") ph update(ph, scales = list(x = list(rot = 45))) ## change heatmap color scheme cor.heat(iris[,1:4], dend="right", xlab="", ylab="", col.regions = colorRampPalette(c("green", "black", "red"))) ## or use hm.cols cor.heat(iris[,1:4], dend="right", xlab="", ylab="", col.regions = hm.cols()) ## prepare data set data(abr1) cls <- factor(abr1$fact$class) dat <- preproc(abr1$pos[,110:1930], method="log10") ## feature selection res <- fs.rf(dat,cls) ## take top 20 features fs <- res$fs.order[1:20] ## construct the data set for correlation analysis mat <- dat[,fs] cor.cut(mat,cutoff=0.9) ch <- cor.hcl(mat,cutoff=0.75,fig.f = TRUE, xlab="Peaks") ## plot dendrogram horizontally with coloured labels. 
ch <- cor.hcl(mat,cutoff=0.75,fig.f = TRUE, horiz=TRUE,center=TRUE, nodePar = list(lab.cex = 0.6, lab.col = "forest green", pch = NA), xlim=c(2,0)) names(ch) cor.heat(mat,dend="right") cor.heat(mat,dend="right",col.regions = colorRampPalette(c("yellow", "red"))) ## use corrgram with order by the hierarchical clustering co <- cor(mat, use="pairwise.complete.obs") ord <- order.dendrogram(as.dendrogram(hclust(as.dist(1-co)))) corrgram.circle(co[ord,ord], main="Corrgrams with circle") corrgram.ellipse(co[ord,ord], label = TRUE, main = "Corrgrams with circle", col.regions = hm.cols()) ## if without ordering corrgram.circle(co, main="Corrgrams with circle") ## example of cor.heat.gram fs.1 <- res$fs.order[21:50] mat.1 <- dat[,fs.1] res.cor <- cor.heat.gram(mat, mat.1, main="Heatmap of correlation between mat.1 and mat.2") names(res.cor) res.cor$cor.heat res.cor$cor.gram } \keyword{util} <file_sep>/man/plot.plsc.Rd % wll-23-05-2007: commence % wll-03-10-2007: add plsc stuff % wll-13-12-2007: major changes % \name{plot.plsc} \alias{plot.plsc} \alias{plot.plslda} \title{ Plot Method for Class 'plsc' or 'plslda' } \description{ Plot latent components of \code{plsc} or \code{plslda}. } \usage{ \method{plot}{plsc}(x, dimen, \dots) \method{plot}{plslda}(x, dimen, \dots) } % ---------------------------------------------------------------------------- \arguments{ \item{x}{ An object of class \code{plsc} or \code{plslda}. } \item{dimen}{ The index of latent components to be used for the plot. } \item{\dots}{ Further arguments. See corresponding entry in \code{\link{xyplot}} for non-trivial details. One argument is \code{ep}: an integer for plotting ellipse. \code{1} and \code{2} for plotting overall and group ellipse, respectively. Otherwise, none. For details, see \code{\link{panel.elli.1}}. 
}
}
% ----------------------------------------------------------------------------
\details{
Two functions are methods for the generic function \code{plot()} of
class \code{plsc} and \code{plslda}.

If the length of \code{dimen} is greater than 2, a pairs plot is used.
If the length of \code{dimen} is equal to 2, a scatter plot is drawn.
Otherwise, the dot plot is drawn for the single component.
}
\value{An object of class \code{"trellis"}.}
% ----------------------------------------------------------------------------
\author{
<NAME>
}
\seealso{
\code{\link{plsc}}, \code{\link{predict.plsc}},\code{\link{plslda}},
\code{\link{predict.plslda}}, \code{\link{pls.plot.wrap}},
\code{\link{panel.elli.1}}.
}
\examples{
data(abr1)
cl <- factor(abr1$fact$class)
dat <- abr1$pos

mod.plsc <- plsc(dat,cl,ncomp=4)
mod.plslda <- plslda(dat,cl,ncomp=4)

## Second component versus first
plot(mod.plsc,dimen=c(1,2),main = "Training data", ep = 2)
plot(mod.plslda,dimen=c(1,2),main = "Training data", ep = 2)

## Pairwise scatterplots of several components
plot(mod.plsc, main = "Training data", ep = 1)
plot(mod.plslda, main = "Training data", ep = 1)

## single component
plot(mod.plsc,dimen=c(1),main = "Training data")
plot(mod.plslda,dimen=c(1),main = "Training data")
}
\keyword{plot}
<file_sep>/man/save.tab.Rd
% lwc-15-02-2010: Previous name is my.save.tab
%
\name{save.tab}
\alias{save.tab}
\title{ Save List of Data Frame or Matrix into CSV File }
\description{
Save a list of data frame or matrix into a CSV file.
}
\usage{
save.tab(x, filename="temp.csv", firstline="\n")
}
% --------------------------------------------------------------------
\arguments{
  \item{x}{ A list of data frame or matrix. }
  \item{filename}{ A character string for saved file name. }
  \item{firstline}{ A string giving some description of the saved file. }
}
\details{
This function gives a quick option to save a set of data frame or
matrix into a single table file.
}
\value{
No return value, called for side effects.
} % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{write.table}} } % ---------------------------------------------------------------------- \examples{ \dontrun{ data(abr1) dat <- preproc(abr1$pos[,200:400], method="log10") cls <- factor(abr1$fact$class) tmp <- dat.sel(dat, cls, choices=c("1","2")) x <- tmp[[1]]$dat y <- tmp[[1]]$cls fs.method <- c("fs.anova","fs.rf","fs.rfe") fs.pars <- valipars(sampling="cv",niter=10,nreps=5) fs <- feat.mfs(x, y, fs.method, fs.pars) ## with resampling names(fs) fs <- fs[1:3] ## save consistency of feature selection filename <- "fs.csv" firstline <- paste('\nResults of feature selection', sep='') save.tab(fs, filename, firstline) } } \keyword{manip} <file_sep>/man/panel.elli.Rd % lwc-14-02-2010: % lwc-15-07-2015: major changes \name{panel.elli} \alias{panel.elli} \alias{panel.outl} \alias{panel.elli.1} \title{ Panel Function for Plotting Ellipse and outlier } \description{ \pkg{lattice} panel functions for plotting grouped ellipse and outlier } \usage{ panel.elli(x, y, groups = NULL,conf.level = 0.975, \dots) panel.elli.1(x, y, subscripts, groups=NULL, conf.level = 0.975, ep=0, com.grp=NULL, no.grp=NULL, ell.grp=NULL, \dots) panel.outl(x, y, subscripts, groups=NULL, conf.level = 0.975, labs, \dots) } \arguments{ \item{x, y}{Variables to be plotted.} \item{conf.level}{Confident level for ellipse} \item{groups, subscripts}{ Internal parameters for Lattice.} \item{labs}{Labels for potential outliers.} \item{ep}{ An integer for plotting ellipse. \code{1} and \code{2} for plotting overall and group ellipse, respectively. Otherwise, none. } \item{com.grp}{A list of characters to select which combination of groups to be plotted.} \item{no.grp}{A list of characters to select which individual group not to be plotted. Note it will be overridden by \code{com.grp}. 
If no \code{com.grp} and \code{no.grp}, ellipses of each individual
group will be plotted.}
\item{ell.grp}{Another categorical vector used for plotting ellipse.
If provided, \code{ep},\code{com.grp} and \code{no.grp} will be
ignored. It should be different from default \code{groups}, but has
the same length of \code{groups}. For details, see example below.}
\item{\dots}{Further arguments. See corresponding entry in
\code{\link{xyplot}} for non-trivial details. }
}
\value{
Returns objects of class \code{"trellis"}.
}
\details{
\code{panel.elli} is modified from function \code{\link{panel.ellipse}}
in package \pkg{latticeExtra}.

\code{panel.elli.1} gives more control on how to plot ellipse for the
current group. It also provides an option to plot ellipse based on
another user-defined grouping.

\code{panel.outl} plots the labels of data points outside the ellipse.
These data points can be treated as potential outliers.
}
\note{
\code{panel.elli.1} can be called by functions \code{grpplot},
\code{pcaplot}, \code{mdsplot}, \code{pca.plot.wrap},
\code{mds.plot.wrap}, \code{pls.plot.wrap} and \code{lda.plot.wrap} by
passing argument of \code{ep}. See examples of these functions for
details.
}
\author{
<NAME>
}
% ----------------------------------------------------------------------------
\seealso{
\code{\link{grpplot}}, \code{\link{pcaplot}}, \code{\link{mdsplot}}.
}
\examples{
library(lattice)
data(iris)

## =====================================================================
## Examples of calling 'panel.elli' and 'panel.outl'
xyplot(Sepal.Length ~ Petal.Length, data = iris, groups=Species,
       par.settings = list(superpose.symbol = list(pch=c(15:17)),
                           superpose.line = list(lwd=2, lty=1:3)),
       panel = function(x, y, ...) {
         panel.xyplot(x, y, ...)
         panel.elli(x, y, ..., type="l",lwd=2)
         panel.outl(x,y, ...)
}, auto.key = list(x = .1, y = .8, corner = c(0, 0)), labs=rownames(iris), conf.level=0.9,adj = -0.5) ## Without groups xyplot(Sepal.Length ~ Petal.Length, data = iris, par.settings = list(plot.symbol = list(cex = 1.1, pch=16)), panel = function(x, y, ...) { panel.xyplot(x, y, ...) panel.elli(x, y, ..., type="l", lwd = 2) panel.outl(x,y, ...) }, auto.key = list(x = .1, y = .8, corner = c(0, 0)), labs=rownames(iris), conf.level=0.9,adj = -0.5) ## With conditioning xyplot(Sepal.Length ~ Petal.Length|Species, data = iris, par.settings = list(plot.symbol = list(cex = 1.1, pch=16)), layout=c(2,2), panel = function(x, y, ...) { panel.xyplot(x, y, ...) panel.elli(x, y, ..., type="l", lwd = 2) panel.outl(x,y, ...) }, auto.key = list(x = .6, y = .8, corner = c(0, 0)), adj = 0,labs=rownames(iris), conf.level=0.95) ## ===================================================================== ## Examples of 'panel.elli.1' xyplot(Sepal.Length ~ Petal.Length, data = iris, groups=Species, ## --------------------------------------------------- ## Select what to be plotted. ep=2, ## com.grp = list(a="setosa",b=c("versicolor", "virginica")), ## no.grp = "setosa", ## Not draw ellipse for "setosa" ## --------------------------------------------------- par.settings = list(superpose.symbol = list(pch=c(15:17)), superpose.line = list(lwd=2, lty=1:3)), panel = function(x, y, ...) { panel.xyplot(x, y, ...) panel.elli.1(x, y, ...) panel.outl(x,y, ...) }, auto.key = list(points = TRUE, rectangles = FALSE, space = "right"), adj = 0,labs=rownames(iris), conf.level=0.95) xyplot(Sepal.Length ~ Petal.Length, data = iris, groups=Species, ## --------------------------------------------------- ## Select what to be plotted. 
ep=2, ## com.grp = list(a="setosa",b=c("versicolor", "virginica")), no.grp = c("setosa","versicolor"),## Only draw "virginica" ## --------------------------------------------------- par.settings = list(superpose.symbol = list(pch=c(15:17)), superpose.line = list(lwd=2, lty=1:3)), panel = function(x, y, ...) { panel.xyplot(x, y, ...) panel.elli.1(x, y, ...) }, auto.key = list(x = .1, y = .8, corner = c(0, 0))) xyplot(Sepal.Length ~ Petal.Length, data = iris, groups=Species, ## --------------------------------------------------- ## Select what to be plotted. ep=2, com.grp = list(a="setosa",b=c("versicolor", "virginica")), ## no.grp = "setosa", ## Not draw ellipse for "setosa" ## --------------------------------------------------- par.settings = list(superpose.symbol = list(pch=c(15:17)), superpose.line = list(lwd=2, lty=1:3)), panel = function(x, y, ...) { panel.xyplot(x, y, ...) panel.elli.1(x, y, ...) }, auto.key = list(x = .1, y = .8, corner = c(0, 0))) xyplot(Sepal.Length ~ Petal.Length, data = iris, groups=Species, ep=1, par.settings = list(superpose.symbol = list(pch=c(15:17)), superpose.line = list(lwd=2, lty=1:3)), panel = function(x, y, ...) { panel.xyplot(x, y, ...) panel.elli.1(x, y, ...) }, auto.key = list(points = TRUE, rectangles = FALSE, space = "right")) ## ===================================================================== ## Another data set from package MASS require(MASS) data(Cars93) ## Plot ellipse based on original groups: DriveTrain xyplot(Price~EngineSize, data=Cars93, groups=DriveTrain, ep=2, par.settings = list(superpose.symbol = list(pch=c(15:17)), superpose.line = list(lwd=2, lty=1:3)), panel = function(x, y, ...) { panel.xyplot(x, y, ...) panel.elli.1(x, y, ...) 
}, auto.key = list(points = TRUE, rectangles = FALSE, space = "right")) ## But we want to plot ellipse using AirBags xyplot(Price~EngineSize, data=Cars93, groups=DriveTrain, ell.grp=Cars93$AirBags, par.settings = list(superpose.symbol = list(pch=c(15:17)), superpose.line = list(lwd=2, lty=1:3)), panel = function(x, y, ...) { panel.xyplot(x, y, ...) panel.elli.1(x, y, ...) }, auto.key = list(points = TRUE, rectangles = FALSE, space = "right")) } \keyword{plot} <file_sep>/R/mt_maccest.R #' ========================================================================= #' Mult-Classifier accuracy estimation with results of accuracy and #' significant test using ANOVA plus TukeyHSD test. #' History: #' 05-12-06: commence #' 10-01-07: minor changes. #' 31-01-07: prepare RD file and put it into package 'mt' #' 24-03-07: Re-calculate improved bootstrap error. #' 28-03-07: Add overall confusion matrix #' 01-07-07: Change largely. Produced directly from accest. maccest.default <- function(dat, cl, method = "svm", pars = valipars(), tr.idx = NULL, comp = "anova", ...) { if (missing(dat) || missing(cl)) { stop("data set or class are missing") } if (missing(method)) { stop("'method' is missing") } #' if (!is.factor (cl)) stop("cl must be a factor.") cl <- as.factor(cl) #' some classifier need it as factor, such as SVM. 
if (nrow(dat) != length(cl)) stop("mat and cl don't match.") if (length(unique(cl)) < 2) { stop("Classification needs at least two classes.") } if (any(is.na(dat)) || any(is.na(cl))) { stop("NA is not permitted in data set or class labels.") } dat <- as.matrix(dat) rownames(dat) <- NULL n <- nrow(dat) #' construct index of train data if (is.null(tr.idx)) { if (pars$sampling == "cv" && pars$nreps > n) { pars$sampling <- "loocv" } tr.idx <- trainind(cl, pars = pars) } pars$niter <- length(tr.idx) pars$nreps <- length(tr.idx[[1]]) #' apply single accest for maccest res <- lapply(method, function(m) { cat("\n--Classifier = :", m) flush.console() accest(dat, cl, method = m, pars = pars, tr.idx = tr.idx, ...) }) names(res) <- method acc <- sapply(res, function(x) x$acc) acc.iter <- sapply(res, function(x) x$acc.iter) #' acc.std <- sapply(res, function(x) x$acc.std) acc.std <- sapply(res, function(x) ifelse(!is.null(x$acc.std), x$acc.std, NA)) conf <- lapply(res, function(x) x$conf) mar <- sapply(res, function(x) ifelse(!is.null(x$mar), x$mar, NA)) mar.iter <- sapply(res, function(x) x$mar.iter) auc <- sapply(res, function(x) ifelse(!is.null(x$auc), x$auc, NA)) auc.iter <- sapply(res, function(x) x$auc.iter) #' significant test if (length(method) < 2 || pars$niter < 2) { h.test <- NULL gl.pval <- NULL mc.pval <- NULL } else { comp <- match.arg(comp, c("anova", "fried")) h.test <- switch(comp, "anova" = mc.anova(acc.iter), "fried" = mc.fried(acc.iter) ) gl.pval <- h.test$gl.pval mc.pval <- h.test$mc.pval comp <- paste(names(h.test)[1], "+", names(h.test)[2], sep = " ") } #' prepare the returned values ret <- list( method = method, acc = acc, acc.std = acc.std, acc.iter = acc.iter, mar = mar, mar.iter = mar.iter, auc = auc, auc.iter = auc.iter, comp = comp, h.test = h.test, gl.pval = gl.pval, mc.pval = mc.pval, sampling = switch(pars$sampling, "loocv" = "leave-one-out cross-validation", "cv" = "cross validation", "boot" = "bootstrap", "rand" = "randomised validation 
(holdout)" ), niter = pars$niter, nreps = pars$nreps, tr.idx = tr.idx, conf = conf ) if (pars$sampling == "boot") { ret$acc.boot <- lapply(res, function(x) x$acc.boot) } class(ret) <- "maccest" return(ret) } #' ======================================================================== print.maccest <- function(x, digits = 3, ...) { cat("\nMethod:\t\t\t", x$method) cat("\nAccuracy:\t\t", round(x$acc, digits)) #' cat("\nSTD of Accuracy:\t",round(x$acc.std,digits)) cat("\nAUC:\t\t\t", round(x$auc, digits)) cat("\nMargin:\t\t\t", round(x$mar, digits)) cat("\n\nNo. of iteration:\t", x$niter) cat("\nSampling:\t\t", x$sampling) cat("\nNo. of replications:\t", x$nreps) cat("\n") if (!is.null(x$h.test)) { cat("\nComparison:\t\t", x$comp) cat("\nGlobal test p-value:\t", round(x$gl.pval, digits)) cat("\n\nMultiple comparison p-values:\n") print(round(x$mc.pval, digits)) } cat("\n") invisible(x) } #' ======================================================================== summary.maccest <- function(object, ...) { structure(object, class = "summary.maccest") } #' ======================================================================== print.summary.maccest <- function(x, digits = 3, ...) { print.maccest(x) cat("\n\nAccuracy on each iteration:\n") print(round(x$acc.iter, digits)) cat("\nSummary of accuracy on each iteration:\n") #' print(summary(x$acc.iter)) print(apply(x$acc.iter, 2, summary)) #' nicely formatted summary invisible(x) } #' ======================================================================== boxplot.maccest <- function(x, ...) 
{ if (x$niter == 1) { stop("Number of iteration (niter) must be greater than 1") } col <- "lightgray" xlab <- "Classifier" ylab <- "Accuracy Rate" ylim <- c(0, 1.0) main <- "Classifier Accuracy" boxplot(data.frame(x$acc.iter), main = main, col = col, xlab = xlab, ylab = ylab, ylim = ylim ) } #' ======================================================================= #' wll-02-12-2006: user defined x-ticks #' wll-04-12-2006: std bar #' wll-03-07-2007: Check validity of acc.std plot.maccest <- function(x, main = NULL, xlab = NULL, ylab = NULL, ...) { dots <- list(...) #' ylim <- if("ylim" %in% names(dots)) dots$ylim else #' c(min(x$acc - x$acc.std) - 0.1, max(x$acc + x$acc.std) + 0.1) ylim <- if ("ylim" %in% names(dots)) { dots$ylim } else if (!any(is.na(x$acc.std))) { c(min(x$acc - x$acc.std) - 0.1, max(x$acc + x$acc.std) + 0.1) } else { c(min(x$acc) - 0.1, max(x$acc) + 0.1) } if (is.null(main)) { main <- paste("Performance of classifier (Sampling: ", x$sampling, " )", sep = "" ) } if (is.null(xlab)) xlab <- "Classifier" if (is.null(ylab)) ylab <- "Accuracy" plot(x$acc, type = "o", main = main, xlab = xlab, ylab = ylab, col = "blue", ylim = ylim, xaxt = "n", ... ) ns <- 1:length(x$method) if (!any(is.na(x$acc.std))) { segments(ns, x$acc - x$acc.std, ns, x$acc + x$acc.std) } # Now draw the x axis with text labels axis(1, at = seq(1, length(x$method), by = 1), labels = x$method) } #' ======================================================================== maccest <- function(dat, ...) UseMethod("maccest") maccest.formula <- function(formula, data = NULL, ..., subset, na.action = na.omit) { call <- match.call() if (!inherits(formula, "formula")) { stop("method is only for formula objects") } m <- match.call(expand.dots = FALSE) if (identical(class(eval.parent(m$data)), "matrix")) { m$data <- as.data.frame(eval.parent(m$data)) } m$... 
<- NULL m$scale <- NULL m[[1]] <- as.name("model.frame") m$na.action <- na.action m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action") ret <- maccest.default(x, y, ..., na.action = na.action) ret$call <- call ret$call[[1]] <- as.name("maccest") ret$terms <- Terms if (!is.null(attr(m, "na.action"))) { ret$na.action <- attr(m, "na.action") } class(ret) <- c("maccest.formula", class(ret)) return(ret) } #' ===================================================================== #' Estimates the accuracy of pairwise comparison using multi-classifiers. #' History: #' 03-12-06: Create #' 18-12-06: keep all the results #' 03-07-07: add auc and margin #' NOTE: It is difficult to provide user-defined data partition before #' data extracting for pairwise comparison. mbinest <- function(dat, cl, choices = NULL, method, pars = valipars(), ...) { dat.bin <- .dat.sel(dat, cl, choices = choices) #' get length of each pairwise comparison len <- sapply(dat.bin$cl, length) if (pars$sampling == "cv" && pars$nreps > min(len)) { pars$sampling <- "loocv" } if (pars$sampling == "loocv") pars$niter <- 1 res <- lapply(names(dat.bin$cl), function(x) { cat("\nRun = :", x, "\n") flush.console() #' for Windows maccest(dat.bin$dat[[x]], dat.bin$cl[[x]], method = method, pars = pars, ... 
) }) acc <- t(sapply(res, function(x) x$acc)) auc <- t(sapply(res, function(x) x$auc)) mar <- t(sapply(res, function(x) x$mar)) #' get comparison names com <- apply(dat.bin$com, 1, paste, collapse = "-") names(res) <- rownames(acc) <- rownames(auc) <- rownames(mar) <- com ret <- list( all = res, com = dat.bin$com, acc = acc, auc = auc, mar = mar, method = method, niter = pars$niter, sampling = pars$sampling ) if (pars$sampling != "loocv") ret$nreps <- pars$nreps return(ret) } #' ==================================================================== #' Friedman test + Wilcoxon test for Multi-classifier significant test #' lwc-14-12-2006: commence mc.fried <- function(x, p.adjust.method = p.adjust.methods, ...) { p.adjust.method <- match.arg(p.adjust.method) #' significant test using Friedman test f.htest <- friedman.test(x, ...) #' global null hypothesis test p value gl.pval <- f.htest$p.value #' post-hoc test by Wilcoxon test dname <- colnames(x) n <- nrow(x) acc <- as.vector(x) algo <- factor(rep(dname, each = n)) #' t.htest <- pairwise.t.test(acc, algo, p.adj = "bonf",pool.sd = T,...) w.htest <- pairwise.wilcox.test(acc, algo, p.adjust.method = p.adjust.method, ...) lo <- lower.tri(w.htest$p.value, T) mc.pval <- w.htest$p.value[lo] #' pairwise comparison names dname <- dimnames(w.htest$p.value) tmp <- outer(dname[[1]], dname[[2]], paste, sep = "-") names(mc.pval) <- tmp[lower.tri(tmp, T)] ret <- list( fried = f.htest, wilcox = w.htest, gl.pval = gl.pval, mc.pval = mc.pval ) ret } #' ============================================================== #' ANOVA + TukeyHSD for Multi-classifier significant test. #' lwc-14-12-2006: commence mc.anova <- function(x, ...) { #' prepare for ANOVA dname <- colnames(x) n <- nrow(x) acc <- as.vector(x) algo <- factor(rep(dname, each = n)) #' ANOVA for the global null hypothesis test aov.tab <- summary(fm1 <- aov(acc ~ algo, ...)) gl.pval <- aov.tab[[1]][1, 5] #' post-hoc test using Tukey HSD t.htest <- TukeyHSD(fm1, "algo", ...) 
#' fm1 must be an output of 'aov' mc.pval <- t.htest$algo[, 4] #' plot(t.htest) ret <- list( anova = aov.tab, tukey = t.htest, gl.pval = gl.pval, mc.pval = mc.pval ) ret } #' ======================================================================== #' lwc-19-12-2006: normality test using shpiro.test, and plot boxplot and #' histogram #' lwc-26-02-2010: Using lattice. #' wl-12-11-2021, Fri: return two lattice objects. mc.norm <- function(x, ...) { x <- data.frame(x) #' normality test s.test <- lapply(x, function(x) shapiro.test(x)) rownames(x) <- NULL x <- stack(x) p.bw <- bwplot(~ values | ind, data = x, as.table = T, xlab = "", pch = "|", scales = list(cex = .75, relation = "free"), ...) p.hist <- histogram(~ values | ind, data = x, as.table = T, scales = list(cex = .75, relation = "free"), panel = function(x, ...) { panel.histogram(x, ...) panel.mathdensity( dmath = dnorm, col = "black", args = list(mean = mean(x), sd = sd(x)) ) }, ... ) #' densityplot(~ values | ind, data=x, as.table=T, #' scales=list(cex =.75,relation="free"), plot.points = F) #' qqmath(~ values | ind, data=x, as.table=T, #' f.value = ppoints(100), #' scales=list(cex =.75,relation="free"), #' xlab = "Standard Normal Quantiles") return(list(s.test = s.test, bwplot = p.bw, histogram = p.hist)) } #' 1) maccest.default #' 2) print.maccest #' 3) summary.maccest #' 4) print.summary.maccest #' 5) boxplot.maccest #' 6) plot.maccest #' 7) maccest #' 8) maccest.formula #' 9) mbinest #' 10) mc.fried #' 11) mc.anova #' 12) mc.norm <file_sep>/R/mt_plslda.R #' ======================================================================== #' wll-02-10-2007: tune the best number of components tune.plslda <- function(x, y, pls = "simpls", ncomp = 10, tune.pars, ...) 
{ if (missing(tune.pars)) { tune.pars <- valipars(sampling = "rand", niter = 1, nreps = 10) } cat("ncomp tune (", ncomp, "):", sep = "") res <- sapply(1:ncomp, function(i) { cat(" ", i, sep = "") flush.console() accest(x, y, pars = tune.pars, method = "plslda", pls = pls, ncomp = i, tune = FALSE, ... )$acc }) cat("\n") list(ncomp = which.max(res), acc.tune = res) } #' ======================================================================== #' PLS+LDA for classification #' History: #' wll-21-05-2007: commence #' lwc-21-05-2012: use wrapper function of "mvr". #' lwc-21-05-2012: It should not be difficult to get R2 for "accest" with #' methods of plsc and plslda. plslda.default <- function(x, y, pls = "simpls", ncomp = 10, tune = FALSE, ...) { #' Generates Class Indicator Matrix from a Factor. #' A matrix which is zero except for the column corresponding to the class. #' note:from package NNET class.ind <- function(cl) { n <- length(cl) cl <- as.factor(cl) x <- matrix(0, n, length(levels(cl))) x[(1:n) + n * (unclass(cl) - 1)] <- 1 dimnames(x) <- list(names(cl), levels(cl)) x } #' arguments validity checking if (missing(x) || missing(y)) { stop("data set or class are missing") } if (nrow(x) != length(y)) stop("x and y don't match.") if (length(unique(y)) < 2) { stop("Classification needs at least two classes.") } if (any(is.na(x)) || any(is.na(y))) { stop("NA is not permitted in data set or class labels.") } pls <- match.arg(pls, c("kernelpls", "simpls", "oscorespls")) pls.fit <- paste(pls, ".fit", sep = "") #' initialisation x <- as.matrix(x) y <- as.factor(y) n <- nrow(x) p <- ncol(x) if (ncomp < 1 || ncomp > min(n - 1, p)) { ncomp <- min(n - 1, p) #' stop("Invalid number of components, ncomp") } #' find the best number of components if (tune) { val <- tune.plslda(x, y, pls = "simpls", ncomp, ...) 
ncomp <- val$ncomp } #' Use PLS for dimension reduction #' pls.out <- do.call(pls.fit, c(list(X=x, Y=class.ind(y), ncomp=ncomp), #' list(...))) #' lwc-21-05-2012: use wrapper function of "mvr". pls.out <- plsr(class.ind(y) ~ x, method = pls, ncomp = ncomp, ...) #' Use latent variables as input data for LDA. x.lv <- unclass(pls.out$scores) #' Transform test data using weight matrix (projection)(Xt = X*W) #' Ztest <- scale(Xtest,center=pls.out$Xmeans,scale=FALSE) %*% #' pls.out$projection lda.out <- lda(x.lv, y) pred <- predict(lda.out, x.lv) conf <- table(y, pred$class) acc <- round(sum(diag(conf)) * 100 / n, 2) lc <- unclass(pls.out$scores) #' latent components colnames(lc) <- paste("LC", 1:ncol(lc), sep = "") res <- list( x = lc, cl = y, pred = pred, posterior = pred$posterior, conf = conf, acc = acc, ncomp = ncomp, pls.method = pls, pls.out = pls.out, lda.out = lda.out ) if (tune) res$acc.tune <- val$acc.tune res$call <- match.call() res$call[[1]] <- as.name("plslda") class(res) <- "plslda" return(res) } #' ======================================================================== predict.plslda <- function(object, newdata, ...) { if (!inherits(object, "plslda")) stop("object not of class \"plslda\"") if (missing(newdata)) { return(list( class = object$pred, posterior = object$posterior, x = unclass(object$pls.out$scores) )) } if (is.null(dim(newdata))) { dim(newdata) <- c(1, length(newdata)) } #' a row vector newdata <- as.matrix(newdata) if (ncol(newdata) != length(object$pls.out$Xmeans)) { stop("wrong number of variables") } #' rotated data (projection) x <- scale(newdata, center = object$pls.out$Xmeans, scale = FALSE) %*% object$pls.out$projection pred <- predict(object$lda.out, x) list(class = pred$class, posterior = pred$posterior, x = x) } #' ======================================================================== #' lwc-23-05-2007: print method for plslda print.plslda <- function(x, ...) 
{ alg <- switch(x$pls.method, kernelpls = "kernel", simpls = "simpls", oscorespls = "orthogonal scores", stop("Unknown fit method.") ) cat( "Partial least squares classification, fitted with the", alg, "algorithm." ) #' cat("\nCall:\n", deparse(x$call), "\n") cat("\nCall:\n") dput(x$call) cat("\nConfusion matrix of training data:\n") print(x$conf) cat("\nAccuracy rate of training data:\n") print(x$acc) invisible(x) } #' ======================================================================== #' lwc-23-05-2007: summary method for plslda summary.plslda <- function(object, ...) { structure(object, class = "summary.plslda") } #' ======================================================================== #' lwc-23-05-2007: summary method for plslda print.summary.plslda <- function(x, ...) { print.plslda(x) cat("\nNumber of components considered:", x$ncomp) cat("\nProportion of components:\n") evar <- 100 * x$pls.out$Xvar / x$pls.out$Xtotvar names(evar) <- paste("LC", 1:length(evar), sep = "") print(evar) lev <- levels(x$cl) cat("\nNumber of Classes: ", length(lev), "\n\n") cat("Levels:", if (is.numeric(lev)) "(as integer)", "\n", lev) cat("\n\n") } #' ========================================================================= plslda <- function(x, ...) UseMethod("plslda") #' ========================================================================= plslda.formula <- function(formula, data = NULL, ..., subset, na.action = na.omit) { call <- match.call() if (!inherits(formula, "formula")) { stop("method is only for formula objects") } m <- match.call(expand.dots = FALSE) if (identical(class(eval.parent(m$data)), "matrix")) { m$data <- as.data.frame(eval.parent(m$data)) } m$... 
<- NULL m[[1]] <- as.name("model.frame") m$na.action <- na.action m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action") ret <- plslda.default(x, y, ..., na.action = na.action) ret$call <- call ret$call[[1]] <- as.name("plslda") ret$terms <- Terms if (!is.null(attr(m, "na.action"))) { ret$na.action <- attr(m, "na.action") } class(ret) <- c("plslda.formula", class(ret)) return(ret) } #' ======================================================================== #' wll-13-12-2007: plot method for plsc using lattice. plot.plslda <- function(x, dimen, ...) { lc.names <- function(object, comps) { labs <- paste("LC", 1:length(object$Xvar), sep = "") if (missing(comps)) { comps <- seq(along = labs) } else { labs <- labs[comps] } evar <- 100 * object$Xvar / object$Xtotvar evar <- evar[comps] labs <- paste(labs, " (", format(evar, digits = 2, trim = TRUE), " %)", sep = "" ) return(labs) } if (missing(dimen)) { dimen <- seq(along = colnames(x$x)) } else { #' check validity if (!all(dimen %in% c(1:ncol(x$x)))) { stop("dimen is not valid") } } dfn <- lc.names(x$pls.out, dimen) y <- x$cl x <- data.frame(x$x[, dimen, drop = FALSE]) names(x) <- dfn #' call group plot p <- grpplot(x, y, plot = "pairs", ...) p } #' ========================================================================= #' lwc-22-05-2007: plot method for plslda. It plot PLS latent components. plot.plslda.1 <- function(x, panel = panel.plslda, cex = 0.7, dimen, abbrev = FALSE, ...) { panel.plslda <- function(x, y, ...) { text(x, y, as.character(g.nlda), cex = tcex, col = unclass(g), ...) 
} lc.names <- function(object, comps) { labs <- paste("LC", 1:length(object$Xvar), sep = "") if (missing(comps)) { comps <- seq(along = labs) } else { labs <- labs[comps] } evar <- 100 * object$Xvar / object$Xtotvar evar <- evar[comps] labs <- paste(labs, " (", format(evar, digits = 2, trim = TRUE), " %)", sep = "" ) return(labs) } xval <- x$x g <- x$cl if (abbrev) levels(g) <- abbreviate(levels(g), abbrev) assign("g.nlda", g) assign("tcex", cex) if (missing(dimen)) { dimen <- seq(along = colnames(xval)) } else { #' check validity if (!all(dimen %in% c(1:ncol(xval)))) { stop("dimen is not valid") } } xval <- xval[, dimen, drop = FALSE] varlab <- lc.names(x$pls.out, dimen) nDimen <- length(dimen) if (nDimen <= 2) { if (nDimen == 1) { #' One component ldahist(xval[, 1], g, ...) #' ldahist(xval, g, xlab=varlab,...) } else { #' Second component versus first xlab <- varlab[1] ylab <- varlab[2] eqscplot(xval, xlab = xlab, ylab = ylab, type = "n", ...) panel(xval[, 1], xval[, 2], ...) } } else { #' Pairwise scatterplots of several components pairs(xval, labels = varlab, panel = panel, ...) } invisible(NULL) } #' 1) tune.plslda #' 2) plslda.default #' 3) predict.plslda #' 4) print.plslda #' 5) summary.plslda #' 6) print.summary.plslda #' 7) plslda #' 8) plslda.formula #' 9) plot.plslda #' 10) plot.plslda.1 <file_sep>/man/boot.err.Rd \name{boot.err} \alias{boot.err} \title{Calculate .632 and .632+ Bootstrap Error Rate} \description{ Calculate .632 bootstrap and .632 plus bootstrap error rate. } \usage{ boot.err(err, resub) } \arguments{ \item{err}{ Average error rate of bootstrap samples.} \item{resub}{ A list including apparent error rate, class label and the predicted class label of the original training data (not resampled training data). Can be generated by \code{\link{classifier}}. 
} } \value{ A list with the following components: \item{ae}{Apparent error rate.} \item{boot}{Average error rate of bootstrap samples(Same as \code{err})} \item{b632 }{.632 bootstrap error rate.} \item{b632p}{.632 plus bootstrap error rate.} } \references{ <NAME>. and <NAME>. (2005) \emph{Data Mining - Practical Machine Learning and Techniques}. Elsevier. <NAME>. and <NAME>. (1993) \emph{An Introduction to the Bootstrap}. Chapman & Hall. <NAME>. and <NAME>. (1997) Improvements on cross-validation: the .632+ bootstrap method. \emph{Journal of the American Statistical Association}, \bold{92}, 548-560. } % ---------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{classifier}} } \examples{ ## iris data set data(iris) x <- subset(iris, select = -Species) y <- iris$Species ## 10 bootstrap training samples pars <- valipars(sampling = "boot", niter = 1, nreps = 10) tr.idx <- trainind(y, pars=pars)[[1]] ## bootstrap error rate err <- sapply(tr.idx, function(i){ pred <- classifier(x[i,,drop = FALSE],y[i],x[-i,,drop = FALSE],y[-i], method = "knn")$err }) ## average bootstrap error rate err <- mean(err) ## apparent error rate resub <- classifier(x,y,method = "knn") ## err.boot <- boot.err(err, resub) } \keyword{classif} <file_sep>/man/classifier.Rd % wll-11-01-2007 % wll-02-07-2007: add some stuff of posteriot, marin and auc \name{classifier} \alias{classifier} \title{ Wrapper Function for Classifiers } \description{ Wrapper function for classifiers. The classification model is built up on the training data and error estimation is performed on the test data. } \usage{ classifier(dat.tr, cl.tr, dat.te=NULL, cl.te=NULL, method, pred.func=predict,\dots) } % -------------------------------------------------------------------- \arguments{ \item{dat.tr}{ A data frame or matrix of training data. The classification model are built on it. } \item{cl.tr}{ A factor or vector of training class. 
} \item{dat.te}{ A data frame or matrix of test data. Error rates are calculated on this data set. } \item{cl.te}{ A factor or vector of test class. } \item{method}{ Classification method to be used. Any classification methods can be employed if they have method \code{predict} (except \code{knn}) with output of predicted class label or one component with name of \code{class} in the returned list, such as \code{randomForest}, \code{svm}, \code{knn} and \code{lda}. Either a function or a character string naming the function to be called } \item{pred.func}{ Predict method (default is \code{predict}). Either a function or a character string naming the function to be called. } \item{\dots}{Additional parameters to \code{method}.} } % ---------------------------------------------------------------------------- \value{ A list including components: \item{err}{Error rate of test data.} \item{cl}{The original class of test data.} \item{pred}{The predicted class of test data.} \item{posterior}{ Posterior probabilities for the classes if \code{method} provides posterior output. } \item{acc}{ Accuracy rate of classification.} \item{margin}{ The margin of predictions from classifier \code{method} if it provides posterior output. The margin of a data point is defined as the proportion of probability for the correct class minus maximum proportion of probabilities for the other classes. Positive margin means correct classification, and vice versa. This idea come from package \pkg{randomForest}. For more details, see \code{\link[randomForest]{margin}}. } \item{auc}{ The area under receiver operating curve (AUC) if classifier \code{method} produces posterior probabilities and the classification is for two-class problem. } } % ---------------------------------------------------------------------------- \note{ The definition of margin is based on the posterior probabilities. 
Classifiers, such as \code{\link[randomForest]{randomForest}}, \code{\link[e1071]{svm}}, \code{\link[MASS]{lda}}, \code{\link[MASS]{qda}}, \code{\link{pcalda}} and \code{\link{plslda}}, do output posterior probabilities. But \code{\link[class]{knn}} does not. } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{accest}}, \code{\link{maccest}} } % ---------------------------------------------------------------------- \examples{ data(abr1) dat <- preproc(abr1$pos[,110:500], method="log10") cls <- factor(abr1$fact$class) ## tmp <- dat.sel(dat, cls, choices=c("1","2")) ## dat <- tmp[[1]]$dat ## cls <- tmp[[1]]$cls idx <- sample(1:nrow(dat), round((2/3)*nrow(dat)), replace = FALSE) ## constrcuct train and test data train.dat <- dat[idx,] train.cl <- cls[idx] test.dat <- dat[-idx,] test.cl <- cls[-idx] ## estimates accuracy res <- classifier(train.dat, train.cl, test.dat, test.cl, method="randomForest") res ## get confusion matrix cl.rate(obs=res$cl, res$pred) ## same as: cl.rate(obs=test.cl, res$pred) ## Measurements of Forensic Glass Fragments data(fgl, package = "MASS") # in MASS package dat <- subset(fgl, grepl("WinF|WinNF",type)) ## dat <- subset(fgl, type %in% c("WinF", "WinNF")) x <- subset(dat, select = -type) y <- factor(dat$type) ## construct train and test data idx <- sample(1:nrow(x), round((2/3)*nrow(x)), replace = FALSE) tr.x <- x[idx,] tr.y <- y[idx] te.x <- x[-idx,] te.y <- y[-idx] res.1 <- classifier(tr.x, tr.y, te.x, te.y, method="svm") res.1 cl.rate(obs=res.1$cl, res.1$pred) ## classification performance for the two-class case. pos <- "WinF" ## select positive level cl.perf(obs=res.1$cl, pre=res.1$pred, pos=pos) ## ROC and AUC cl.roc(stat=res.1$posterior[,pos],label=res.1$cl, pos=pos) } \keyword{classif} <file_sep>/man/grpplot.Rd % lwc-19-12-2007: % lwc-21-02-2010: change name as grpplot % lwc-15-07-2015: remove 'ep' from function argument and add the link of % panel.elli.1. 
\name{grpplot} \alias{grpplot} \title{Plot Matrix-Like Object by Group} \description{Plot matrix-like object by group} \usage{ grpplot(x, y, plot = "pairs", \dots) } \arguments{ \item{x}{A matrix or data frame to be plotted. } \item{y}{A factor or vector giving group information of columns of \code{x}.} \item{plot}{ One of plot types: \code{strip}, \code{box}, \code{density} and \code{pairs}. } \item{\dots}{Further arguments. See corresponding entry in \code{\link{xyplot}} for non-trivial details. One argument is \code{ep}: an integer for plotting ellipse. \code{1} and \code{2} for plotting overall and group ellipse, respectively. Otherwise, none. For details, see \code{\link{panel.elli.1}}. } } \value{An object of class \code{"trellis"}.} % ---------------------------------------------------------------------------- \seealso{ \code{\link{panel.elli.1}}, \code{\link{pcaplot}}, \code{\link{pca.plot.wrap}}, \code{\link{lda.plot.wrap}}, \code{\link{pls.plot.wrap}}. } \author{<NAME> } \examples{ data(iris) grpplot(iris[,1:4], iris[,5],plot="strip", main="IRIS DATA") grpplot(iris[,1:4], iris[,5],plot="box", main="IRIS DATA") grpplot(iris[,1:4], iris[,5],plot="density", main="IRIS DATA") grpplot(iris[,1:4], iris[,5],plot="pairs",main="IRIS DATA",ep=2) ## returns an object of class "trellis". tmp <- grpplot(iris[,c(2,1)], iris[,5],main="IRIS DATA",ep=2) tmp ## change symbol's color, type and size grpplot(iris[,c(2,1)], iris[,5],main="IRIS DATA", cex=1.5, auto.key=list(space="right", col=c("black","blue","red")), par.settings = list(superpose.symbol = list(col=c("black","blue","red"), pch=c(1:3)))) } \keyword{plot} <file_sep>/man/cl.perf.Rd % lwc-16-09-2006 % lwc-05-05-2010: \name{cl.rate} \alias{cl.rate} \alias{cl.perf} \alias{cl.roc} \alias{cl.auc} \title{ Assess Classification Performances } \description{ Assess classification performances. 
} \usage{ cl.rate(obs, pre) cl.perf(obs, pre, pos=levels(as.factor(obs))[2]) cl.roc(stat, label, pos=levels(as.factor(label))[2], plot=TRUE, \dots) cl.auc(stat, label, pos=levels(as.factor(label))[2]) } \arguments{ \item{obs}{ Factor or vector of observed class. } \item{pre}{ Factor or vector of predicted class. } \item{stat}{ Factor or vector of statistics for positives/cases. } \item{label}{ Factor or vector of label for categorical data. } \item{pos}{ Characteristic string for positive. } \item{plot}{ Logical flag indicating whether ROC should be plotted. } \item{\dots}{ Further arguments for plotting. } } % ---------------------------------------------------------------------------- \details{ \code{cl.perf} gets the classification performances such as accuracy rate and false positive rate. \code{cl.roc} computes receiver operating characteristics (ROC). \code{cl.auc} calculates area under ROC curve. Three functions are only for binary class problems. } \note{ AUC varies between 0.5 and 1.0 for sensible models; the higher the better. If it is less than 0.5, it should be corrected by \code{1 - AUC}. Or re-run it by using \code{1 - stat}. } % ---------------------------------------------------------------------- \value{ \code{cl.rate} returns a list with components: \item{acc}{ Accuracy rate of classification.} \item{err}{ Error rate of classification.} \item{con.mat}{ Confusion matrix. } \item{kappa}{ Kappa Statistics.} \code{cl.perf} returns a list with components: \item{acc}{ Accuracy rate} \item{tpr}{ True positive rate} \item{fpr}{ False positive rate} \item{sens}{ Sensitivity} \item{spec}{ Specificity} \item{con.mat}{ Confusion matrix. 
} \item{kappa}{ Kappa Statistics.} \item{positive}{ Positive level.} \code{cl.roc} returns a list with components: \item{perf}{A data frame of \code{acc}, \code{tpr},\code{fpr},\code{sens}, \code{spec} and \code{cutoff} (thresholds).} \item{auc}{ Area under ROC curve} \item{positive}{ Positive level.} \code{cl.auc} returns a scalar value of AUC. } \references{ <NAME>. (2006) \emph{An introduction to ROC analysis}. \emph{Pattern Recognition Letters}. vol. 27, 861-874. } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------- \examples{ ## Measurements of Forensic Glass Fragments library(MASS) data(fgl, package = "MASS") # in MASS package dat <- subset(fgl, grepl("WinF|WinNF",type)) ## dat <- subset(fgl, type %in% c("WinF", "WinNF")) x <- subset(dat, select = -type) y <- factor(dat$type) ## construct train and test data idx <- sample(1:nrow(x), round((2/3)*nrow(x)), replace = FALSE) tr.x <- x[idx,] tr.y <- y[idx] te.x <- x[-idx,] te.y <- y[-idx] model <- lda(tr.x, tr.y) ## predict the test data results pred <- predict(model, te.x) ## classification performances obs <- te.y pre <- pred$class cl.rate(obs, pre) cl.perf(obs, pre, pos="WinNF") ## change positive as "WinF" cl.perf(obs, pre, pos="WinF") ## ROC and AUC pos <- "WinNF" ## or "WinF" stat <- pred$posterior[,pos] ## levels(obs) <- c(0,1) cl.auc (stat,obs, pos=pos) cl.roc (stat,obs, pos=pos) ## test examples for ROC and AUC label <- rbinom(30,size=1,prob=0.2) stat <- rnorm(30) cl.roc(stat,label, pos=levels(factor(label))[2],plot = TRUE) cl.auc(stat,label,pos=levels(factor(label))[2]) ## if auc is less than 0.5, it should be adjusted by 1 - auc. ## Or re-run them: cl.roc(1 - stat,label, pos=levels(factor(label))[2],plot = TRUE) cl.auc(1 - stat,label,pos=levels(factor(label))[2]) } \keyword{classif} <file_sep>/man/mdsplot.Rd % wll-10-06-2015: copy from pcaplot.Rd and change it in Vim. 
\name{mdsplot} \alias{mdsplot} \title{Plot Classical Multidimensional Scaling} \description{ Plot metric MDS with categorical information. } \usage{ mdsplot(x, y, method = "euclidean", dimen = c(1,2), \dots) } \arguments{ \item{x}{A matrix or data frame to be plotted. } \item{y}{A factor or vector giving group information of columns of \code{x}.} \item{method}{ The distance measure to be used. This must be one of "euclidean", "maximum", "manhattan", "canberra", "binary" or "minkowski". Any unambiguous substring can be given. It is only for \code{mds.plot.wrap}. } \item{dimen}{ A vector of index of dimentonal to be plotted. Only two dimentions are are allowed.} \item{\dots}{ Further arguments to \code{\link{prcomp}} or \code{lattice}. See corresponding entry in \code{\link{xyplot}} for non-trivial details of \code{lattice}. For \code{pcaplot}, one argument is \code{ep}: an integer for plotting 95\% ellipse. \code{1} and \code{2} for plotting overall and group ellipse, respectively. Otherwise, none. For details, see \code{\link{panel.elli.1}}. } } \value{ \code{mdsplot} returns an object of class \code{"trellis"}. 
} \author{ <NAME> } \seealso{ \code{\link{grpplot}}, \code{\link{panel.elli}}, \code{\link{mds.plot.wrap}}, \code{\link{pcaplot}} } % ---------------------------------------------------------------------------- \examples{ ## examples of 'mdsplot' data(iris) x <- iris[,1:4] y <- iris[,5] mdsplot(x,y, dimen=c(1,2),ep=2) mdsplot(x,y, dimen=c(2,1),ep=1) tmp <- mdsplot(x,y, ep=2, conf.level = 0.9) tmp ## change symbol's color, type and size mdsplot(x, y, main="IRIS DATA", cex=1.2, auto.key=list(space="right", col=c("black","blue","red"), cex=1.2), par.settings = list(superpose.symbol = list(col=c("black","blue","red"), pch=c(1:3)))) } \keyword{plot} <file_sep>/R/mt_plsc.R #' ========================================================================= #' wll-02-10-2007: tune the best number of components tune.plsc <- function(x, y, pls = "simpls", ncomp = 10, tune.pars, ...) { if (missing(tune.pars)) { tune.pars <- valipars(sampling = "rand", niter = 1, nreps = 10) } cat("ncomp tune (", ncomp, "):", sep = "") res <- sapply(1:ncomp, function(i) { cat(" ", i, sep = "") flush.console() accest(x, y, pars = tune.pars, method = "plsc", pls = pls, ncomp = i, tune = FALSE, ... )$acc }) cat("\n") list(ncomp = which.max(res), acc.tune = res) } #' ======================================================================== #' PLS for classification #' History: #' wll-01-10-2007: commence #' lwc-21-05-2012: use wrapper function of "mvr". #' lwc-21-05-2012: It should not be difficult to get R2 for "accest" with #' methods of plsc and plslda. plsc.default <- function(x, y, pls = "simpls", ncomp = 10, tune = FALSE, ...) 
{ #' arguments validity checking if (missing(x) || missing(y)) { stop("data set or class are missing") } if (nrow(x) != length(y)) stop("x and y don't match.") if (length(unique(y)) < 2) { stop("Classification needs at least two classes.") } if (any(is.na(x)) || any(is.na(y))) { stop("NA is not permitted in data set or class labels.") } pls <- match.arg(pls, c("kernelpls", "simpls", "oscorespls")) pls.fit <- paste(pls, ".fit", sep = "") #' initialisation x <- as.matrix(x) y <- as.factor(y) n <- nrow(x) p <- ncol(x) if (ncomp < 1 || ncomp > min(n - 1, p)) { ncomp <- min(n - 1, p) #' stop("Invalid number of components, ncomp") } #' find the best number of components if (tune) { val <- tune.plsc(x, y, pls = "simpls", ncomp = ncomp, ...) ncomp <- val$ncomp } #' Convert response to be suitable format for PLS y.1 <- class.ind(y) #' Call PLS for regression #' pls.out <- do.call(pls.fit, c(list(X=x, Y=y.1, ncomp=ncomp), list(...))) #' lwc-21-05-2012: use wrapper function of "mvr". pls.out <- plsr(y.1 ~ x, method = pls, ncomp = ncomp, ...) names(pls.out$Ymeans) <- colnames(y.1) #' NOTE: New version of pls strip off the names of Ymeans #' Predict with models containing ncomp components. 
B <- pls.out$coefficients[, , ncomp, drop = T] if (length(pls.out$Xmeans) > 1) { #' wll: 07-01-2008 B0 <- pls.out$Ymeans - pls.out$Xmeans %*% B } else { B0 <- pls.out$Ymeans - pls.out$Xmeans * B } B0 <- rep(B0, each = nrow(x)) B1 <- x %*% B pred <- B1 + B0 #' softmax for convert values to probabilities poste <- exp(pred) poste <- poste / rowSums(poste) #' generates the predict class nm <- names(pls.out$Ymeans) pred.cl <- factor(nm[max.col(poste)], levels = levels(y)) dimnames(poste) <- list(rownames(x), nm) conf <- table(y, pred.cl) acc <- round(sum(diag(conf)) * 100 / n, 2) lc <- unclass(pls.out$scores) #' latent components colnames(lc) <- paste("LC", 1:ncol(lc), sep = "") res <- list( x = lc, cl = y, pred = pred.cl, posterior = poste, conf = conf, acc = acc, ncomp = ncomp, pls.method = pls, pls.out = pls.out ) if (tune) res$acc.tune <- val$acc.tune res$call <- match.call() res$call[[1]] <- as.name("plsc") class(res) <- "plsc" return(res) } #' ========================================================================= #' wll-01-10-2007: predict method. See predict.mvr and coef.mvr in package #' pls for details. #' NOTE: More comments are in function pred.pls which can be call directly #' by simpls.fit etc. predict.plsc <- function(object, newdata, ...) { if (!inherits(object, "plsc")) stop("object not of class \"plsc\"") if (missing(newdata)) { return(list( class = object$pred, posterior = object$posterior, x = unclass(object$pls.out$scores) )) } if (is.null(dim(newdata))) { dim(newdata) <- c(1, length(newdata)) } #' a row vector newdata <- as.matrix(newdata) if (ncol(newdata) != length(object$pls.out$Xmeans)) { stop("wrong number of variables") } #' rotated data (projection) x <- scale(newdata, center = object$pls.out$Xmeans, scale = FALSE) %*% object$pls.out$projection #' Predict with models containing ncomp components. 
ncomp <- object$ncomp B <- object$pls.out$coefficients[, , ncomp, drop = T] if (length(object$pls.out$Xmeans) > 1) { #' wll: 07-01-2008 B0 <- object$pls.out$Ymeans - object$pls.out$Xmeans %*% B } else { B0 <- object$pls.out$Ymeans - object$pls.out$Xmeans * B } B0 <- rep(B0, each = nrow(newdata)) B1 <- newdata %*% B pred <- B1 + B0 poste <- exp(pred) poste <- poste / rowSums(poste) #' predict classification nm <- names(object$pls.out$Ymeans) cl <- factor(nm[max.col(poste)], levels = levels(object$cl)) #' NOTE: The levels of factor must be consistent with train data set. dimnames(poste) <- list(rownames(x), nm) list(class = cl, posterior = poste, x = x) } #' ======================================================================== #' wll-23-05-2007: print method for plsc print.plsc <- function(x, ...) { alg <- switch(x$pls.method, kernelpls = "kernel", simpls = "simpls", oscorespls = "orthogonal scores", stop("Unknown fit method.") ) cat("Partial least squares classification, fitted with the", alg, "algorithm.") #' cat("\nCall:\n", deparse(x$call), "\n") cat("\nCall:\n") dput(x$call) cat("\nConfusion matrix of training data:\n") print(x$conf) cat("\nAccuracy rate of training data:\n") print(x$acc) invisible(x) } #' ======================================================================== #' wll-23-05-2007: summary method for plsc summary.plsc <- function(object, ...) { structure(object, class = "summary.plsc") } #' ======================================================================== #' lwc-23-05-2007: summary method for plsc print.summary.plsc <- function(x, ...) 
{ print.plsc(x) cat("\nNumber of components considered:", x$ncomp) cat("\nProportion of components:\n") evar <- 100 * x$pls.out$Xvar / x$pls.out$Xtotvar names(evar) <- paste("LC", 1:length(evar), sep = "") print(evar) lev <- levels(x$cl) cat("\nNumber of Classes: ", length(lev), "\n\n") cat("Levels:", if (is.numeric(lev)) "(as integer)", "\n", lev) cat("\n\n") } #' ========================================================================= plsc <- function(x, ...) UseMethod("plsc") #' ========================================================================= plsc.formula <- function(formula, data = NULL, ..., subset, na.action = na.omit) { call <- match.call() if (!inherits(formula, "formula")) { stop("method is only for formula objects") } m <- match.call(expand.dots = FALSE) if (identical(class(eval.parent(m$data)), "matrix")) { m$data <- as.data.frame(eval.parent(m$data)) } m$... <- NULL m[[1]] <- as.name("model.frame") m$na.action <- na.action m <- eval(m, parent.frame()) Terms <- attr(m, "terms") attr(Terms, "intercept") <- 0 x <- model.matrix(Terms, m) y <- model.extract(m, "response") attr(x, "na.action") <- attr(y, "na.action") <- attr(m, "na.action") ret <- plsc.default(x, y, ..., na.action = na.action) ret$call <- call ret$call[[1]] <- as.name("plsc") ret$terms <- Terms if (!is.null(attr(m, "na.action"))) { ret$na.action <- attr(m, "na.action") } class(ret) <- c("plsc.formula", class(ret)) return(ret) } #' ======================================================================== #' wll-13-12-2007: plot method for plsc using lattice. plot.plsc <- function(x, dimen, ...) 
{ lc.names <- function(object, comps) { labs <- paste("LC", 1:length(object$Xvar), sep = "") if (missing(comps)) { comps <- seq(along = labs) } else { labs <- labs[comps] } evar <- 100 * object$Xvar / object$Xtotvar evar <- evar[comps] labs <- paste(labs, " (", format(evar, digits = 2, trim = TRUE), " %)", sep = "" ) return(labs) } if (missing(dimen)) { dimen <- seq(along = colnames(x$x)) } else { #' check validity if (!all(dimen %in% c(1:ncol(x$x)))) { stop("dimen is not valid") } } dfn <- lc.names(x$pls.out, dimen) y <- x$cl x <- data.frame(x$x[, dimen, drop = FALSE]) names(x) <- dfn #' call group plot p <- grpplot(x, y, plot = "pairs", ...) p } #' ========================================================================= #' wll-22-05-2007: plot method for plsc. It plot PLS latent components. plot.plsc.1 <- function(x, panel = panel.plsc, cex = 0.7, dimen, abbrev = FALSE, ...) { panel.plsc <- function(x, y, ...) { text(x, y, as.character(g.nlda), cex = tcex, col = unclass(g), ...) } lc.names <- function(object, comps) { labs <- paste("LC", 1:length(object$Xvar), sep = "") if (missing(comps)) { comps <- seq(along = labs) } else { labs <- labs[comps] } evar <- 100 * object$Xvar / object$Xtotvar evar <- evar[comps] labs <- paste(labs, " (", format(evar, digits = 2, trim = TRUE), " %)", sep = "" ) return(labs) } xval <- x$x g <- x$cl if (abbrev) levels(g) <- abbreviate(levels(g), abbrev) assign("g.nlda", g) assign("tcex", cex) if (missing(dimen)) { dimen <- seq(along = colnames(xval)) } else { #' check validity if (!all(dimen %in% c(1:ncol(xval)))) { stop("dimen is not valid") } } xval <- xval[, dimen, drop = FALSE] varlab <- lc.names(x$pls.out, dimen) nDimen <- length(dimen) if (nDimen <= 2) { if (nDimen == 1) { #' One component ldahist(xval[, 1], g, ...) #' ldahist(xval, g, xlab=varlab,...) } else { #' Second component versus first xlab <- varlab[1] ylab <- varlab[2] eqscplot(xval, xlab = xlab, ylab = ylab, type = "n", ...) panel(xval[, 1], xval[, 2], ...) 
} } else { #' Pairwise scatterplots of several components pairs(xval, labels = varlab, panel = panel, ...) } invisible(NULL) } #' 1) tune.plsc #' 2) plsc.default #' 3) predict.plsc #' 4) print.plsc #' 5) summary.plsc #' 6) print.summary.plsc #' 7) plsc #' 8) plsc.formula #' 9) plot.plsc #' 10) plot.plsc.1 <file_sep>/man/plot.accest.Rd % lwc-10-10-2006: First draft % \name{plot.accest} \alias{plot.accest} \title{ Plot Method for Class 'accest' } \description{ Plot accuracy rate of each iteration. } \usage{ \method{plot}{accest}(x, main = NULL, xlab = NULL, ylab = NULL, \dots) } % ---------------------------------------------------------------------------- \arguments{ \item{x}{ An object of class \code{accest}. } \item{main}{ An overall title for the plot. } \item{xlab}{ A title for the x axis. } \item{ylab}{ A title for the y axis. } \item{\dots}{ Additional arguments to the plot. } } \value{ Returns plot of class \code{accest}. } % ---------------------------------------------------------------------------- \details{ This function is a method for the generic function \code{plot()} for class \code{accest}. It plots the accuracy rate against the index of iterations. } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{accest}} } \examples{ # Iris data data(iris) # Stratified cross-validation of PCALDA for Iris data pars <- valipars(sampling="cv", niter=10, nreps=10, strat=TRUE) acc <- accest(Species~., data = iris, method = "pcalda", pars = pars) acc summary(acc) plot(acc) } \keyword{plot} <file_sep>/man/panel.smooth.line.Rd % lwc-14-02-2010: % \name{panel.smooth.line} \alias{panel.smooth.line} \title{ Panel Function for Plotting Regression Line} \description{ \pkg{lattice} panel function for plotting regression line with red colour. } \usage{ panel.smooth.line(x, y,\dots) } \arguments{ \item{x, y}{ Variables to be plotted.} \item{\dots}{Further arguments. 
See corresponding entry in \code{\link{xyplot}} for non-trivial details. } } \value{ An object of class \code{"trellis"}. } \author{<NAME> } \examples{ library(lattice) data(iris) splom(~iris[,1:4], varname.cex = 1.0, pscales = 0, panel = panel.smooth.line) } \keyword{plot} <file_sep>/man/accest.Rd % lwc-14-09-2006: First draft % lwc-09-10-2006: Modify the method description and note. % \name{accest} \alias{accest} \alias{aam.cl} \alias{aam.mcl} \alias{accest.formula} \alias{accest.default} \alias{print.accest} \alias{summary.accest} \alias{print.summary.accest} \title{Estimate Classification Accuracy By Resampling Method} % ------------------------------------------------------------------------ \description{ Estimate classification accuracy rate by resampling method. } % ------------------------------------------------------------------------ \usage{ accest(dat, \dots) \method{accest}{default}(dat, cl, method, pred.func=predict,pars = valipars(), tr.idx = NULL, \dots) \method{accest}{formula}(formula, data = NULL, \dots, subset, na.action = na.omit) aam.cl(x,y,method, pars = valipars(),\dots) aam.mcl(x,y,method, pars = valipars(),\dots) } % ------------------------------------------------------------------------ \arguments{ \item{formula}{ A formula of the form \code{groups ~ x1 + x2 + \dots} That is, the response is the grouping factor and the right hand side specifies the (non-factor) discriminators. } \item{data}{ Data frame from which variables specified in \code{formula} are preferentially to be taken. } \item{dat,x}{ A matrix or data frame containing the explanatory variables if no formula is given as the principal argument. } \item{cl,y}{ A factor specifying the class for each observation if no formula principal argument is given. } \item{method}{ Classification method whose accuracy rate is to be estimated, such as \code{randomForest}, \code{svm}, \code{knn} and \code{lda}. For details, see \code{note} below. 
Either a function or a character string naming the function to be called.
  }
  \item{pred.func}{
  Predict method (default is \code{predict}).
  Either a function or a character string naming the function to be called.
  }
  \item{pars}{
  A list of parameters used by the resampling method such as
  \emph{Leave-one-out cross-validation}, \emph{Cross-validation},
  \emph{Bootstrap} and \emph{Randomised validation (holdout)}.
  See \code{\link{valipars}} for details.
  }
  \item{tr.idx}{
  User-defined index of training samples. Can be generated by \code{trainind}.
  }
  \item{\dots}{Additional parameters to \code{method}.}
  \item{subset}{
  Optional vector, specifying a subset of observations to be used.
  }
  \item{na.action}{
  Function which indicates what should happen when the data
  contains \code{NA}'s, defaults to \code{\link{na.omit}}.
  }
}

% ----------------------------------------------------------------------------
\details{
  The accuracy rates of classification are estimated by techniques such
  as \emph{Random Forest}, \emph{Support Vector Machine}, \emph{k-Nearest
  Neighbour Classification} and \emph{Linear Discriminant Analysis} based
  on resampling methods, including \emph{Leave-one-out cross-validation},
  \emph{Cross-validation}, \emph{Bootstrap} and \emph{Randomised
  validation (holdout)}.
}

% ----------------------------------------------------------------------------
\note{
  \code{accest} can take any classification model if its argument format
  is \code{model(formula, data, subset, na.action, ...)} and its
  corresponding method \code{predict.model(object, newdata, ...)} can
  either return only the predicted class label or a list with a component
  called \code{class}, such as \code{lda} and \code{pcalda}.

  If classifier \code{method} provides posterior probabilities, the
  prediction margin \code{mar} will be generated, otherwise \code{NULL}.
If classifier \code{method} provides posterior probabilities and the classification is for two-class problem, \code{auc} will be generated, otherwise \code{NULL}. \code{aam.cl} is a wrapper function of \code{accest}, returning accuracy rate, AUC and classification margin. \code{aam.mcl} accepts multiple classifiers in one running. } % ---------------------------------------------------------------------------- \value{ \code{accest} returns an object including the components: \item{method}{Classification method used.} \item{acc}{Overall accuracy rate.} \item{acc.iter}{Average accuracy rate for each iteration.} \item{acc.all}{Accuracy rate for each iteration and replication.} \item{auc}{Overall area under receiver operating curve (AUC).} \item{auc.iter}{Average AUC for each iteration.} \item{auc.all}{AUC for each iteration and replication.} \item{mar}{Overall prediction margin.} \item{mar.iter}{Average prediction margin for each iteration.} \item{mar.all}{Prediction margin for each iteration and replication.} \item{err}{Overall error rate.} \item{err.iter}{Average error rate for each iteration.} \item{err.all}{Error rate for each iteration and replication.} \item{sampling}{Sampling scheme used.} \item{niter}{Number of iteration.} \item{nreps}{Number of replications in each iteration if resampling is not \code{loocv}. } \item{conf}{Overall confusion matrix.} \item{res.all}{All results which can be further processed.} \item{acc.boot}{ A list of bootstrap accuracy such as \code{.632} and \code{.632+} if the resampling method is bootstrap. } \code{aam.cl} returns a vector with \code{acc} (accuracy), \code{auc}(area under ROC curve) and \code{mar}(class margin). \code{aam.mcl} returns a matrix with columns of \code{acc} (accuracy), \code{auc}(area under ROC curve) and \code{mar}(class margin). 
} % ------------------------------------------------------------------------ \author{ <NAME> } \seealso{ \code{\link{binest}}, \code{\link{maccest}}, \code{\link{valipars}}, \code{\link{trainind}}, \code{\link{classifier}} } % ------------------------------------------------------------------------ \examples{ # Iris data data(iris) # Use KNN classifier and bootstrap for resampling acc <- accest(Species~., data = iris, method = "knn", pars = valipars(sampling = "boot",niter = 2, nreps=5)) acc summary(acc) acc$acc.boot # alternatively the traditional interface: x <- subset(iris, select = -Species) y <- iris$Species ## ----------------------------------------------------------------------- # Random Forest with 5-fold stratified cv pars <- valipars(sampling = "cv",niter = 4, nreps=5, strat=TRUE) tr.idx <- trainind(y,pars=pars) acc1 <- accest(x, y, method = "randomForest", pars = pars, tr.idx=tr.idx) acc1 summary(acc1) # plot the accuracy in each iteration plot(acc1) ## ----------------------------------------------------------------------- # Forensic Glass data in chap.12 of MASS data(fgl, package = "MASS") # in MASS package # Randomised validation (holdout) of SVM for fgl data acc2 <- accest(type~., data = fgl, method = "svm", cost = 100, gamma = 1, pars = valipars(sampling = "rand",niter = 10, nreps=4,div = 2/3) ) acc2 summary(acc2) # plot the accuracy in each iteration plot(acc2) ## ----------------------------------------------------------------------- ## Examples of amm.cl and aam.mcl aam.1 <- aam.cl(x,y,method="svm",pars=pars) aam.2 <- aam.mcl(x,y,method=c("svm","randomForest"),pars=pars) ## If use two classes, AUC will be calculated idx <- (y == "setosa") aam.3 <- aam.cl(x[!idx,],factor(y[!idx]),method="svm",pars=pars) aam.4 <- aam.mcl(x[!idx,],factor(y[!idx]),method=c("svm","randomForest"),pars=pars) } \keyword{classif} <file_sep>/NEWS.md # mt 2.0-1.1: - `pca.outlier`: add prefix to ellipse to avoid conflict with package car. 
# mt 2.0-1.2:
- `corrgram.circle` and `corrgram.ellipse`: move 'scales' into argument list.

# mt 2.0-1.3:
- `cl.auc`: adjust auc: `if (auc < 0.5) auc <- 1 - auc`

# mt 2.0-1.4: (14-05-2012)
- `accest`: Fix bugs and add more output.

# mt 2.0-1.5: (16-05-2012)
- `cl.roc`: change output and fix bugs.

# mt 2.0-1.6: (19-05-2012)
- `cl.roc`: add output of positive level.
- `cl.perf`: add confusion matrix, Kappa and positive outputs.

# mt 2.0-1.7: (21-05-2012)
- `plsc`: minor changes for returned component pls.out so that R2 can be applied.
- `plslda`: minor changes for returned component pls.out so that R2 can be applied.

# mt 2.0-1.8: (22-05-2012)
- `classifier`: bug fixes

# mt 2.0-1.9: (22-05-2013)
- `df.summ`: add dots argument and modify a user-defined function.

# mt 2.0-1.10: (09-01-2014)
- `stats.vec` and `stats.mat`: summarising two-group stats, such as p-values.
- `fs.cl.2`, `perf` and `perf.aam`: internal functions (21-01-2014).

# mt 2.0-1.11: (22-01-2014)
- re-write `shrink.list`
- add `proprec.auto` as hidden function and tidy up

# mt 2.0-1.12: (07-08-2014)
- fix bug in `pval.reject`: add na.rm = TRUE
- change in `.grpplot`: remove xlab and ylab inside.

# mt 2.0-1.13: (10-06-2015)
- two functions `mdsplot` and `mds.plot.wrap` for MDS plotting.

# mt 2.0-1.14: (16-07-2015)
- Re-write two functions for ellipse: `panel.elli` and `panel.elli.1`
- Provide `panel.outl` for labelling data points outside ellipse. They can
  be considered as outliers.

# mt 2.0-1.15: (19-11-2015)
- `.path.package` is defunct so use `path.package` instead in `csv2xls`.
- Use `pls:::coef.mvr` to replace `coef.mvr` since it is hidden in the new
  version of 'pls'.

# mt 2.0-1.16: (30-11-2015)
- Add log2 fold-change for `stats.mat` and `stats.vec`.
- Add adjusted p-values for `stats.mat` only.
- Provide documents for `pca.outlier` and `pca.outlier.1`. The former is
  the lattice version and the latter is the basic graphics version.
# mt 2.0-1.17: (26-01-2016) - Remove returned values of "direction" in `stats.vec` and `stats.mat`. So the results are numeric, not character. # mt 2.0-1.18: (06-11-2021) - Replace `melt` and `ddply` with base R functions. So no need to load two packages `reshape` and `plyr`. - Move out some un-documented functions. - Change CHANGELOG as NEWS.md - Move packages from Depends to Imports in DESCRIPTION - Spell check on R scripts using RStudio # mt 2.0-1.19: (31-01-2022) - no changes but test against new version of randomForest 4.7.1 as requested.<file_sep>/man/mc.anova.Rd % lwc-30-01-2007: \name{mc.anova} \alias{mc.anova} \title{ Multiple Comparison by 'ANOVA' and Pairwise Comparison by 'HSDTukey Test' } \description{ Performs multiple comparison by \code{ANOVA} and pairwise comparison by \code{HSDTukey Test}. } \usage{ mc.anova(x, \dots) } % -------------------------------------------------------------------- \arguments{ \item{x}{ A matrix or data frame to be tested. } \item{\dots}{Additional arguments pass to \code{anova} or \code{HSDTukey test}.} } % ---------------------------------------------------------------------- \value{ A list with components: \item{anova}{Hypothesis test results of \code{anova}.} \item{tukey}{Hypothesis test results of \code{HSDTukey.test}.} \item{gl.pval}{Global or overall p value returned by \code{anova}.} \item{mc.pval}{Pairwise p value returned by \code{HSDTukey.test}.} } % ---------------------------------------------------------------------------- \author{ <NAME> } \seealso{ \code{\link{maccest}}, \code{\link{mc.fried}} } % ---------------------------------------------------------------------- \examples{ # Iris data data(iris) x <- subset(iris, select = -Species) y <- iris$Species method <- c("randomForest","svm","pcalda","knn") pars <- valipars(sampling="boot", niter = 10, nreps=4) res <- maccest(x, y, method=method, pars=pars, comp="anova") res htest <- mc.anova(res$acc.iter) oldpar <- par(mar = c(5,10,4,2) + 0.1) 
plot(htest$tukey,las=1) ## plot the tukey results par(oldpar) } \keyword{classif} <file_sep>/R/mt_util.R #' ======================================================================== #' lwc-29-03-2013: PCA outlier plot by lattice #' wll-29-11-2015: Examples of 'panel.elli' and 'panel.outl' give more #' general information about ellipses and outliers. If you *only* want to #' plot outliers based on PCA in a general way, for example outliers in #' different groups or in conditional panel, you can write an wrapper #' function combining with 'pca.comp', 'panel.elli' and 'panel.oult'. The #' example is 'pca.plot.wrap'. pca.outlier <- function(x, center = TRUE, scale = TRUE, conf.level = 0.975, ...) { #' lwc-29-03-2013: Lattice panel for plotting outliers with ellipse #' lwc-03-04-2013: To avoid error: formal argument "***" matched by #' multiple actual arguments, use: ..., type,col, lty, lwd. #' wll-29-11-2015: More general panel function for outlier 'panel.outl' panel.outlier <- function(x, y, groups = NULL, elli, labs, id, ..., type, col, lty, lwd) { #' dots <- list(...) if (is.null(groups)) { panel.xyplot(x, y, ...) } else { panel.superpose(x, y, groups, ...) } panel.abline(h = 0, v = 0, col = c("gray"), lty = 2) #' overall ellipse line panel.points(elli[, 1], elli[, 2], type = "l", col = "red", lwd = 2, ...) #' labelling outliers if (any(id)) { ltext(x[id], y[id], labs[id], ...) #' cex = dots$cex, adj = dots$adj) } } #' argument list dots <- list(...) if (length(dots) > 0) { args <- dots } else { args <- list() } #' calculate PCA pcs <- 1:2 #' only use PC1 and PC2 pca <- prcomp(x, center = center, scale. 
= scale) #' strip off dot arguments vars <- pca$sdev^2 vars <- vars / sum(vars) #' Proportion of Variance names(vars) <- colnames(pca$rotation) vars <- round(vars * 100, 2) dfn <- paste(names(vars), " (", vars[names(vars)], "%)", sep = "") x <- data.frame(pca$x) names(x) <- dfn x <- x[, pcs] #' outlier detection by Mahalanobis distances cent <- colMeans(x) cova <- cov(x) dist <- sqrt(mahalanobis(x, center = cent, cov = cova)) cuto <- sqrt(qchisq(conf.level, ncol(x))) id <- dist > cuto #' get ellipse point elli <- ellipse(var(x), centre = cent, level = conf.level) #' handle args labs <- rownames(x) args <- c(list(x = x[, 2] ~ x[, 1], data = x), args) if (is.null(args$xlab)) args$xlab <- names(x)[1] if (is.null(args$ylab)) args$ylab <- names(x)[2] if (F) { xlim <- c(min(x[, 1], elli[, 1]), max(x[, 1], elli[, 1])) ylim <- c(min(x[, 2], elli[, 2]), max(x[, 2], elli[, 2])) if (is.null(args$xlim)) args$xlim <- xlim if (is.null(args$ylim)) args$ylim <- ylim } args <- c(args, panel = panel.outlier) #' arguments for panel.outlier args$elli <- elli args$labs <- labs args$id <- id p <- do.call("xyplot", args) ret <- list( plot = p, outlier = which(id), conf.level = conf.level, mah.dist = dist, cutoff = cuto ) return(ret) } #' ======================================================================== #' lwc-03-06-2010: PCA plot with outlier detection #' lwc-01-09-2010: Add group info. #' To-Do: #' 1.) Display group text inside the ellipse pca.outlier.1 <- function(x, center = TRUE, scale = TRUE, conf.level = 0.975, group = NULL, main = "PCA", cex = 0.7, ...) { #' calculate PCA pcs <- 1:2 #' only use PC1 and PC2 pca <- prcomp(x, center = center, scale. 
= scale) #' strip off dot arguments vars <- pca$sdev^2 vars <- vars / sum(vars) #' Proportion of Variance names(vars) <- colnames(pca$rotation) vars <- round(vars * 100, 2) dfn <- paste(names(vars), " (", vars[names(vars)], "%)", sep = "") x <- data.frame(pca$x) names(x) <- dfn x <- x[, pcs] #' outlier detection by Mahalanobis distances cent <- colMeans(x) cova <- cov(x) dis <- sqrt(mahalanobis(x, center = cent, cov = cova)) cutoff <- sqrt(qchisq(conf.level, ncol(x))) outlier <- which(dis > cutoff) #' Plot PCA with ellipse and outliers z <- ellipse(x = cova, center = cent, level = conf.level) x1 <- c(min(x[, 1], z[, 1]), max(x[, 1], z[, 1])) y1 <- c(min(x[, 2], z[, 2]), max(x[, 2], z[, 2])) if (is.null(group)) { plot(x, xlim = x1, ylim = y1, main = main, ...) } else { col <- unclass(group) pch <- unclass(group) plot(x, xlim = x1, ylim = y1, main = main, col = col, pch = pch, ...) legend("topright", legend = sort(unique(levels(group))), cex = cex, col = sort(as.vector(unique(col))), pch = sort(as.vector(unique(pch))) ) } lines(z, type = "l", col = "red") #' plot ellipse #' plot the outliers if (length(outlier) > 0) { xrange <- par("usr") xrange <- xrange[2] - xrange[1] #' control offset of text position #' display names txt <- names(dis[outlier]) if (is.null(txt)) txt <- outlier text(x[outlier, 1], x[outlier, 2] + xrange / 50, txt, col = "blue", cex = cex ) } ret <- list( outlier = outlier, conf.level = conf.level, mah.dist = dis, cutoff = cutoff ) return(ret) } #' ======================================================================= #' lwc-13-11-2007: Plot columns of matrix-like object by group #' lwc-18-12-2007: Major changes. Call .grpplot. #' lwc-28-09-2008: Add ocall. For details, see plot.shingle in lattice #' lwc-11-02-2010: Change the point of median in boxplot as line #' lwc-21-02-2010: change name from gplot to grpplot. 
#' ========================================================================
#' grpplot: plot the columns of a matrix-like object, split by group.
#' (history: lwc-13-11-2007 first draft; lwc-21-02-2010 renamed from gplot;
#'  lwc-15-07-2015 removed 'ep' argument)
#'
#' x    : matrix or data frame of numeric variables. One lattice panel per
#'        column for "strip"/"box"/"density"; a grouped scatter plot /
#'        scatter-plot matrix for "pairs".
#' y    : grouping factor, one entry per row of 'x'.
#' plot : one of "strip", "box", "density", "pairs" (partial matching).
#' ...  : passed on to the underlying lattice call. Useful auto.key
#'        settings include list(columns = nlevels(factor(y))) and
#'        list(space = "right").
#'
#' Returns a "trellis" object whose 'call' component is rewritten to show
#' the user-level grpplot() call (same technique as plot.shingle in
#' lattice; see lwc-28-09-2008 note).
grpplot <- function(x, y, plot = "pairs", ...) {
  #' Capture the caller's call so the returned trellis object prints
  #' 'grpplot(...)' rather than the internal lattice call.
  ocall <- sys.call(sys.parent())
  ocall[[1]] <- quote(grpplot)

  #' as.data.frame (not data.frame) keeps the original column names
  #' intact (lwc-19-12-07 note).
  x <- as.data.frame(x)
  y <- factor(y)
  plot <- match.arg(plot, c("strip", "box", "density", "pairs"))

  #' Reshape to long format for the one-panel-per-column displays
  #' ("strip", "box" and "density").
  x.1 <- stack(x)
  #' stack() produces a factor with sorted levels, which lattice would use
  #' to order the panels. Restore the original column order of 'x' so the
  #' panels follow e.g. a feature-rank ordering (lwc-21-11-2007 note).
  x.1$ind <- factor(x.1$ind, levels = unique.default(x.1$ind))
  x.1$.y <- rep(y, ncol(x))

  grpplot <- switch(tolower(plot),
    #' NOTE(fix): the original 'strip' call carried a stray empty
    #' argument ('as.table = T, , ...'); it has been removed.
    strip = stripplot(values ~ .y | ind,
      data = x.1, as.table = T, ...
    ),
    #' pch = "|" draws the median as a line rather than a point
    #' (lwc-11-02-2010 note).
    box = bwplot(values ~ .y | ind,
      data = x.1, as.table = T, pch = "|", ...
    ),
    density = densityplot(~ values | ind,
      data = x.1, groups = x.1$.y,
      plot.points = F, as.table = T, ...
    ),
    pairs = .grpplot(x, y, ...)
  )
  grpplot$call <- ocall
  return(grpplot)
}

#' =======================================================================
#' .grpplot: grouped scatter plot with per-group confidence ellipses.
#' (history: lwc-13-12-2007 first draft; lwc-17-12-2007 single-column case
#'  handled following ldahist; lwc-17-01-2008 plotting symbols recycled
#'  within the 1:25 pch range; lwc-15-07-2015 removed 'ep', calls
#'  panel.elli.1)
#'
#' Dispatches on the number of columns of 'x':
#'   - exactly 2 : xyplot of column 1 against column 2;
#'   - more than 2: splom (scatter-plot matrix);
#'   - exactly 1 : stripplot of the single column against the groups.
#' Colours recycle after 7 by default; to change the scheme globally, set
#' trellis.par.set("superpose.symbol", ...) before calling (see colors()
#' for available colour names).
#'
#' NOTE(review): panel.xyplot knows nothing about xlab/ylab/cex; use
#' trellis.par.set() or par.settings for global/local graphical settings.
.grpplot <- function(x, y, auto.key = list(space = "right"),
                     par.settings = list(superpose.symbol = list(pch = rep(1:25))),
                     xlab, ylab, ...) {
  #' Rewrite the recorded call, as in grpplot().
  ocall <- sys.call(sys.parent())
  ocall[[1]] <- quote(.grpplot)

  if (!is.matrix(x) && !is.data.frame(x)) {
    x <- as.matrix(x)
  }
  y <- factor(y)

  if (ncol(x) == 2) {
    #' Column 1 on the vertical axis against column 2 on the horizontal;
    #' default labels follow that orientation (lwc-07-08-14 change).
    if (missing(xlab)) xlab <- names(x)[2]
    if (missing(ylab)) ylab <- names(x)[1]
    p <- xyplot(x[, 1] ~ x[, 2],
      groups = y, as.table = T,
      xlab = xlab, ylab = ylab,
      auto.key = auto.key,
      par.settings = par.settings,
      scales = list(cex = 0.8), #' axis font size
      panel = function(x, y, ...) {
        panel.xyplot(x, y, ...)
        panel.elli.1(x, y, ...) #' per-group confidence ellipse
      },
      ...
    )
  } else if (ncol(x) > 2) {
    p <- splom(~x,
      groups = y, as.table = T, xlab = "",
      auto.key = auto.key,
      par.settings = par.settings,
      panel = function(x, y, ...) {
        panel.xyplot(x, y, ...)
        panel.elli.1(x, y, ...)
      },
      ...
    )
  } else {
    #' Single column: strip plot against the grouping factor.
    p <- stripplot(x[, 1] ~ y, groups = y, as.table = T,
                   ylab = colnames(x)[1], ...)
    if (F) { #' retained alternative display: per-group histogram + normal
      p <- histogram(~ x | y,
        xlab = "", type = "density",
        layout = c(1, nlevels(y)),
        panel = function(x, ...) {
          panel.histogram(x, ...)
          panel.mathdensity(
            dmath = dnorm, col = "black",
            args = list(mean = mean(x), sd = sd(x))
          )
        },
        ...
      )
    }
  }
  p$call <- ocall
  p
}

#' ========================================================================
#' pcaplot: scatter-plot matrix of selected principal components by group.
#' (history: lwc-12-13-2007 first draft; lwc-15-07-2015 removed 'ep')
#'
#' x     : matrix or data frame of numeric variables.
#' y     : grouping factor.
#' scale : logical, passed to prcomp(scale.=) — scale variables to unit
#'         variance before PCA.
#' pcs   : indices of the principal components to plot (default PC1/PC2).
#'         NOTE(review): validity of 'pcs' against ncol(x) is not checked.
#' ...   : passed on to grpplot().
#'
#' Axis labels carry the percentage of variance explained by each PC.
#' Usage:
#'   data(iris)
#'   pcaplot(iris[, 1:4], iris[, 5], scale = T, pcs = c(2, 1))
pcaplot <- function(x, y, scale = TRUE, pcs = 1:2, ...) {
  pca <- prcomp(x, scale. = scale)
  vars <- pca$sdev^2
  vars <- vars / sum(vars) #' proportion of variance per component
  names(vars) <- colnames(pca$rotation)
  vars <- round(vars * 100, 2)
  dfn <- paste(names(vars), " (", vars[names(vars)], "%)", sep = "")
  x <- data.frame(pca$x)
  names(x) <- dfn
  x <- x[, pcs]
  p <- grpplot(x, y, plot = "pairs", ...)
  p
}

#' =======================================================================
#' lwc-15-07-2015: ellipse panel function which supports individual and
#' combined group plotting. It is the extension of panel.elli.
#' Usage: Under ep=2, there are three options to plot ellipse. #' com.grp: control which combination of groups to be plotted. #' no.grp: control which individual group not to be plotted. Note #' it will be overridden by com.grp. #' If no com.grp and no.grp, the each individual group ellipse should #' be plotted. panel.elli.1 <- function(x, y, subscripts, groups = NULL, conf.level = 0.975, ep = 0, com.grp = NULL, no.grp = NULL, ell.grp = NULL, ...) { plot.elli <- function(x, y, ...) { #' plot ellipse Var <- var(cbind(x, y)) Mean <- cbind(mean(x), mean(y)) Elli <- ellipse(Var, centre = Mean, level = conf.level) #' panel.xyplot(x, y,...) #' panel.xyplot(Elli[,1], Elli[,2],...) panel.points(Elli[, 1], Elli[, 2], ...) } #' lwc-14-07-2015: do NOT plot x and y inside the panel function if (FALSE) { if (!is.null(groups)) { panel.superpose(x, y, subscripts, groups, ...) } else { panel.xyplot(x, y, ...) } panel.abline(h = 0, v = 0, col = c("gray"), lty = 2) } if (!is.null(ell.grp)) { #' ellipse based on other group info grp <- ell.grp[subscripts] tmp <- data.frame(x = x, y = y, grp = grp) #' wl-05-11-2021, Fri: use base R 'by' by(tmp, tmp$grp, function(x) { plot.elli(x$x, x$y, ..., type = "l", lty = 2, col = "cyan") }) ## plyr::ddply(tmp, .(grp), function(x) { ## plot.elli(x$x, x$y, ..., type = "l", lty = 2, col = "cyan") ## }) } else if (ep == 1) { #' over-line ellipse plot.elli(x, y, type = "l", col = "red", ...) #' lwd=2 #' ellipse based on groups, individual or combination. 
} else if (ep == 2) { #' plot group ellipse if (!is.null(com.grp)) { #' plot combined groups grp <- groups[subscripts] for (i in names(com.grp)) { id <- grp %in% com.grp[[i]] plot.elli(x[id], y[id], ..., type = "l", col = "gray") } } else if (!is.null(no.grp)) { #' plot remained groups grp <- groups[subscripts] for (i in levels(grp)) { id <- i == grp if (!(i %in% no.grp)) { plot.elli(x[id], y[id], ..., type = "l", col = "gray") } } } else { #' plot all groups panel.superpose(x, y, subscripts, groups, ..., panel.groups = plot.elli, type = "l", lty = 2 ) } } } #' ======================================================================= #' lwc-02-04-2013: Panel function for plotting ellipse used by lattice. #' Note: For details, see panel.ellipse of package latticeExtra panel.elli <- function(x, y, groups = NULL, conf.level = 0.975, ...) { if (!is.null(groups)) { panel.superpose( x = x, y = y, groups = groups, conf.level = conf.level, panel.groups = panel.elli, ... ) } else { Var <- var(cbind(x, y)) Mean <- cbind(mean(x), mean(y)) Elli <- ellipse(Var, centre = Mean, level = conf.level) panel.xyplot(Elli[, 1], Elli[, 2], ...) } } #' ======================================================================= #' lwc-02-04-2013: Panel function for plotting outliers with ellipse in #' lattice #' To-Do: How to keep colour of text as consistent with groups? panel.outl <- function(x, y, subscripts, groups = NULL, conf.level = 0.975, labs, ...) { if (!is.null(groups)) { panel.superpose( x = x, y = y, groups = groups, subscripts = subscripts, conf.level = conf.level, labs = labs, panel.groups = panel.outl, ... ) } else { #' outlier detection by Mahalanobis distances mat <- cbind(x, y) #' row.names(mat) <- labs[subscripts] cent <- colMeans(mat) cova <- cov(mat) dist <- sqrt(mahalanobis(mat, center = cent, cov = cova)) cuto <- sqrt(qchisq(conf.level, ncol(mat))) id <- dist > cuto if (any(id)) { panel.text(x[id], y[id], labs[subscripts][id], ...) 
#' ltext(x[id], y[id], labs[subscripts][id],...) #' from lattice: panel.text <- function(...) ltext(...) } } } #' ======================================================================= #' lwc-30-07-2013: group stats of column of a matrix/data frame including #' fold changes, auc and p-values. #' lwc-08-01-2014: tidy up and minor changes. #' wll-24-11-2015: add the adjusted p-values. Beware that 'stats.vec' has no #' such thing. stats.mat <- function(x, y, method = "mean", test.method = "wilcox.test", padj.method = "fdr", fc = TRUE, ...) { #' function for calculation based on column vector. x <- as.data.frame(x, stringsAsFactors = F) res <- t(sapply(x, function(i) stats.vec(i, y, method, test.method, fc, ...))) res <- as.data.frame(res, stringsAsFactors = FALSE) #' get adjusted p-values padj <- round(p.adjust(res$pval, method = padj.method), digits = 4) res <- cbind(res, padj) #' or res <- data.frame(res, padj) return(res) } #' ======================================================================= #' lwc-30-07-2013: group stats for vector #' lwc-09-01-2014: lack of error handling, such as limits of 'method' and #' two groups. #' wll-11-08-2014: add overall mean #' wll-24-11-2015: add adjusted p-values #' wll-01-12-2015: add an argument for fold-change. fc is only for positive #' values of 'x'. If not, the results are useless. #' wll-26-01-2016: drop off change direction so the results are numeric, not #' the character. Note that the fold change indicates the changing #' direction. stats.vec <- function(x, y, method = "mean", test.method = "wilcox.test", fc = TRUE, ...) 
{ #' overall mean omn <- do.call(method, list(x, na.rm = TRUE)) names(omn) <- method #' group mean gmn <- tapply(x, y, method, na.rm = TRUE) names(gmn) <- paste(names(gmn), method, sep = ".") auc <- round(cl.auc(x, y), digits = 2) p.val <- round(.pval(x, y, test.method = test.method, ...), digits = 4) #' p.val <- wilcox.test(x ~ y,correct = FALSE)$"p.value" if (F) { direc <- if (gmn[1] > gmn[2]) { "Down" } else if (gmn[1] < gmn[2]) { "Up" } else { "No change" } } if (fc) { fc <- round(.foldchange(gmn[2], gmn[1]), digits = 2) #' lwc-23-08-2013: gmn[1] is baseline names(fc) <- NULL log2.fc <- round(.foldchange2logratio(fc), digits = 2) res <- c(omn, gmn, #' direction=direc, fold.change = fc, log2.fold.change = log2.fc, auc = auc, pval = p.val ) } else { res <- c(omn, gmn, #' direction=direc, auc = auc, pval = p.val ) } return(res) } #' ======================================================================= #' lwc-30-07-2013: wrapper functions for p-values from test .pval <- function(x, y, test.method = "oneway.test", ...) { test.method <- if (is.function(test.method)) { test.method } else if (is.character(test.method)) { get(test.method) } else { eval(test.method) } pval <- test.method(x ~ y, ...)$p.value return(pval) } #' ======================================================================= #' lwc-16-07-2013: Fold change from gtools #' Usage: #' a <- 1:21 #' b <- 21:1 #' f <- .foldchange(a, b) #' cbind(a, b, f) .foldchange <- function(num, denom) { ifelse(num >= denom, num / denom, -denom / num) } .foldchange2logratio <- function(foldchange, base = 2) { retval <- ifelse(foldchange < 0, 1 / -foldchange, foldchange) retval <- log(retval, base) retval } .logratio2foldchange <- function(logratio, base = 2) { retval <- base^ (logratio) retval <- ifelse(retval < 1, -1 / retval, retval) retval } #' ======================================================================== #' lwc-24-08-2011: Summary function for data vector. 
#' lwc-26-08-2011: add error checking (All NAs)
#' Usage:
#' x <- iris[,1]
#' vec.summ.1(x)
#' Summarise a numeric vector: N, mean, median, 95% CI of the mean, IQR, SD.
vec.summ.1 <- function(x) {
  #' With fewer than 2 non-NA values t.test() is undefined, so return NAs.
  if (sum(!is.na(x)) < 2) { #' if (all(is.na(x))) {
    mean <- median <- sd <- iqr <- CI.L <- CI.H <- NA
  } else {
    mean <- mean(x, na.rm = T)
    median <- median(x, na.rm = T)
    sd <- sd(x, na.rm = T)
    iqr <- IQR(x, na.rm = T)
    #' 95% confidence interval of the mean from a one-sample t-test.
    #' NOTE(review): t.test() errors when x is constant (sd == 0) — confirm
    #' callers never pass constant columns.
    conf <- t.test(x)$conf.int
    CI.L <- conf[1]
    CI.H <- conf[2]
  }

  res <- c(
    N = sum(!is.na(x)), Mean = mean, Median = median,
    "95% CI.l" = CI.L, "95% CI.u" = CI.H, IQR = iqr, Std = sd
  )
  #' res <- format(res,digits=3)
  res <- round(res, digits = 3)
  return(res)
}

#' =======================================================================
#' lwc-03-03-2010: Summary function for vector data
#' lwc-11-11-2011: Change Nval as N.
#' Summarise a numeric vector: N, min, mean, median, max, SD (NA-tolerant).
vec.summ <- function(x) {
  res <- c(
    N = sum(!is.na(x)), Min = min(x, na.rm = T), Mean = mean(x, na.rm = T),
    Median = median(x, na.rm = T), Max = max(x, na.rm = T),
    Std = sd(x, na.rm = T)
  )
  res <- round(res, digits = 3)
  return(res)
}

#' =======================================================================
#' lwc-03-03-2010: Summary function for data frame/matrix by column.
#' lwc-24-08-2011: Summary function for data matrix (wrapper function of
#'                 vec.summ).
#' lwc-22-05-2013: add dots for method's arguments.
#' Usage:
#' data(abr1)
#' dat <- (abr1$pos)[,110:150]
#' dat <- mv.zene(dat)
#' summ <- df.summ(dat, method=vec.summ)
#' summ.1 <- df.summ(dat, method=vec.summ.1)
#' Apply a vector-summary function to every numeric column of a data
#' frame/matrix; non-numeric columns are dropped. Returns a data frame
#' with one row per variable.
df.summ <- function(dat, method = vec.summ, ...) {
  #' Resolve 'method' whether supplied as a function, a name, or an
  #' unevaluated expression.
  method <- if (is.function(method)) {
    method
  } else if (is.character(method)) {
    get(method)
  } else {
    eval(method)
  }

  dat <- as.data.frame(dat, stringsAsFactors = F)
  #' only numeric, not categorical data
  dat <- Filter(is.numeric, dat)

  #' lwc-11-10-2011: dat must be data frame here.
  res <- t(sapply(dat, function(i) method(i, ...)))
  res <- as.data.frame(res, stringsAsFactors = FALSE)
  #' res <- cbind(Variable=rownames(res),res) #' Do we need to add one column?
  return(res)
}

#' =========================================================================
#' lwc-11-10-2011: replace zero/negative with NA.
#' Replace values below machine epsilon (zeros and negatives) with NA,
#' column by column. Note: sapply() collapses the result to a matrix.
mv.zene <- function(dat) {
  vec.func <- function(x) {
    x <- ifelse(x < .Machine$double.eps, NA, x)
  }

  dat <- as.data.frame(dat, stringsAsFactors = F)
  res <- sapply(dat, function(i) vec.func(i))
  return(res)
}

#' ========================================================================
#' lwc-23-04-2010: Fill the zero/NA values by the mean of vector.
#' lwc-15-06-2011: minor changes.
#' lwc-22-06-2011: Replace ifelse(x < .Machine$double.eps, m, x) with
#'                 ifelse(x < .Machine$double.eps, NA, x) and change its
#'                 line position.
#' wl-27-11-2021, Sat: bug. if dat have minus values, do not use 'ze_ne = T'
#' Usage
#' data(abr1)
#' dat <- abr1$pos[,1970:1980]
#' dat.1 <- mv.fill(dat,method="mean",ze_ne = TRUE)
#' Impute NAs (and, when ze_ne = TRUE, zeros/negatives) in each column with
#' a per-column statistic such as the mean or median.
mv.fill <- function(dat, method = "mean", ze_ne = FALSE) {
  #' Resolve 'method' as function / name / expression (same idiom as
  #' df.summ above).
  method <- if (is.function(method)) {
    method
  } else if (is.character(method)) {
    get(method)
  } else {
    eval(method)
  }

  vec.func <- function(x) {
    if (ze_ne) {
      x <- ifelse(x < .Machine$double.eps, NA, x) #' vectorisation of ifelse
    }
    m <- method(x, na.rm = TRUE)
    x[is.na(x)] <- m #' 10-10-2011: more general for multiple filing points
    x
    #' x <- ifelse(is.na(x), m, x)  #' for missing values
    #' x <- ifelse(is.nan(x), m, x) #' for missing values
  }

  dat <- as.data.frame(dat, stringsAsFactors = F)
  res <- sapply(dat, function(i) vec.func(i))
  return(res)
}

#' =======================================================================
#' lwc-07-09-2010: Test codes for missing values processing
#' wll-11-12-2007: Statistics and plot for missing values
#' lwc-03-03-2010: Get number of missing values by column.
#' lwc-15-06-2011: re-write
#' lwc-10-10-2011: major change. remove stats based on row.
#' Usage:
#' data(metaboliteData, package="pcaMethods")
#' dat <- t(metaboliteData)
#' colnames(dat) <- paste("V", 1:ncol(dat), sep="")
#' cls <- rownames(dat)
#' cls <- sub("[.].*", "", cls)
#' cls <- factor(cls)
#' tmp <- mv.stats(dat, grp=cls)
#' tmp <- mv.fill(dat, method = "median")
#' tmp <- mt:::mv.pattern(dat)
#' Missing-value statistics: overall MV rate, per-variable MV rate and,
#' when a grouping factor is given, per-group MV rates plus a lattice
#' xyplot of MV percentage against variable index.
mv.stats <- function(dat, grp = NULL, ...) {
  #' overall missing values rate
  mv.all <- sum(is.na(as.matrix(dat))) / length(as.matrix(dat))

  #' MV stats function for vector
  vec.func <-
    function(x) round(sum(is.na(x) | is.nan(x)) / length(x), digits = 3)
  #' vec.func <- function(x) sum(is.na(x)|is.nan(x))     #' number of MV
  #'                         sum(is.na(x)|is.nan(x)|(x==0))

  #' get number of Na, NaN and zero in each of feature/variable
  #' mv.rep <- apply(dat, 1, vec.func)
  mv.var <- apply(dat, 2, vec.func)

  ret <- list(mv.overall = mv.all, mv.var = mv.var)

  if (!is.null(grp)) {
    #' MV rate with respect of variables and class info
    #' (one column of rates per factor level)
    mv.grp <- sapply(levels(grp), function(y) {
      idx <- (grp == y)
      mat <- dat[idx, ]
      mv <- apply(mat, 2, vec.func)
    })

    #' lwc-10-10-2011: Use aggregate. Beware that values pased in the
    #' function is vector(columns).
    #' mv.grp <- aggregate(dat, list(cls), vec.func)
    #' rownames(mv.grp) <- mv.grp[,1]
    #' mv.grp <- mv.grp[,-1]
    #' mv.grp <- as.data.frame(t(mv.grp),stringsAsFactors=F)

    #' reshape matrix for lattice
    mv.grp.1 <- data.frame(mv.grp)
    mv.grp.1$all <- mv.var #' Combine all
    var <- rep(1:nrow(mv.grp.1), ncol(mv.grp.1))
    mv.grp.1 <- stack(mv.grp.1)
    #' keep panel order as encountered, not alphabetical
    mv.grp.1$ind <- factor(mv.grp.1$ind,
      levels = unique.default(mv.grp.1$ind)
    )
    mv.grp.1$var <- var

    mv.grp.plot <- xyplot(values ~ var | ind,
      data = mv.grp.1, groups = mv.grp.1$ind,
      as.table = T, layout = c(1, nlevels(mv.grp.1$ind)), type = "l",
      auto.key = list(space = "right"),
      #' main="Missing Values Percentage With Respect of Variables",
      xlab = "Index of variables", ylab = "Percentage of missing values",
      ...
    )

    ret$mv.grp <- mv.grp
    ret$mv.grp.plot <- mv.grp.plot
  }
  ret
}

#' ========================================================================
#' wll-05-12-2007: Calculate the pattern of missing values.
#' Value:
#'   A matrix with (nrow(x)+1, ncol(x)+1) dimension. Except the last row and
#'   column, each row corresponds to a missing data pattern
#'   (1=observed, 0=missing). The row names shows the number of pattern.
#'   The last row contains the number of missing values
#'   with respect to each column and the last column represent the counts of
#'   each row.
#' See Also:
#'   md.pattern in package mice and prelim.norm in package norm.
#' NOTE: 1.The motivation of the function is that <NAME> mentioned that
#'         that prelim.norm can only encode NA-patterns in an R integer for up
#'         to 31 columns. More than that, and it will not work properly or at
#'         all. (http://article.gmane.org/gmane.comp.lang.r.general/55185).
#'         Function md.pattern has also this problem since it modified from
#'         prelim.norm. 2. The function is not sorted at current stage.
#' Usage:
#' library(mice)
#' data(nhanes)
#' md.pattern(nhanes)  #' from mice
#' mv.pattern(nhanes)
#' Tabulate the distinct missing-value patterns of a matrix/data frame
#' (see the Value note above for the layout of the returned matrix).
mv.pattern <- function(x) {
  #' row-wise "matrix equals each column" operator, used to count how many
  #' data rows match each unique pattern
  "%all.==%" <- function(a, b) apply(b, 2, function(x) apply(t(a) == x, 2, all))

  if (!(is.matrix(x) | is.data.frame(x))) {
    stop("Data should be a matrix or data frame")
  }

  #' get the pattern of missing values
  mat <- 1 * !is.na(x)
  pattern <- unique(mat)
  counts <- colSums(mat %all.==% t(unique(mat)))
  rownames(pattern) <- counts

  #' number of missing values with respect to column (variable)
  nmis <- apply(1 * is.na(x), 2, sum)
  #' number of missing values in the pattern
  pmis <- ncol(pattern) - apply(pattern, 1, sum)

  pattern <- rbind(pattern, c(nmis)) #' a trick to take off the row name
  pattern <- cbind(pattern, c(pmis, sum(nmis)))
  pattern
}

#' =========================================================================
#' lwc-24-11-2010: Get heatmap colours
#' Note: compare this:
#'    col.regions = colorRampPalette(c("green", "black", "red"))
#'  in lattice levelplot
#' Build an n-step colour gradient between two colours for heatmaps.
hm.cols <- function(low = "green", high = "red", n = 123) {
  low <- col2rgb(low) / 255
  if (is.character(high)) {
    high <- col2rgb(high) / 255
  }
  col <- rgb(
    seq(low[1], high[1], len = n),
    seq(low[2], high[2], len = n),
    seq(low[3], high[3], len = n)
  )
  return(col)
}

#' =========================================================================
#' wll-23-10-2008: Wrapper function for plotting PCA. The first two PCs are
#'                 fixed in this routine.
#' wll-12-01-2008: add dot arguments for lattice plot arguments
#' Arguments:
#'  data.list - A two-layer list structure, in which the second layer
#'              include a data frame and a factor of class label. It should
#'              be noted the names of the first layer of data.list must be
#'              given.
#'  title     - A part of title string for plotting
#' Returns a list: the PC1/PC2 score plot, a dot plot of the proportion of
#' variance, and the variance table itself.
pca.plot.wrap <- function(data.list, title = "plotting", ...)
{
  #' fall back to names derived from the caller's expression
  #' (deparse/substitute is NSE: it must stay at this level)
  if (is.null(names(data.list))) {
    names(data.list) <- paste(deparse(substitute(data.list)),
      1:length(data.list),
      sep = ":"
    )
  }
  dn <- names(data.list)

  pca <- lapply(dn, function(x) {
    pca <- pca.comp(data.list[[x]]$dat, scale = F, pcs = 1:2)
    scores <- cbind(pca$scores, cls = data.list[[x]]$cls, type = x)
    list(scores = scores, vars = pca$vars)
  })
  names(pca) <- dn

  pca.scores <- do.call(rbind, lapply(pca, function(x) x$scores))
  pca.vars <- do.call(rbind, lapply(pca, function(x) x$vars))

  pca.p <- xyplot(PC2 ~ PC1 | type,
    data = pca.scores, groups = pca.scores$cls,
    as.table = T, xlab = "PC1", ylab = "PC2",
    main = paste(title, ": PCA", sep = ""),
    auto.key = list(space = "right"),
    par.settings = list(superpose.symbol = list(pch = rep(1:25))),
    #' par.settings = list(superpose.symbol=list(pch=rep(1:25),
    #'                     col=c("black","brown3"))),
    #' lwc-15-12-2010: check R_colour_card
    #' scales = "free",
    panel = function(x, y, ...) {
      panel.xyplot(x, y, ...)
      panel.elli.1(x, y, ...)
      #' panel.outl(x,y, ...) #' wll-29-11-15: need to provide 'labs'
    },
    ...
  )

  #' plot the PCA proportion of variance (#' reverse pca.vars for dotplot)
  pca.p.1 <- dotplot(pca.vars[nrow(pca.vars):1, , drop = F],
    groups = T,
    as.table = T,
    auto.key = list(space = "right"),
    par.settings = list(superpose.symbol = list(pch = rep(1:25))),
    xlab = "Percentage",
    main = paste(title, ": PCA proportion of variance", sep = ""),
    ...
  )

  list(pca.p = pca.p, pca.p.1 = pca.p.1, pca.vars = pca.vars)
}

#' =========================================================================
#' wll-01-06-2015: Wrapper function for plotting MDS. Only the first two
#'                 dimensions are plotted.
#' Classical MDS of each data set in data.list using the chosen distance
#' metric; returns (invisibly, as last value) a lattice score plot.
mds.plot.wrap <- function(data.list, method = "euclidean",
                          title = "plotting", ...)
{
  if (is.null(names(data.list))) {
    names(data.list) <- paste(deparse(substitute(data.list)),
      1:length(data.list),
      sep = ":"
    )
  }
  dn <- names(data.list)

  #' MDS
  METHODS <- c(
    "euclidean", "maximum", "manhattan", "canberra", "binary",
    "minkowski"
  )
  meth <- pmatch(method, METHODS) #' partial matching of the metric name

  mds <- lapply(dn, function(x) {
    dis <- dist(data.list[[x]]$dat, method = METHODS[meth])
    mds <- cmdscale(dis) #' only consider 2 dimension
    mds <- as.data.frame(mds)
    names(mds) <- c("Coord_1", "Coord_2")
    mds <- cbind(mds, cls = data.list[[x]]$cls, type = x)
  })
  names(mds) <- dn
  mds <- do.call(rbind, lapply(mds, function(x) x))

  #' MDS plot
  mds.p <- xyplot(Coord_2 ~ Coord_1 | type,
    data = mds, groups = mds$cls,
    as.table = T, xlab = "Coordinate 1", ylab = "Coordinate 2",
    main = paste(title, ": MDS Plot", sep = ""),
    auto.key = list(space = "right"),
    par.settings = list(superpose.symbol = list(pch = rep(1:25))),
    panel = function(x, y, ...) {
      panel.xyplot(x, y, ...)
      panel.elli.1(x, y, ...)
      #' panel.outl(x,y, ...)#' wll-29-11-15: need to provide 'labs'
    },
    ...
  )
}

#' =========================================================================
#' wll-23-10-2008: Wrapper function for plotting PCALDA
#' lwc-11-02-2010: replace nlda with pcalda and change correspondingly, e.g.
#'                 DF to LD.
#' lwc-19-10-2010: handle with 2-class and more than 3-class problem. For
#'                 2-class, DF2 is a dummy variable, identical to LD1 for
#'                 general plotting reason.
#' Arguments:
#'  data.list - A two-layer list structure, in which the second layer
#'              include a data frame and a factor of class label. It should
#'              be noted the names of the first layer of data.list must be
#'              given.
#'  title     - A part of title string for plotting
#' Returns the LD1/LD2 score plot, an eigenvalue dot plot and the
#' eigenvalue table.
lda.plot.wrap <- function(data.list, title = "plotting", ...)
{
  if (is.null(names(data.list))) {
    names(data.list) <- paste(deparse(substitute(data.list)),
      1:length(data.list),
      sep = ":"
    )
  }
  dn <- names(data.list)

  lda <- lapply(dn, function(x) { #' x=dn[1]
    res <- pcalda(data.list[[x]]$dat, data.list[[x]]$cls)
    dfs <- as.data.frame(res$x)
    eig <- res$lda.out$svd
    #' keep at most the first two discriminant functions
    if (ncol(dfs) > 2) {
      dfs <- dfs[, 1:2, drop = F]
      eig <- eig[1:2]
    }
    #' 2-class case: duplicate LD1 as a dummy LD2 so xyplot still works
    if (ncol(dfs) == 1) {
      dfs$LD2 <- dfs$LD1
      eig <- c(eig, eig)
    }
    dfs <- cbind(dfs, cls = data.list[[x]]$cls, type = x)
    names(eig) <- c("LD1", "LD2")
    list(dfs = dfs, eig = eig)
  })
  names(lda) <- dn

  lda.dfs <- do.call(rbind, lapply(lda, function(x) x$dfs))
  lda.eig <- do.call(rbind, lapply(lda, function(x) x$eig))

  lda.p <- xyplot(LD2 ~ LD1 | type,
    data = lda.dfs, groups = lda.dfs$cls,
    as.table = T, xlab = "DF1", ylab = "DF2",
    main = paste(title, ": LDA", sep = ""),
    #' auto.key = list(columns=nlevels(cl)),
    auto.key = list(space = "right"),
    par.settings = list(superpose.symbol = list(pch = rep(1:25))),
    #' scales = "free",
    panel = function(x, y, ...) {
      panel.xyplot(x, y, ...)
      panel.elli.1(x, y, ...)
      #' panel.outl(x,y, ...)#' wll-29-11-15: need to provide 'labs'
    },
    ...
  )

  #' plot LDA eigenvales
  lda.p.1 <- dotplot(lda.eig[nrow(lda.eig):1, , drop = F],
    groups = T, as.table = T,
    auto.key = list(space = "right"),
    par.settings = list(superpose.symbol = list(pch = rep(1:25))),
    xlab = "Eigenvalues",
    main = paste(title, ": LDA Eigenvalues", sep = ""),
    ...
  )

  list(lda.p = lda.p, lda.p.1 = lda.p.1, lda.eig = lda.eig)
}

#' =========================================================================
#' wll-23-10-2008: Wrapper function for plotting PCALDA
#' Note: Will plot 2-class problem differently with stripplot.
lda.plot.wrap.1 <- function(data.list, title = "plotting", ...)
{
  if (is.null(names(data.list))) {
    names(data.list) <- paste(deparse(substitute(data.list)),
      1:length(data.list),
      sep = ":"
    )
  }
  dn <- names(data.list)

  lda <- lapply(dn, function(x) {
    res <- pcalda(data.list[[x]]$dat, data.list[[x]]$cls)
    dfs <- as.data.frame(res$x)
    dfs <- cbind(dfs,
      cls = data.list[[x]]$cls,
      type = rep(x, nrow(data.list[[x]]$dat))
    )
    #' list(dfs=dfs, eig=res$Tw)
    eig <- res$lda.out$svd
    names(eig) <- colnames(res$x)
    list(dfs = dfs, eig = eig)
  })
  names(lda) <- dn

  lda.dfs <- do.call(rbind, lapply(lda, function(x) x$dfs))
  lda.eig <- do.call(rbind, lapply(lda, function(x) x$eig))

  #' multi-class: LD2 exists, use a scatter plot; 2-class: strip plot of LD1
  if (length(grep("LD2", colnames(lda.dfs))) > 0) {
    lda.p <- xyplot(LD2 ~ LD1 | type,
      data = lda.dfs, groups = lda.dfs$cls,
      as.table = T, xlab = "LD1", ylab = "LD2",
      main = paste(title, ": LDA", sep = ""),
      #' auto.key = list(columns=nlevels(cl)),
      auto.key = list(space = "right"),
      par.settings = list(superpose.symbol = list(pch = rep(1:25))),
      #' scales = "free",
      panel = function(x, y, ...) {
        panel.xyplot(x, y, ...)
        panel.elli.1(x, y, ...)
        #' panel.outl(x,y, ...)
      },
      ...
    )
  } else {
    lda.p <- stripplot(LD1 ~ cls | type,
      data = lda.dfs, as.table = T,
      groups = lda.dfs$cls,
      auto.key = list(space = "right"),
      par.settings = list(superpose.symbol = list(pch = rep(1:25))),
      #' scales = "free",
      main = paste(title, ": LDA", sep = ""),
      ...
    )
    #' wll-07-05-2009: I have added the auto.key and par.settings
    #' only large number of sub-figures. Otherwise, this two
    #' line should be removed.
  }

  #' plot LDA eigenvales
  lda.p.1 <- dotplot(lda.eig[nrow(lda.eig):1, , drop = F],
    groups = T, as.table = T,
    auto.key = list(space = "right"),
    par.settings = list(superpose.symbol = list(pch = rep(1:25))),
    xlab = "Eigenvalues",
    main = paste(title, ": LDA Eigenvalues", sep = ""),
    ...
  )

  list(lda.p = lda.p, lda.p.1 = lda.p.1, lda.eig = lda.eig)
}

#' =========================================================================
#' wll-23-10-2008: Wrapper function for plotting PLSDA. Only the first two
#'                 components are plotted.
#' Note: Use plsc instead of plslda. You can call it PLSDA if PLS is
#'       employed for discrimination.
#' Arguments:
#'  data.list - A two-layer list structure, in which the second layer
#'              include a data frame and a factor of class label. It should
#'              be noted the names of the first layer of data.list must be
#'              given.
#'  title     - A part of title string for plotting
#' Value:
#'  A list with the LC1/LC2 score plot (pls.p), a dot plot of explained
#'  variance (pls.p.1) and the explained-variance table (pls.vars).
pls.plot.wrap <- function(data.list, title = "plotting", ...)
{
  #' No names supplied: fabricate them from the caller's expression.
  #' deparse(substitute()) is NSE and must run at this level.
  if (is.null(names(data.list))) {
    names(data.list) <- paste(deparse(substitute(data.list)),
      1:length(data.list),
      sep = ":"
    )
  }
  nms <- names(data.list)

  #' Fit PLS per data set; keep the first two component scores together
  #' with class/type labels, and the percentage of X-variance explained.
  fit.list <- lapply(nms, function(id) {
    mod <- plsc(data.list[[id]]$dat, data.list[[id]]$cls)
    sc <- as.data.frame(mod$x)[, 1:2] #' The first two components
    sc <- cbind(sc,
      cls = data.list[[id]]$cls,
      type = rep(id, nrow(data.list[[id]]$dat))
    )
    expl <- round((mod$pls.out$Xvar / mod$pls.out$Xtotvar) * 100, 2)[1:2]
    names(expl) <- c("LC1", "LC2")
    list(scores = sc, vars = expl)
  })
  names(fit.list) <- nms

  pls.scores <- do.call(rbind, lapply(fit.list, function(z) z$scores))
  pls.vars <- do.call(rbind, lapply(fit.list, function(z) z$vars))

  #' score plot, one panel per data set, group ellipses overlaid
  pls.p <- xyplot(LC2 ~ LC1 | type,
    data = pls.scores, groups = pls.scores$cls, as.table = T,
    xlab = "LC1", ylab = "LC2",
    main = paste(title, ": PLS", sep = ""),
    auto.key = list(space = "right"),
    par.settings = list(superpose.symbol = list(pch = rep(1:25))),
    panel = function(x, y, ...) {
      panel.xyplot(x, y, ...)
      panel.elli.1(x, y, ...)
    },
    ...
  )

  #' dot plot of the explained variance; rows reversed for display order
  pls.p.1 <- dotplot(pls.vars[nrow(pls.vars):1, , drop = F],
    groups = T, as.table = T,
    auto.key = list(space = "right"),
    par.settings = list(superpose.symbol = list(pch = rep(1:25))),
    xlab = "Percentage",
    main = paste(title, ": PLS proportion of variance", sep = ""),
    ...
  )

  list(pls.p = pls.p, pls.p.1 = pls.p.1, pls.vars = pls.vars)
}

#' =======================================================================
#' wll-01-10-2009: Misc function for splitting PCA/LDA/PLS plot.
#' Note: 1.) To deal with data.frame and matrix in the same way, should use
#'           a.) colnames instead of names;
#'           b.) index should be tmp[,x] instead of tmp[x] or tmp[[x]].
#'           c.) keep [,,drop=F].
#' TO-DO: 1.) How to extract strip text?
#'        2.) How to keep the legend being consistent with sub-figure's
#'            symbol /colour? It means how to remove the irrelevant
#'            symbol/colours in legend to keep consistent with
#'            sub-figure's.
#' Note: Internal function.
#' Usage:
#' data(iris)
#' x <- subset(iris, select = -Species)
#' y <- iris$Species
#' #' generate data list by dat.sel
#' iris.pw <- dat.sel(x,y,choices=NULL)
#' res <- pls.plot.wrap(iris.pw)
#' ph <- plot.wrap.split(res[[1]], res[[3]], perc=F)
#' win.metafile(filename = paste("pls_plot","%02d.emf",sep="_"))
#' for(i in 1:length(ph)) plot(ph[[i]])
#' dev.off()
#' Split a multi-panel trellis object into single panels, attaching the
#' per-panel axis labels (optionally with a "%" suffix) from plot.lab.
plot.wrap.split <- function(plot.handle, plot.lab, perc = T) {
  n <- dim(plot.handle) #' number of panels in the trellis object
  pca.ph <- lapply(1:n, function(x) {
    ph <- plot.handle[x] #' subset trellis object: panel x only
    xylab <- round(plot.lab[x, , drop = F], digits = 2)
    xylab <- lapply(colnames(xylab), function(y) {
      if (perc) {
        paste(y, " (", xylab[, y], "%)", sep = "")
      } else {
        paste(y, " (", xylab[, y], ")", sep = "")
      }
    })
    ph$ylab <- xylab[[1]]
    if (length(xylab) > 1) ph$xlab <- xylab[[2]]
    ph
  })
}

#' ========================================================================
#' lwc-09-06-2015: plot MDS using lattice package
#' Usage:
#' data(iris)
#' x <- iris[,1:4]
#' y <- iris[,5]
#' mdsplot(x,y, dimen = c(1,2),ep = 2)
#' mdsplot(x,y, dimen = c(2,1),ep = 1)
#' Classical MDS of x (2 dimensions) plotted with grpplot, coloured by y.
mdsplot <- function(x, y, method = "euclidean", dimen = c(1, 2), ...)
{
  METHODS <- c(
    "euclidean", "maximum", "manhattan", "canberra", "binary",
    "minkowski"
  )
  meth <- pmatch(method, METHODS) #' partial matching of the metric name
  dis <- dist(x, method = METHODS[meth])

  #' mds <- cmdscale(dis) #' only consider 2 dimension
  #' mds <- as.data.frame(mds)
  mds <- cmdscale(dis, k = 2, eig = TRUE) #' Classical MDS
  #' mds <- isoMDS(dis, k=2) #' Non-metric MDS
  mds <- as.data.frame(mds$points)
  names(mds) <- c("Coordinate 1", "Coordinate 2")

  #' want to change order?
  mds <- mds[, dimen]

  #' call group plot
  p <- grpplot(mds, y, plot = "pairs", ...)
  p
}

#' ========================================================================
#' lwc-29-05-2008: Another version of PCA plot with proportion indication.
#'                 Use text indicate different groups.
#' lwc-13-09-2010: major changes and add ellipse plot
#' Base-graphics PCA plot: group labels as text, PC variance percentages in
#' the axis labels and optional 97.5% data ellipses per group.
pca.plot <- function(x, y, scale = TRUE, abbrev = FALSE, ep.plot = FALSE,
                     ...) {
  #' lwc-12-12-2008: Plot ellipse
  elli.plot <- function(x, y, ...) {
    Var <- var(cbind(x, y))
    Mean <- cbind(mean(x), mean(y))
    Elli <- ellipse(Var, centre = Mean, level = 0.975)
    lines(Elli[, 1], Elli[, 2], ...)
  }

  x <- as.matrix(x)
  y <- factor(y)

  pca <- pca.comp(x, scale = scale, pcs = 1:2, ...)
  val <- pca$scores
  #' NOTE(review): axes are swapped on purpose here (PC2 on x, PC1 on y);
  #' the xlab/ylab strings below match this ordering.
  val <- val[c("PC2", "PC1")]

  if (abbrev) levels(y) <- abbreviate(levels(y), abbrev)

  plot(val,
    type = "n", cex = 1.0, cex.lab = 1.0, cex.axis = 1.0, cex.main = 1.0,
    ylab = paste("PC1", " (", pca$vars[1], "%)", sep = ""),
    xlab = paste("PC2", " (", pca$vars[2], "%)", sep = ""),
    ...
  )
  text(val[, 1], val[, 2], as.character(y), cex = 0.7, col = unclass(y), ...)

  if (ep.plot) {
    tmp <- as.factor(as.numeric(unclass(y)))
    for (i in levels(tmp)) {
      idx <- tmp == i
      elli.plot(val[idx, 1], val[idx, 2], col = i)
    }
    #' Note: I think that it is not a good way to keep the colors
    #'       of ellipse consistent with group text colors.
  }

  invisible(NULL)
}

#' ========================================================================
#' wll-29-03-2008: Compute the PCA scores and proportion of variance
#' TO-DO: 1.) check validity of argument pcs.
#' PCA via prcomp: returns the selected scores, the percentage of variance
#' per component and ready-made "PCn: xx%" axis labels.
pca.comp <- function(x, scale = FALSE, pcs = 1:2, ...) {
  pca <- prcomp(x, scale. = scale, ...) #' strip off default sample names
  vars <- pca$sdev^2 #' i.e. eigenvalues/variance
  vars <- vars / sum(vars) #' Proportion of Variance
  names(vars) <- colnames(pca$rotation)
  vars <- round(vars * 100, 2)
  #' dfn  <- paste(names(vars)," (",vars[names(vars)],"%)",sep="")
  dfn <- paste(names(vars), ": ", vars[names(vars)], "%", sep = "")
  x <- data.frame(pca$x)
  x <- x[, pcs]
  vars <- vars[pcs]
  dfn <- dfn[pcs]

  return(list(scores = x, vars = vars, varsn = dfn))
}

#' ========================================================================
#' wll-17-11-2008: Get number of rejected hypotheses for several multiple
#'                 testing procedures based on Type I error rates.
#' WLL-21-07-2014: Add na.rm = TRUE in sum.
#' Arguments:
#'  adjp  - a matrix-like p-values of simultaneously testing
#'  alpha - a vector of cut-off of p-values or Type I error rate.
#' Note: See mt.reject in package multest.
#' Returns a matrix: one row per alpha, one column per adjustment method,
#' counting p-values <= alpha.
pval.reject <- function(adjp, alpha) {
  adjp <- as.data.frame(adjp)
  tmp <- sapply(alpha, function(y) {
    p.num <- sapply(adjp, function(x) sum(x <= y, na.rm = TRUE))
  })
  colnames(tmp) <- alpha
  tmp <- t(tmp)
  return(tmp)
}

#' ========================================================================
#' lwc-20-01-2009: Calculate the p-values for columns of data matrix
#'   with respect to group information. Support multiple categorical data.
#' lwc-16-06-2010: Provide argument method. Support user defined test method
#'                 which has formula format and returned p.value.
#' Arguments:
#'  x      - data frame or matrix
#'  y      - categorical data
#'  method - hypothesis test such as t.test and wilcox.test.
pval.test <- function(x, y, method = "oneway.test", ...) {
  #' resolve 'method' as function / name / expression
  method <- if (is.function(method)) {
    method
  } else if (is.character(method)) {
    get(method)
  } else {
    eval(method)
  }

  pval <- sapply(as.data.frame(x), function(x) {
    method(x ~ y, ...)$p.value
    #' Normality test. Note that H0 is normal distribution!
    #' shapiro.test(x)$p.value
    #' t.test(x ~ y,var.equal=F)$p.value
    #' oneway.test(x ~ y,var.equal=F)$p.value
  })

  #' return(list(pval=pval, method=method))
  return(pval)
}

#' ========================================================================
#' lwc-23-03-2010: corrgram with ellipse
#' lwc-27-04-2012: put scales in argument list
#' Arguments:
#'  co    - Correlation matrices
#'  lable - Logical value indicating whether the correlation coefficient (x
#'          100) should be displayed.
#' \references{
#'   <NAME> (2002).
#'   \emph{Corrgrams: Exploratory displays for correlation matrices}.
#'    The American Statistician, 56, 316--324.
#'   <NAME>, <NAME> (1996).
#'   \emph{A graphical display of large correlation matrices}.
#'    The American Statistician, 50, 178--180.
#' }
#' Usages:
#' tmp <- iris[,1:4]
#' co  <- cor(tmp)
#' corrgram.ellipse(co,label=T)
#' corrgram.circle(co)
corrgram.ellipse <- function(co,
                             label = FALSE,
                             col.regions = colorRampPalette(c(
                               "red", "white",
                               "blue"
                             )),
                             scales = list(x = list(rot = 90)),
                             ...) {
  ph <- levelplot(co,
    xlab = NULL, ylab = NULL,
    at = do.breaks(c(-1.01, 1.01), 20),
    colorkey = list(space = "right"),
    #' col.regions = heat.colors,#terrain.colors,#cm.colors,
    #' col.regions = colorRampPalette(c("yellow", "red")),
    col.regions = col.regions,
    scales = scales,
    panel = panel.corrgram.ell,
    label = label,
    ...
  )
  return(ph)
}

#' ========================================================================
#' lwc-23-03-2010: corrgram with circle/pie
#' lwc-27-04-2012: put scales in argument list
#' Arguments:
#'  co - Correlation matrices
corrgram.circle <- function(co,
                            col.regions = colorRampPalette(c(
                              "red", "white",
                              "blue"
                            )),
                            scales = list(x = list(rot = 90)),
                            ...) {
  ph <- levelplot(co,
    xlab = NULL, ylab = NULL,
    colorkey = list(space = "right"),
    at = do.breaks(c(-1.01, 1.01), 101),
    col.regions = col.regions,
    scales = scales,
    panel = panel.corrgram.cir,
    ...
  )
  return(ph)
}

#' ========================================================================
#' lwc-23-03-2010: Panel function for ellipse corrgram.
#' From Lattice book chapter 13. Internal function.
panel.corrgram.ell <- function(x, y, z, subscripts, at, level = 0.9,
                               label = FALSE, ...) {
  #' require("ellipse", quietly = TRUE)
  x <- as.numeric(x)[subscripts]
  y <- as.numeric(y)[subscripts]
  z <- as.numeric(z)[subscripts]
  zcol <- level.colors(z, at = at, ...)
  #' one ellipse per cell: eccentricity encodes |correlation|
  for (i in seq(along = z)) {
    ell <- ellipse(z[i],
      level = level, npoints = 50, scale = c(.2, .2),
      centre = c(x[i], y[i])
    )
    panel.polygon(ell, col = zcol[i], border = zcol[i], ...)
  }
  if (label) {
    panel.text(
      x = x, y = y, lab = 100 * round(z, 2), cex = 0.8,
      col = ifelse(z < 0, "white", "black")
    )
  }
}

#' =========================================================================
#' lwc-23-03-2010:Panel function for partially filled circles corrgram.
#' From Lattice book chapter 13. Internal function.
panel.corrgram.cir <- function(x, y, z, subscripts, at = pretty(z),
                               scale = 0.8, ...) {
  x <- as.numeric(x)[subscripts]
  y <- as.numeric(y)[subscripts]
  z <- as.numeric(z)[subscripts]
  zcol <- level.colors(z, at = at, ...)
  #' pie-style wedge whose sweep is proportional to the correlation
  for (i in seq(along = z)) {
    lims <- range(0, z[i])
    tval <- 2 * base::pi * seq(from = lims[1], to = lims[2], by = 0.01)
    grid.polygon(
      x = x[i] + .5 * scale * c(0, sin(tval)),
      y = y[i] + .5 * scale * c(0, cos(tval)),
      default.units = "native",
      gp = gpar(fill = zcol[i])
    )
    grid.circle(
      x = x[i], y = y[i], r = .5 * scale,
      default.units = "native"
    )
  }
}

#' ========================================================================
#' lwc-15-04-2010: pairwise combination of categorical data set
#' Build one {dat, cls} pair per two-level class combination; factor levels
#' are dropped to the selected pair.
dat.sel <- function(dat, cls, choices = NULL) {
  #' get the index of pairwise combination
  idx <- combn.pw(cls, choices = choices)

  #' construct data set consisting of data matrix and its class info
  dat.pair <- lapply(idx, function(x) {
    cls.pw <- factor(cls[x]) #' force drop factor levels
    dat.pw <- dat[x, , drop = F]
    list(dat = dat.pw, cls = cls.pw)
  })

  return(dat.pair)
}

#' ========================================================================
#' lwc-13-04-2010: Index of pairwise combination for categorical vectors.
#' Build a logical index data frame: one column per pairwise class
#' combination (named "a~b"), TRUE where cls belongs to that pair.
combn.pw <- function(cls, choices = NULL) {
  #' pairwise combinations of the requested levels (or all levels)
  .combn.pw <- function(choices, lev) {
    choices <- if (is.null(choices)) lev else unique(choices)

    pm <- pmatch(choices, lev)
    if (any(is.na(pm))) {
      stop("'choices' should be one of ", paste(lev, collapse = ", "))
    }

    #' Get the binary combinations using combn (core package utils)
    if (length(choices) == 1) {
      if (F) { #' simple implementation
        lev.1 <- setdiff(lev, choices)
        com <- cbind(choices, lev.1)
        dimnames(com) <- NULL
      } else { #' Keep comparable with dat.sel.1
        com <- t(combn(lev, 2))
        #' keep only the pairs containing the single chosen level
        idx <- sapply(1:nrow(com), function(x) {
          if (match(choices, com[x, ], nomatch = 0) > 0) {
            return(T)
          } else {
            (F)
          }
        })
        com <- com[idx, , drop = F] #' lwc-01-12-2009: fix a bug
      }
    } else {
      com <- t(combn(choices, 2))
    }
    return(com)
  }

  cls <- as.factor(cls)
  lev <- levels(cls)

  if (is.list(choices)) {
    com <- lapply(choices, function(x) .combn.pw(x, lev))
    com <- do.call("rbind", com)
    com <- unique(com)
  } else {
    com <- .combn.pw(choices, lev)
  }

  idx <- apply(com, 1, function(x) {
    ind <- cls %in% x
    #' ind <- which(ind)
    #' comment: don't use this otherwise you have to switch
    #' data frame to list.
  })

  colnames(idx) <- apply(com, 1, paste, collapse = "~")
  idx <- as.data.frame(idx) #' for easy manipulation.
  return(idx)
}

#' ========================================================================
#' lwc-13-08-2006: Generates the pairwise data set based on the class label.
#' History:
#'   18-09-2006: Fix a bug.
#'   31-05-2007: Major changes
#'   01-12-2009: fix a bug when cl is two-class.
#'   21-02-2010: Change name from dat.set to .dat.sel and treat as internal
#'               function
#' NOTE: Using drop=F to keep the format of matrix even the matrix has one
#'       element.
.dat.sel <- function(dat, cl, choices = NULL) {
  #' lwc-29-10-2006: combinations is from package gtools.
  #' $Id: mt_util_1.r,v 1.16 2009/07/27 10:23:41 wll Exp $
  #' From email by <NAME> <<EMAIL>> to r-help
  #' dated Tue, 14 Dec 1999 11:14:04 +0000 (GMT) in response to
  #' <NAME> <<EMAIL>>. Original version was
  #' named "subsets" and was Written by <NAME>.
  #' Enumerate combinations of r items out of n (optionally with repeats).
  combinations <- function(n, r, v = 1:n, set = TRUE,
                           repeats.allowed = FALSE) {
    if (mode(n) != "numeric" || length(n) != 1 || n < 1 || (n %% 1) != 0) {
      stop("bad value of n")
    }
    if (mode(r) != "numeric" || length(r) != 1 || r < 1 || (r %% 1) != 0) {
      stop("bad value of r")
    }
    if (!is.atomic(v) || length(v) < n) {
      stop("v is either non-atomic or too short")
    }
    if ((r > n) & repeats.allowed == FALSE) {
      stop("r > n and repeats.allowed=FALSE")
    }
    if (set) {
      v <- unique(sort(v))
      if (length(v) < n) stop("too few different elements")
    }
    v0 <- vector(mode(v), 0)
    #' Inner workhorse (recursive, via Recall)
    if (repeats.allowed) {
      sub <- function(n, r, v) {
        if (r == 0) {
          v0
        } else if (r == 1) {
          matrix(v, n, 1)
        } else if (n == 1) {
          matrix(v, 1, r)
        } else {
          rbind(cbind(v[1], Recall(n, r - 1, v)), Recall(n - 1, r, v[-1]))
        }
      }
    } else {
      sub <- function(n, r, v) {
        if (r == 0) {
          v0
        } else if (r == 1) {
          matrix(v, n, 1)
        } else if (r == n) {
          matrix(v, 1, n)
        } else {
          rbind(
            cbind(v[1], Recall(n - 1, r - 1, v[-1])),
            Recall(n - 1, r, v[-1])
          )
        }
      }
    }
    sub(n, r, v[1:n])
  }

  #' pairwise level combinations honouring the user's 'choices'
  func <- function(choices) {
    if (is.null(choices)) {
      choices <- g
    } else {
      choices <- unique(choices)
    }

    i <- pmatch(choices, g)
    if (any(is.na(i))) {
      stop("'choices' should be one of ", paste(g, collapse = ", "))
    }

    #' Get the binary combinations based on the class labels (package GTOOLS)
    if (length(choices) == 1) {
      com <- combinations(length(g), 2, v = g)
      idx <- sapply(1:nrow(com), function(x) {
        if (match(choices, com[x, ], nomatch = 0) > 0) {
          return(T)
        } else {
          (F)
        }
      })
      com <- com[idx, , drop = F] #' lwc-01-12-2009: fix a bug
    } else {
      com <- combinations(length(choices), 2, v = choices)
    }
    return(com)
  }

  if (missing(dat) || missing(cl)) {
    stop(" The data set and/or class label are missing!")
  }

  cl <- as.factor(cl)
  g <- levels(cl)

  if (is.list(choices)) {
    com <- lapply(choices, function(x) func(x))
    com <- do.call("rbind", com)
    com <- unique(com)
  } else {
    com <- func(choices)
  }

  #' process the data set labels being selected
  dat.sub <- list()
  cl.sub <- list()
  for (i in (1:nrow(com))) {
    idx <- (cl == com[i, ][1]) | (cl == com[i, ][2])
    cl.sub[[i]] <- cl[idx]
    cl.sub[[i]] <- cl.sub[[i]][, drop = T] #' drop the levels
    dat.sub[[i]] <- dat[idx, , drop = F]
  }

  #' get comparison names
  com.names <- apply(com, 1, paste, collapse = "~")
  names(dat.sub) <- names(cl.sub) <- com.names

  return(list(dat = dat.sub, cl = cl.sub, com = com))
}

#' ========================================================================
#' lwc-28-07-2009: panel function for plotting regression line with red
#'                 color.
#' lwc-29-03-2010: add dots arguments for panel.xyplot.
#' Lattice panel: scatter plus a red loess smoother (or a straight lm line
#' when y is near-constant, where loess would fail).
panel.smooth.line <- function(x, y, ...) {
  panel.grid(h = -1, v = -1)
  panel.xyplot(x, y, ...)
  #' panel.xyplot(x, y, type="p")
  if (sd(y) > 0.001) { # .Machine$double.eps)
    panel.loess(x, y, span = 1, col = "red")
  } else {
    panel.lmline(x, y, col = "red")
  }
}

#' ========================================================================
#' lwc-04-12-2006: Pre-process Data Set
#' lwc-27-03-2007: support multiple methods
#' lwc-27-06-2007: 'rescaler' function in package 'reshape' provides a R
#' standard to deal with vector, matrix and data.frame using S3 method.
#' Also another version of range method. Need to check source code to hack
#' something.
preproc <- function(x, y = NULL, method = "log", add = 1) {
  #' Pre-process a data matrix/data frame. `method` may be a vector of
  #' method names; they are applied in the given order. Column-wise
  #' methods go through sapply (result becomes a matrix); element-wise
  #' methods (log, log10, sqrt, asinh) keep the data frame.
  #' TIC normalisation (row-wise scaling by total ion count)
  TICnorm <- function(x, y = NULL) {
    scale <- apply(x, 1, function(x) sum(x, na.rm = T))
    if (!is.null(y)) {
      #' centre the per-group mean scales around the overall mean
      grpm <- as.vector(by(scale, y, mean))
      grpm <- grpm - mean(scale)
      for (k in 1:nlevels(y)) {
        scale[y == levels(y)[k]] <- scale[y == levels(y)[k]] - grpm[k]
      }
    }
    x <- sweep(x, 1, scale, "/")
  }

  #' NA-tolerant shorthands used by the column-wise methods
  me <- function(x) mean(x, na.rm = T)
  se <- function(x) sd(x, na.rm = T)
  mx <- function(x) max(x, na.rm = T)
  mn <- function(x) min(x, na.rm = T)
  #' sm <- function(x) sum(x,na.rm=T)
  if (!is.matrix(x) && !is.data.frame(x)) {
    stop("x must be a matrix or data frame.")
  }
  x <- as.data.frame(x)
  if (!is.null(y)) {
    y <- as.factor(y)
  }
  for (i in method) {
    i <- match.arg(i, c(
      "center", "auto", "range", "pareto", "vast", "level",
      "log", "log10", "sqrt", "asinh", "TICnorm"
    ))
    x <- switch(i,
      #' by columns
      "center" = sapply(x, function(x) (x - me(x))),
      "auto" = sapply(x, function(x) (x - me(x)) / se(x)),
      "range" = sapply(x, function(x) (x - me(x)) / (mx(x) - mn(x))),
      "pareto" = sapply(x, function(x) (x - me(x)) / sqrt(se(x))),
      "vast" = sapply(x, function(x) (x - me(x)) * me(x) / se(x)^2),
      "level" = sapply(x, function(x) (x - me(x)) / me(x)),
      #' by all (element-wise; `add` guards log of zero)
      "log" = log(x + add),
      "log10" = log10(x + add),
      "sqrt" = sqrt(x),
      "asinh" = asinh(x),
      #' by row
      "TICnorm" = TICnorm(x, y)
    )
  }
  rownames(x) <- 1:nrow(x)
  return(x)
}

#' =========================================================================
#' Remove variables which have (near) zero S.D with/without respect to class.
#' lwc-18-01-2007: For more details, ?.Machine
#' lwc-15-03-2008: Fix a bug
#' lwc-01-03-2010: add na.rm
preproc.sd <- function(x, y = NULL, na.rm = FALSE) {
  if (is.null(y)) {
    #' take off the columns with the same values.
    id <- which(apply(x, 2, sd, na.rm = na.rm) > .Machine$double.eps)
    x <- x[, id]
    return(x)
  } else {
    y <- factor(y)
    #' group s.d. with respect to features
    z <- sapply(data.frame(x), function(i) tapply(i, y, sd, na.rm = na.rm))
    #' minimal s.d. across groups, per feature
    z.1 <- sapply(data.frame(z), function(i) min(i))
    #' which one is zero within group?
    if (any(z.1 <= .Machine$double.eps)) {
      z.2 <- which(z.1 <= .Machine$double.eps)
      x <- x[, -z.2, drop = F]
    }
    return(x)
  }
}

#' =========================================================================
#' Remove variables that appear to be constant within groups/class
#' lwc-24-01-2007: The function is hacked from lda.default in MASS package
#' lwc-25-01-2007: Fix a bug
preproc.const <- function(x, y, tol = 1.0e-4) {
  if (is.null(dim(x))) stop("'x' is not a matrix or data frame")
  x <- as.matrix(x) #' lwc-04-03-2008: must be matrix
  n <- nrow(x)
  p <- ncol(x)
  if (n != length(y)) {
    stop("nrow(x) and length(y) are different")
  }
  g <- as.factor(y)
  group.means <- tapply(x, list(rep(g, p), col(x)), mean)
  #' within-group standard deviation of each variable
  f1 <- sqrt(diag(var(x - group.means[g, ])))
  #' which one is constant within group?
  if (any(f1 < tol)) {
    const <- (1:p)[f1 < tol]
    x <- x[, -const, drop = F]
  }
  x
}

#' ========================================================================
#' lwc-19-06-2008: Correlation analysis of data set and extract the
#' pairs with correlation coefficient larger than cutoff
#' lwc-21-10-2010: Minor modify in Manchester: 1.) add abs.f=FALSE; 2). Add
#'                 dot arguments for passing additional info for function
#'                 cor.
#' lwc-23-06-2015: fix a minor bug
#' Arguments:
#'   mat    - A matrix-like data set
#'   cutoff - A scalar value of threshold
#' Returns:
#'   A data frame with three columns, in which the first and second columns
#'   are variable names and their correlation (larger than cutoff) are
#'   given in the third column.
cor.cut <- function(mat, cutoff = 0.75, abs.f = FALSE,
                    use = "pairwise.complete.obs", method = "pearson", ...) {
  #' co <- cor(mat,use=use, method=method)
  co <- cor(x = mat, use = use, method = method, ...)
  #' added on 23-06-2015: keep only the lower triangle so each pair
  #' appears once
  co[upper.tri(co)] <- NA
  diag(co) <- NA
  co <- co[-1, -ncol(co), drop = F]
  #' extract items above the cutoff value
  if (abs.f) {
    idx <- which(abs(co) >= cutoff, arr.ind = T)
  } else {
    idx <- which(co >= cutoff, arr.ind = T)
  }
  if (length(idx) != 0) { #' two-columns correlation
    fs1 <- rownames(co)[idx[, 1]]
    fs2 <- colnames(co)[idx[, 2]]
    res <- data.frame(
      com1 = fs1, com2 = fs2, cor = co[idx],
      stringsAsFactors = FALSE
    )
  } else {
    res <- NA
  }
  res
}

#' ========================================================================
#' lwc-16-04-2008: Hierarchical cluster analysis based on correlation
#' analysis.
#' lwc-19-05-2008: Fix a tiny bug
#' lwc-21-05-2008: Check extreme situation
#' lwc-14-10-2009: Change name from my.fs.cor to fs.cor.bas
#' lwc-17-02-2010: change name from fs.cor.bas to cor.hcl.
#' lwc-18-02-2010: add use and method for function cor
#' LWC-13-02-2012: plot cluster use plot method for dendrogram instead of
#'                 for hclust.
#' Arguments:
#'   mat     - A matrix-like data set
#'   cutoff  - A vector of cutoff (should be in increasing-order)
#'             NOTE(review): the code below uses cutoff as a scalar
#'             (h = 1 - cutoff); the "vector" wording looks stale - TODO
#'             confirm.
#'   fig.f   - A logical value for plotting clustering
#' Returns:
#'   A list including all clustering information.
cor.hcl <- function(mat, cutoff = 0.75, use = "pairwise.complete.obs",
                    method = "pearson", fig.f = TRUE, hang = -1,
                    horiz = FALSE, main = "Cluster Dendrogram",
                    ylab = ifelse(!horiz, "1 - correlation", ""),
                    xlab = ifelse(horiz, "1 - correlation", ""), ...) {
  co <- cor(mat, use = use, method = method)
  res <- list()
  if (ncol(co) <= 1) { #' if number of FS is less than 2, no clustering.
    res$all <- co
  } else {
    #' cluster on correlation distance (1 - r)
    hc <- hclust(as.dist(1 - co))
    if (fig.f && ncol(co) > 2) { #' 14-10-09: change & to &&
      #' plot(hc, hang=-1,sub="", ylab="1 - correlation", xlab="Variables",
      #'      cex=0.6,...)
      #' lwc-13-02-2012: Not plot hc directly.
      den.hc <- as.dendrogram(hc, hang = hang)
      plot(den.hc, main = main, ylab = ylab, xlab = xlab, horiz = horiz, ...)
      if (horiz) {
        abline(v = 1 - cutoff, col = "red")
      } else {
        abline(h = 1 - cutoff, col = "red")
      }
    }
    #' cut the tree at the cutoff and re-correlate within each cluster
    id <- cutree(hc, h = 1 - cutoff)
    res <- lapply(unique(id), function(x) {
      cent <- mat[, id == x, drop = FALSE]
      res <- if (ncol(cent) < 2) NA else cor(cent, use = use, method = method)
    })
    #' names(res) <- paste("Cluster",unique(id), sep="_")
    #' shrink the list: keep only clusters with more than one variable
    id <- sapply(res, function(x) {
      if (!any(is.na(x))) TRUE else FALSE
    })
    if (all(id == FALSE)) { #' lwc-21-05-2008: Don't use if (!all(id)) !!!
      res$all <- co
    } else {
      res <- res[id]
      names(res) <- paste("Cluster", 1:length(res), sep = "_")
      res$all <- co
    }
  }
  return(res)
}

#' ========================================================================
#' lwc-18-02-2010: Correlation heatmap using lattice
#' lwc-17-03-2010: add dendrogram.
#' NOTE(review): relies on levelplot (lattice) and dendrogramGrob
#' (latticeExtra), presumably attached elsewhere in the package - confirm.
cor.heat <- function(mat, use = "pairwise.complete.obs", method = "pearson",
                     dend = c("right", "top", "none"), ...) {
  dend <- match.arg(dend)
  co <- cor(mat, use = use, method = method)
  #' Prepare for dendrogram
  dd <- as.dendrogram(hclust(as.dist(1 - co))) #' for correlation only
  ord <- order.dendrogram(dd)
  co.p <- switch(dend,
    right = levelplot(co[ord, ord],
      aspect = "fill",
      scales = list(x = list(rot = 90), cex = 0.6),
      colorkey = list(space = "bottom"),
      legend = list(right = list(
        fun = dendrogramGrob,
        args = list(
          x = dd, ord = ord,
          side = "right", size = 6
        )
      )), ...
    ),
    top = levelplot(co[ord, ord],
      aspect = "fill",
      scales = list(x = list(rot = 90), cex = 0.6),
      colorkey = list(space = "bottom"),
      legend = list(top = list(
        fun = dendrogramGrob,
        args = list(
          x = dd, ord = ord,
          side = "top", size = 6
        )
      )), ...
    ),
    none = levelplot(co[ord, ord], #' still want to order them by HCL.
      aspect = "fill",
      scales = list(x = list(rot = 90), cex = 0.6),
      colorkey = list(space = "bottom"), ...
    )
  )
  #' Fix-Me: Is there any efficient and simple way to do switch inside
  #' levelplot?
  return(co.p) #' must use return for lattice object.
}

#' ========================================================================
#' lwc-09-03-2010: Correlation analysis between two data sets
#' lwc-14-09-2010: convert into function from scratch.
#' lwc-05-04-2011: Since the correlation matrix here is not squared, its
#' order methods are limited. If the similarity matrix is squared, the
#' functions for ordering objects using hierarchical clustering in package
#' gclus can be used. These functions are order.single, order.endlink and
#' order.hclust.
#' Usages
#' x1 <-rnorm(20,40,1)
#' x2 <-rnorm(20,40,2.5)
#' df1 <-data.frame(x1,x2)
#' y1 <-rnorm(20,1,0.47)
#' y2 <-rnorm(20,1,0.59)
#' y3 <-rnorm(20,1,0.75)
#' df2 <-data.frame(y1,y2,y3)
#' cor(df1, df2)
#' cor.heat.gram(df1, df2)
cor.heat.gram <- function(mat.1, mat.2, use = "pairwise.complete.obs",
                          method = "pearson",
                          main = "Heatmap of correlation", cex = 0.75, ...) {
  co <- cor(mat.1, mat.2, use = use, method = method)
  co <- co[complete.cases(co), ]
  if (F) {
    ph <- levelplot(co,
      scales = list(x = list(rot = 90), cex = cex),
      xlab = "", ylab = "", main = main, ...
    )
    #' heatmap.2(co, Rowv=T, Colv=T, col=rev(heat.colors(16)),
    #'  #' distfun=function(x) as.dist(1 - x),
    #'  trace="none", dendrogram = "both", density.info="none")
  }
  #' The heatmap needs to be ordered by some rules so we can easily spot
  #' some patterns.
  row.dd <- as.dendrogram(hclust(dist(co)))
  #' not as.dist since co is not squared.
  row.ord <- order.dendrogram(row.dd)
  col.dd <- as.dendrogram(hclust(dist(t(co))))
  col.ord <- order.dendrogram(col.dd)
  ph <- levelplot(co[row.ord, col.ord],
    aspect = "fill",
    scales = list(x = list(rot = 60), cex = cex),
    xlab = "", ylab = "", main = main,
    #' main=list(paste("Heatmap of correlation between data - ",des, sep=""),
    #'          cex=cex),
    #' wll-10-09-2015: Use panel.fill() to fill the background with
    #' your 'NA' color.From <NAME>
    panel = function(...) {
      panel.fill(col = "black")
      panel.levelplot(...)
    },
    colorkey = list(space = "bottom"),
    #' colorkey = list(space = "bottom", labels=list(cex=cex)),
    legend = list(
      right = list(
        fun = dendrogramGrob,
        args = list(
          x = col.dd, ord = col.ord,
          side = "right", size = 10
        )
      ),
      top = list(
        fun = dendrogramGrob,
        args = list(
          x = row.dd, ord = row.ord,
          side = "top", type = "rectangle", size = 5
        )
      )
    ), ...
  )
  #' NOTE(review): a bare `ph` inside a function body does not print the
  #' lattice object; it is a no-op here (the object is returned below).
  ph
  ph.1 <- corrgram.circle(co[row.ord, col.ord], main = main, ...)
  ph.1 <- update(ph.1, scales = list(x = list(rot = 60), cex = cex))
  ph.1
  #' convert short format to long format
  co.1 <- my_melt(co)
  #' co.1 <- reshape::melt(co)
  co.1 <- co.1[complete.cases(co.1), ] #' 17-03-2010: in case NA
  #' co.max <- max(co.1[,3], na.rm=T)
  #' co.thre <- co.1[co.1[,3] >= 0.4,] #' lwc-09-03-2010: Very specific
  #' lwc-23-06-2015: If co is a symmetric matrix,
  if (F) {
    co.1 <- co
    co.1[upper.tri(co.1)] <- NA
    diag(co.1) <- NA
    co.1 <- co.1[-1, -ncol(co.1), drop = F]
    co.1 <- my_melt(co.1)
    #' co.1 <- reshape::melt(co.1)
    co.1 <- co.1[complete.cases(co.1), ]
  }
  res <- list(cor.heat = ph, cor.gram = ph.1, cor.short = co, cor.long = co.1)
  return(res)
}

#' ========================================================================
#' wl-05-11-2021, Fri: Convert matrix into long format
#' Internal format. It is used to replace reshape::melt(x).
#' NOTE(review): row names are rebuilt by strsplit on " ", which assumes
#' the matrix dimnames themselves contain no spaces - TODO confirm.
my_melt <- function(x) {
  res <- matrix(x, dimnames = list(t(outer(colnames(x), rownames(x),
                                           FUN = paste)), NULL))
  res <- as.data.frame(res)
  rn <- rownames(res)
  rn <- do.call("rbind", sapply(rn, strsplit, " "))
  res <- cbind(rn, res)
  dimnames(res) <- list(1:nrow(res), c("X2", "X1", "value"))
  res <- res[c("X1", "X2", "value")]
  return(res)
}

#' =======================================================================
#' lwc-26-04-2008: save a list into a table file.
#' Write a list of tables (or a single table) into one CSV file; each list
#' component is written as a section preceded by its name.
#' Arguments:
#'   x         - a list of matrices/data frames, or a single one
#'   filename  - output CSV file name
#'   firstline - text written at the top of the file
save.tab <- function(x, filename = "temp.csv", firstline = "\n") {
  # options(warn = -1) #' disable warning message
  write(firstline, file = filename)
  if (is.list(x) && !is.data.frame(x)) { #' lwc-18-02-2010: fix
    #' one labelled section per list component
    for (i in names(x)) {
      write(paste("\n", i, sep = ""), file = filename, sep = ",", append = T)
      write.table(x[[i]],
        file = filename, append = T, sep = ",", na = "",
        quote = F, row.names = T, col.names = NA
      )
    }
  } else {
    write(paste("\n", sep = ""), file = filename, sep = ",", append = T)
    write.table(x,
      file = filename, append = T, sep = ",", na = "",
      quote = F, row.names = T, col.names = NA
    )
  }
  # options(warn = 0) #' restore to default value
  invisible()
}

#' =======================================================================
#' lwc-13-12-2008: Convert a list with components of vector to a data frame
#' for writing into an Excel file. Shorter vectors are padded with NA so
#' all columns have equal length.
list2df <- function(x) {
  len <- max(sapply(x, length))
  df <- sapply(x, function(y) c(y, rep(NA, len - length(y))))
  #' lwc-26-06-2008: bug fix. Convert into matrix if fs.order is a vector.
  #' (sapply simplifies to a plain vector when every component has length
  #' one; transposing restores a one-row matrix.)
  if (is.vector(df)) df <- t(df)
  return(df)
}

#' ========================================================================
#' lwc-26-04-2008: my version of unlist, which collapses a nested list of
#' any depth into a 1-depth list; names along the path are joined with "_"
#' to keep entries unique. Recursive.
un.list <- function(x, y = "") {
  res <- list()
  for (i in names(x)) {
    #' accumulated name: parent path plus current component name
    id <- if (y == "") i else paste(y, i, sep = "_")
    if (is.list(x[[i]]) && !is.data.frame(x[[i]])) {
      #' Since data frame has also property of list, treat it as a leaf
      #' rather than recursing into it.
      tmp <- un.list(x[[i]], y = id)
      res <- c(res, tmp)
    } else {
      res[[id]] <- x[[i]]
    }
  }
  res
}

#' ========================================================================
#' lwc-16-09-2010: Remove all NULL or NA entries from a list.
#' Hacked from function compact of package plyr.
#' wll-15-09-2015: Has some problem in new version of R.
shrink.list <- function(x) {
  tmp <- Filter(Negate(is.null), x)
  tmp <- Filter(Negate(is.na), tmp)
  #' Note-16-09-2010: Get a warning if swapping the above two lines.
  #' NOTE(review): Filter with is.na assumes each remaining component
  #' yields a single logical; components of length > 1 are presumably the
  #' "problem in new version of R" mentioned above - TODO confirm.
  return(tmp)
}

#' ========================================================================
#' Generates Class Indicator Matrix from a Factor.
#' A matrix which is zero except for the column corresponding to the class.
#' Internal function. From package NNET
class.ind <- function(cl) {
  n <- length(cl)
  cl <- as.factor(cl)
  x <- matrix(0, n, length(levels(cl)))
  #' linear-index trick: sets element (i, class(i)) of the n x nlevels
  #' matrix to 1 in a single vectorised assignment
  x[(1:n) + n * (unclass(cl) - 1)] <- 1
  dimnames(x) <- list(names(cl), levels(cl))
  x
}

#'  1) pca.outlier
#'  2) pca.outlier.1
#'  3) grpplot
#'  4) .grpplot
#'  5) pcaplot
#'  6) panel.elli.1
#'  7) panel.elli
#'  8) panel.outl
#'  9) stats.mat
#' 10) stats.vec
#' 11) .pval
#' 12) .foldchange
#' 13) .foldchange2logratio
#' 14) .logratio2foldchange
#' 15) vec.summ.1
#' 16) vec.summ
#' 17) df.summ
#' 18) mv.zene
#' 19) mv.fill
#' 20) mv.stats
#' 21) mv.pattern
#' 22) hm.cols
#' 23) pca.plot.wrap
#' 24) mds.plot.wrap
#' 25) lda.plot.wrap
#' 26) lda.plot.wrap.1
#' 27) pls.plot.wrap
#' 28) plot.wrap.split
#' 29) mdsplot
#' 30) pca.plot
#' 31) pca.comp
#' 32) pval.reject
#' 33) pval.test
#' 34) corrgram.ellipse
#' 35) corrgram.circle
#' 36) panel.corrgram.ell
#' 37) panel.corrgram.cir
#' 38) dat.sel
#' 39) combn.pw
#' 40) .dat.sel
#' 41) panel.smooth.line
#' 42) preproc
#' 43) preproc.sd
#' 44) preproc.const
#' 45) cor.cut
#' 46) cor.hcl
#' 47) cor.heat
#' 48) cor.heat.gram
#' 49) my_melt
#' 50) save.tab
#' 51) list2df
#' 52) un.list
#' 53) shrink.list
#' 54) class.ind
<file_sep>/man/mc.norm.Rd
% lwc-30-01-2007:
\name{mc.norm}
\alias{mc.norm}
\title{
Normality Test by Shapiro-Wilk Test
}
\description{
Perform Shapiro-Wilk normality test by \code{shapiro.test} and
plot the density function and boxplot.
}
\usage{
mc.norm(x, \dots)
}
% --------------------------------------------------------------------
\arguments{
\item{x}{
A matrix or data frame to be tested.
}
\item{\dots}{Additional arguments passed to \code{shapiro.test}.}
}
% ----------------------------------------------------------------------
\value{
Object of \code{shapiro.test}, boxplot and histogram.
}
% ----------------------------------------------------------------------------
\author{
<NAME>
}
\seealso{
\code{\link{maccest}}, \code{\link{mc.anova}}
}
% ----------------------------------------------------------------------
\examples{
data(iris)
x <- subset(iris, select = -Species)
y <- iris$Species
method <- c("randomForest","svm","pcalda","knn")
pars <- valipars(sampling="boot", niter = 10, nreps=10)
res <- maccest(x, y, method=method, pars=pars, comp="anova")
res
res$acc.iter
mc.norm(res$acc.iter)
}
\keyword{classif}
<file_sep>/man/predict.pcalda.Rd
% wll-02-07-2007:
%
\name{predict.pcalda}
\alias{predict.pcalda}
\title{
Predict Method for Class 'pcalda'
}
\description{
Prediction of test data using \code{pcalda}.
}
\usage{
\method{predict}{pcalda}(object, newdata,\dots)
}
% ----------------------------------------------------------------------------
\arguments{
\item{object}{
Object of class \code{pcalda}.
}
\item{newdata}{
A matrix or data frame of cases to be classified.
}
\item{\dots}{
Arguments passed from or to other methods.
}
}
% ----------------------------------------------------------------------------
\details{
This function is a method for the generic function \code{predict()} for
class \code{pcalda}. If \code{newdata} is omitted, the results of training
data in \code{pcalda} object will be returned.
}
% ----------------------------------------------------------------------------
\value{
A list with components:
\item{class}{
The predicted class (a factor).
}
\item{posterior}{
The posterior probabilities for the predicted classes.
}
\item{x}{
The rotated test data by the projection matrix of LDA.
} } % ---------------------------------------------------------------------------- \author{ <NAME> } % ---------------------------------------------------------------------------- \seealso{ \code{\link{pcalda}}, \code{\link{plot.pcalda}} } % ---------------------------------------------------------------------------- \examples{ data(iris3) tr <- sample(1:50, 25) train <- rbind(iris3[tr,,1], iris3[tr,,2], iris3[tr,,3]) test <- rbind(iris3[-tr,,1], iris3[-tr,,2], iris3[-tr,,3]) cl <- factor(c(rep("s",25), rep("c",25), rep("v",25))) z <- pcalda(train, cl) pred <- predict(z, test) ## plot the projected data. grpplot(pred$x, pred$class, main="PCALDA: Iris") } \keyword{classif}
092327d9a6877d609bf5c103ce86927ad72acfbe
[ "Markdown", "R" ]
66
R
wanchanglin/mt
dd7f82fc7a7fec1ffb929f052b32c3e39dad1028
2d59a24ea3d988ea7a25d269ba413be1d70e619d
refs/heads/master
<repo_name>insideTheEconomy/Things-Gone-Awry<file_sep>/js/main.js var isAttract = true; vids = ["financial.ogv","911.ogv","katrina.ogv","1907.ogv"]; function init(){ $("video").on("ended", function(){ console.log("ended, looping"); if(!isAttract){ $("#vid")[0].src="vid/menu.ogv"; isAttract = true; } this.load(); }) $("#wrap").click(function(c){ if(isAttract){ click(); isAttract = false; m = 1366/4; i = ~~( c.clientX/m ) ; var v = $("#vid")[0]; v.src="vid/"+vids[i]; vid.load(); } }) } function click(){ $("audio")[0].load(); $("audio")[0].play(); } $(function(){ init(); })
98b362d75789fdbe9be337bcc25bc9c47e5077b2
[ "JavaScript" ]
1
JavaScript
insideTheEconomy/Things-Gone-Awry
93cc5a5cee78ea0532e51e78a5aafb10b171f508
b46debb078b69f4a9f6b2408de1391e41556762b
refs/heads/master
<file_sep><? //********************************************************************************************** // template.php // // Author(s): lmout82 // BEDITOR version 1.0 beta // Licence: MIT License // Link: https://github.com/lmout82/yoBEditor.git // Creation date: August 2016 // Last modification date: October 2016 //*********************************************************************************************** // !This is a "quick and dirty" script to show you how to process JSON data. ?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en"> <head> <title>ARaynorDesign Template</title> <meta name="description" content="free website template" /> <meta name="keywords" content="enter your keywords here" /> <meta http-equiv="content-type" content="text/html; charset=utf-8" /> <meta http-equiv="X-UA-Compatible" content="IE=9" /> <link rel="stylesheet" type="text/css" href="template/css/style.css" /> </head> <body> <div id="main"> <div id="header"> <div id="banner"> <div id="welcome"> <h1>Welcome To Grey Ropes</h1> </div><!--close welcome--> <div id="menubar"> <ul id="menu"> <li class="current"><a href="index.html">Home</a></li> <li><a href="#">Our Work</a></li> <li><a href="#">Testimonials</a></li> <li><a href="#">Projects</a></li> <li><a href="#">Contact Us</a></li> </ul> </div><!--close menubar--> </div><!--close banner--> </div><!--close header--> <div id="site_content"> <?php $postdata = file_get_contents("php://input"); $postdata = explode("=", $postdata); $jsondata = json_decode ( urldecode($postdata[1]) ); $html = ''; foreach ($jsondata as $block) { switch ($block->type) { case '1col': $cell = $block->data[0]; $html .= '<div id="content"><div class="content_item">'; $html .= get_cell($cell->type, $cell->data); $html .= '</div></div>'; break; case '2col': $html .='<div id="content"><div class="content_item">'; foreach ($block->data as $cell) $html .= '<div 
class="content_container">'.get_cell($cell->type, $cell->data).'</div>'; $html .= '</div></div>'; break; case '3col': foreach ($block->data as $cell) $html .= '<div class="col3_blue_cell">'.get_cell($cell->type, $cell->data).'</div>'; break; } } echo $html; function get_cell($type, $data) { $cell_html = ''; switch ($type) { case 'text'; $cell_html = $data; break; case 'staticimage'; $cell_html = '<img src="'.$data.'" />'; break; case 'youtube'; $cell_html = '<div class="video-container"><iframe width="560" height="315" src="https://www.youtube.com/embed/'.$data.'" frameborder="0" allowfullscreen></iframe></div>';; break; } return $cell_html; } ?> </div><!--close site_content--> <div id="content_grey"> <div class="content_grey_container_box"> <h4>Latest Blog Post</h4> <p> Phasellus laoreet feugiat risus. Ut tincidunt, ante vel fermentum iaculis.</p> <div class="readmore"> <a href="#">Read more</a> </div><!--close readmore--> </div><!--close content_grey_container_box--> <div class="content_grey_container_box"> <h4>Latest News</h4> <p> Phasellus laoreet feugiat risus. Ut tincidunt, ante vel fermentum iaculis.</p> <div class="readmore"> <a href="#">Read more</a> </div><!--close readmore--> </div><!--close content_grey_container_box--> <div class="content_grey_container_boxl"> <h4>Contact Us</h4> <p> Phasellus laoreet feugiat risus. 
Ut tincidunt, ante vel fermentum iaculis.</p> <div class="readmore"> <a href="#">Read more</a> </div><!--close readmore--> </div><!--close content_grey_container_box1--> <br style="clear:both"/> </div><!--close content_grey--> </div><!--close main--> <div id="footer_container"> <div id="footer"> <a href="http://validator.w3.org/check?uri=referer">Valid XHTML</a> | <a href="http://fotogrph.com/">Images</a> | with thanks to <a href="http://www.e-shot.net/ugc/free-email-templates-are-bad-idea-for-reputable-businesses/">e-shot</a> | website template by <a href="http://www.araynordesign.co.uk">ARaynorDesign</a> </div><!--close footer--> </div><!--close footer_container--> </body> </html> <file_sep># yoBEditor Yet an Otter Block Editor (yoBEditor) is a small coding project I started to learn JavaScript and JQuery. A block editor is a rich content WYSIWYG (What You See Is What You Get) editor redesigned for the web, based on the concept of blocks. This enhanced editor is easy to use, intuitive and does not require any knowledge in any web languages such as HTML. It can be easily extended. A detailed documentation is available in the "doc" folder. Watch my demo video available at: https://youtu.be/cJyXbsgEwK0 Status: beta ## Important notice This repository was initially developed when I was learning to code a long long time ago. Please take into account that this work does not reflect my present skills. **_This repository is not maintained._**
88b5bda0504b19a50db55188bb0dd7aec692c8b2
[ "Markdown", "PHP" ]
2
PHP
lmout82/yoBEditor
59624a11ad4cef1e8be0d35a8c458b5ec2fc73b0
8b024c2b14e29cf6e28d2a41807043d38d810967
refs/heads/main
<repo_name>schaed/ProtonMailTest<file_sep>/README.md # ProtonMailTest Test from ProtonMail using data <file_sep>/churn.py import pandas as pd import matplotlib.pyplot as plt draw=True verbose=False def GetInvoicesPerCustomer(df_all_customers,df): df_customer_invoice = df.groupby(['Customer ID','Invoice','InvoiceDate'], dropna=True).sum() dfID = {} for ide in df_all_customers: dfID[ide] = [len(df_customer_invoice[df_customer_invoice.index.get_level_values(0)==ide])] return pd.DataFrame.from_dict(dfID, orient='index', columns=['Invoices']) df = pd.read_csv('online_combined.csv') df['InvoiceDate'] = pd.to_datetime(df.InvoiceDate) # add the total amount spent df['totalcost'] = df['Price']*df['Quantity'] # The church rate for the second half of 2011 is 47%. This is 2366 customers. # customers before June 2011 mask = (df['InvoiceDate'] < '2011-6-1') df_beforeJune2011=df.loc[mask] mask = (df['InvoiceDate'] >= '2011-6-1') df_afterJune2011=df.loc[mask] # build list of customers before June 2011. 
df_before_customers = df_beforeJune2011['Customer ID'].unique() df_all_customers = df['Customer ID'].unique() df_after_customers = df_afterJune2011['Customer ID'].unique() n_before_customers = len(df_before_customers) print('') print('Total customers before June 2011: %s' %n_before_customers) print('Total customers after June 2011: %s' %len(df_after_customers)) print('') # Compute the churn rate churn_customers=0 new_customers=0 for ide in df_before_customers: if not (ide in df_after_customers): churn_customers+=1 for ide in df_after_customers: if not (ide in df_before_customers): new_customers+=1 print('') print('Churned customers: %s new customers: %s Churn rate: %s' %(churn_customers,new_customers, 100*(churn_customers/n_before_customers))) print('') # If we select customers who have more than 1 invoice, then the churn rate is 37% print('If we select customers who have more than 1 invoice, then the churn rate is 37\%') df_ID_Ninvoice = GetInvoicesPerCustomer(df_all_customers,df) df_ID_Ninvoice_before = GetInvoicesPerCustomer(df_before_customers,df_beforeJune2011) df_ID_Ninvoice_after = GetInvoicesPerCustomer(df_after_customers,df_afterJune2011) if verbose: print(df_ID_Ninvoice) churn_customers_mto=0 customers_more_than_oneInv_before = df_ID_Ninvoice_before[df_ID_Ninvoice_before['Invoices']>1].index.unique() n_customers_more_than_oneInv_before = len(customers_more_than_oneInv_before) for ide in customers_more_than_oneInv_before: if not (ide in df_after_customers): churn_customers_mto+=1 print('') print('Customers with more than 1 Invoice rates - Churned customers: %s Churn rate: %s' %(churn_customers_mto, 100*(churn_customers_mto/n_customers_more_than_oneInv_before))) print('') # Are there customers close to churning? Going back to all customers and not just the ones with more than one invoice prior to June 2011. This I plot as the time since their last invoice, and I plot it as the number versus the month print('Are there customers close to churning? 
Going back to all customers and not just the ones with more than one invoice prior to June 2011. This I plot as the time since their last invoice, and I plot it as the number versus the month') i = pd.date_range('2011-06-01', periods=6, freq='1M') df_customer_invoice_after = df_afterJune2011.groupby(['Customer ID','Invoice','InvoiceDate'], dropna=True).sum() if verbose: print(df_customer_invoice_after) lastInvoice = [] for ide in df_before_customers: if ide in df_after_customers: df_thiscustomer = df_customer_invoice_after[df_customer_invoice_after.index.get_level_values(0)==ide] #['InvoiceDate'][-1] last_invoice =df_thiscustomer.index.get_level_values(2).sort_values()[-1] lastInvoice+=[last_invoice] # if you want to print customers at risk of churning, then if last_invoice.month<9 and verbose: print('Last purchase was before september. At risk of churning: %s' %ide) monthly_last_invoice=[] timeList=[] for d in i: timeList+=[d] nmonth=0 for itime in lastInvoice: if d.month==itime.month: nmonth+=1 monthly_last_invoice+=[nmonth] # Customers who have not ordered since July, August, or September are from highest to lowest priority to churn. A targeted campaign to advertise to only these customers could be put together. Trying to encourage them to buy, especially near the December holidays. Fortunately, the number of customers with their last invoice in July-September is small compared to those more recent. However, the orders are very holiday driven with more invoices near the December. print('') print('Customers who have not ordered since July, August, or September are from highest to lowest priority to churn. A targeted campaign to advertise to only these customers could be put together. Trying to encourage them to buy, especially near the December holidays. Fortunately, the number of customers with their last invoice in July-September is small compared to those more recent. 
However, the orders are very holiday driven with more invoices near the December.') if draw: plt.plot(list(timeList),list(monthly_last_invoice)) plt.gcf().autofmt_xdate() plt.ylabel('Number of Prior Customers') plt.xlabel('Month of Last Invoice') plt.show() <file_sep>/protonskim.py import pandas as pd import numpy as np import matplotlib.pyplot as plt import sys #from pyspark import spark df2=pd.read_excel('online_retail.xlsx') print(df2) print(df2.columns) df2.columns=['Invoice', 'StockCode', 'Description', 'Quantity', 'InvoiceDate','Price', 'Customer ID', 'Country'] print('') df=pd.read_excel('online_retail_II.xlsx') print(df) print(df.columns) df3 = pd.concat([df,df2]) df3.to_csv('myfilev2.csv') sys.exit(0) if True: df=pd.read_excel('online_retail_II.xlsx') df2=pd.read_excel('online_retail.xlsx') df3 = pd.concat([df,df2]) df3.to_csv('myfilev2.csv') #df3.write.csv("myfile.csv") df4 = pd.read_csv('myfile.csv') print(df4) #df4 = spark.read.format("myfile.csv") #print(df4) #df3.to_csv('myfile.csv') #df.printSchema() #df = pd.read_excel('online_retail_II.xlsx') sys.exit(0) df = pd.read_excel('online_detail_1.xlsx') for d in df['Description'].unique(): print(d) print (df.sort_values(by='InvoiceDate')) print(df[-1:]) print(len(df)) print(df.count()) #df[-50000:].to_excel("online_retail_II_last50k.xlsx") <file_sep>/wordAna.py import spacy import pandas as pd import numpy as np import matplotlib.pyplot as plt import os def occurrences(string, sub): count = start = 0 while True: start = string.find(sub, start) + 1 if start > 0: count+=1 else: return count # Load English tokenizer, tagger, parser and NER nlp = spacy.load("en_core_web_sm") # make sure you download python -m spacy download en_core_web_sm df = pd.read_csv('online_combined.csv') # group items i=0 wordOccMap = {} uniq_df = df['Description'].dropna().unique() for d in uniq_df: #print(d) words = d.split(' ') for w in words: if w=='': continue # check what the words are doc = nlp(d) # select nouns if 
len([chunk.text for chunk in doc.noun_chunks])>0: pass # skip adj if len([token.lemma_ for token in doc if token.pos_ == "ADJ"])>0: continue if w in ['IN','AND','OF','SET',',','&','+']: continue if w.isdigit(): continue if w not in wordOccMap: wordOccMap[w]=0 for diter in uniq_df: if diter==d: continue if occurrences(diter,w)>0: wordOccMap[w]+=1 doc = nlp(d) # Analyze syntax #print(" Noun phrases:", [chunk.text for chunk in doc.noun_chunks]) #print(" Verbs:", [token.lemma_ for token in doc if token.pos_ == "VERB"]) #print(" Adjectives:", [token.lemma_ for token in doc if token.pos_ == "ADJ"]) i+=1 #if i>50: # break dfwords = pd.DataFrame.from_dict(wordOccMap, orient='index', columns=['entries']) print(dfwords) print(dfwords.sort_values(by='entries')) # These are the most used items in the Description. Given more time, I would try to build a customer portfolio of the types of tiems that were purchased. This would be more involved, but I could imagine doing target advertising. I could also imagine grouping these items by customer to look for what was purchased by customers with similar purchases. Then suggest these new items to the client. 
print('iteration: ') for index, row in dfwords.sort_values(by='entries').iterrows(): #for d in dfwords.sort_values(by='entries'): print('%s %s' %(index,row['entries'])) <file_sep>/protonmail.py import pandas as pd import numpy as np import matplotlib.pyplot as plt import os draw=True verbose=False def Download(): # Download the input file import urllib.request urllib.request.urlretrieve('http://archive.ics.uci.edu/ml/machine-learning-databases/00502/online_retail_II.xlsx','online_retail_II.xlsx') urllib.request.urlretrieve('http://archive.ics.uci.edu/ml/machine-learning-databases/00352/Online%20Retail.xlsx','online_retail.xlsx') def ConvertCsv(): print('downloading the data...') Download() print('Reading in the data...') df=pd.read_excel('online_retail_II.xlsx') df2=pd.read_excel('online_retail.xlsx') # make sure to harmonize the data columns df2.columns=['Invoice', 'StockCode', 'Description', 'Quantity', 'InvoiceDate','Price', 'Customer ID', 'Country'] df3 = pd.concat([df,df2]) df3.to_csv('online_combined.csv') def print_full(x): pd.set_option('display.max_rows', len(x)) print(x) pd.reset_option('display.max_rows') # reading xlxs is very slow, so we convert this to csv if not os.path.exists('online_combined.csv'): ConvertCsv() df = pd.read_csv('online_combined.csv') if verbose: print (df) # Fix column type df['InvoiceDate'] = pd.to_datetime(df.InvoiceDate) if verbose: print (df.sort_values(by='InvoiceDate')) #print the column names if verbose: print(df.columns) # add the total amount spent df['totalcost'] = df['Price']*df['Quantity'] # Check for other null entries by column. The rest is corrupted data and would exclude for future analysis. print('') print('Check for other null entries by column. 
The rest is corrupted data and would exclude for future analysis.') for c in df.columns: print('Column: %s and the number corrupted: %s' %(c, df[c].isnull().sum())) # Fill the null users to check how big they are overall # also noted that some of the descriptions are corrupted. Given this is a short project and the size is small, I will ignore any corrupted item descriptions. print('') print('Fill the null users to check how big they are overall') print('also noted that some of the descriptions are corrupted. Given this is a short project and the size is small, I will ignore any corrupted item descriptions.') df['nullID']=df['Customer ID'].isnull() # brief summary for future analysis. I wanted to know how many users IDs are missing print('') print('Brief summary for future analysis. I wanted to know how many users IDs are missing as well as how many customers, items, and invoices there are. ') print('Unique purchases: %s' %(len(df['Description'].unique()))) print('Unique customers: %s' %(len(df['Customer ID'].unique()))) print('Unregistered purchases: %s' %(len(df[df['nullID']==True]['Invoice'].unique()))) print('Unique Invoices: %s' %(len(df['Invoice'].unique()))) # Grouping by invoice for later analysis. I'm trying to get a feeling for how often users purchase and how much. print('') print('Grouping by invoice for later analysis. I am trying to get a feeling for how often users purchase and how much.') invoice_group = df.groupby(['Invoice']).sum() if verbose: print(invoice_group) # Drawing the number of items purchased per invoice including returns to see the typical size. # Number of items can be very large. For the very large number of items, For the order with more than 87000 are typically # I want to see what was cancelled with more than 75000 items. The costumer is 642465.0. Given this very large order, I would recommend reaching out to this customer to see if they could be better served. Let them know that they are valued. 
print(' Drawing the number of items purchased per invoice including returns to see the typical size.') print(' Number of items can be very large. For the very large number of items, For the order with more than 87000 are typically') print(' I want to see what was cancelled with more than 75000 items. The costumer is 642465.0. Given this very large order, I would recommend reaching out to this customer to see if they could be better served. Let them know that they are valued.') if draw: plt.hist(invoice_group['Quantity'],bins=100, log=True) plt.xlabel('Number of Items / Invoice') plt.ylabel('Items purchased') plt.show() # I want to see what was cancelled with more than 75000 items print('') print('I want to see what was cancelled with more than 75000 items. It was one customer.') print(invoice_group[invoice_group['Quantity']<-50000]) # has this customer ordered many times? print('') print('has this customer ordered many times? Below is their list of orders for Customer ID 642465.0') cust_id = invoice_group[invoice_group['Customer ID']==642465.0] print('') print(' printing the list of orders') print(' This customer only ordered once and cancelled their order. Given the size of this order, I would reach out to this person to find out what shaped their decision and to see if something would change their mind about cancelling.') # printing the list of orders # This customer only ordered once and cancelled their order. Given the size of this order, I would reach out to this person to find out what shaped their decision and to see if something would change their mind about cancelling. print(cust_id) # The real target customers are those who have made very large orders. There are 4 cancellations with total order cost of more than 20k pounds. if draw: plt.hist(invoice_group['totalcost']/1.0e3,bins=100, log=True) plt.xlabel('Total Invoice [thousands of pounds]') plt.ylabel('Invoices') plt.show() # Many of these large cancellations are from new customers. 
Might want to double check the quantity with the user before they finish their order? Perhaps the website can be improved print('') print('Many of these large cancellations are from new customers. Might want to double check the quantity with the user before they finish their order? Perhaps the website can be improved') print('') print('Printing invoices with more than 20k pounds worth returned') print(invoice_group[invoice_group['totalcost']<-20e3]) print('') print('Printing invoices more than 10k pounds worth returned') print(invoice_group[invoice_group['totalcost']<-10e3]) # How many purchases has the same customer made? This distribution is useful for defining boundaries for classes of customers based upon their number of items purchased df_unique_customer_rmNan=df.groupby(['Customer ID'], dropna=True).size() df_unique_customer=df.groupby(['Customer ID'], dropna=False).size() if verbose: print(df_unique_customer.sum()) if draw: plt.hist(df_unique_customer_rmNan,bins=100) plt.xlabel('Number of Items Purchased / Customer') plt.ylabel('Purchases') plt.show() # I divide the customers into the number of of items purchased. This is useful to see the distribution with fewer categories of customers. This grouping could be used potentially to define levels of customers for rewards, which I discuss more below. # In this distribution, I note the very small number of customers who have made more than 500 purchases. The next distribution is to look at the gross revenue from these different categories of customers. # 1st time customers are pretty low, which means that customers very often make additional orders. It might be worth surveying to understand why the 1st time customers were unhappy. print('') print('') print(' I divide the customers into the number of of items purchased. This is useful to see the distribution with fewer categories of customers. 
This grouping could be used potentially to define levels of customers for rewards, which I discuss more below.') print(' In this distribution, I note the very small number of customers who have made more than 500 purchases. The next distribution is to look at the gross revenue from these different categories of customers.') print(' 1st time customers are pretty low, which means that customers very often make additional orders. It might be worth surveying to understand why the 1st time customers were unhappy.') print('') print('splitting customers based upon their number of orders') numberOfOrders={} numberOfOrders['1st Time']=(df_unique_customer.between(0,1, inclusive=True)).sum() numberOfOrders['2-10']=(df_unique_customer.between(2,10, inclusive=True)).sum() numberOfOrders['11-49']=(df_unique_customer.between(11,49, inclusive=True)).sum() numberOfOrders['50-99']=(df_unique_customer.between(50,99, inclusive=True)).sum() numberOfOrders['100-499']=(df_unique_customer.between(100,499, inclusive=True)).sum() numberOfOrders['>500']=(df_unique_customer.between(500,500000, inclusive=True)).sum() if verbose: print(list(numberOfOrders.keys())) print(list(numberOfOrders.values())) if draw: fig = plt.figure(figsize = (10, 5)) plt.bar( list(numberOfOrders.keys()),list(numberOfOrders.values())) plt.xlabel('Number of Items') plt.ylabel('Number of Customers') plt.show() # Beyond the types of customers, the revenue broken down by number of customer orders is shown. # Customers with more than 100 orders make up 82% of the total revenue. The customers with more than 500 orders have a gross revenue of more than 2 million pounds. # We need to make sure to keep these repeat customers with more than 100 orders happy and especially those with more than 500 orders # Grouping by their number number of items is a good way to target consumers. Better deals should be targeted at consumers with more than 100 orders to keep them coming back. 
# First time users are also a group to continue to grow. print(' Beyond the types of customers, the revenue broken down by number of customer orders is shown.') print(' Customers with more than 100 orders make up 82% of the total revenue. The customers with more than 500 orders have a gross revenue of more than 2 million pounds. ') print(' We need to make sure to keep these repeat customers with more than 100 orders happy and especially those with more than 500 orders') print(' Grouping by their number number of items is a good way to target consumers. Better deals should be targeted at consumers with more than 100 orders to keep them coming back.') print(' First time users are also a group to continue to grow.') rev={} rev_selection = [['1st Time',0,1],['2-10',2,10],['11-49',11,49],['50-99',50,99],['100-499',100,499],['>500',500,5000000]] df_unique_customer_rmNan_rev= df.groupby(['Customer ID'], dropna=True).sum() if verbose: print(df_unique_customer_rmNan_rev) for revsel in rev_selection: rev[revsel[0]]=0 # initialize for i in df_unique_customer[df_unique_customer.between(revsel[1],revsel[2], inclusive=True)].index: rev[revsel[0]]+=df_unique_customer_rmNan_rev[df_unique_customer_rmNan_rev.index==i]['totalcost'].sum()/1.0e6 if draw: fig = plt.figure(figsize = (10, 5)) plt.bar( list(rev.keys()),list(rev.values())) plt.xlabel('Customer - Number of Items') plt.ylabel('Total Purchased Rev. in Millions of Pounds') plt.show() print('Total Revenue from >100 orders: %s' %((rev['>500']+rev['100-499'])/sum(rev.values()))) # See where the revenue per country # More than 90% of the revenue is coming from the UK. There are smaller orders from a lot of countries. # If looking to expand from the UK (although this would need to be investigated with Brexit), then the EIRE, Netherlands, Germany, and France would be the best places to start advertising print(' See where the revenue per country') print(' More than 90% of the revenue is coming from the UK. 
There are smaller orders from a lot of countries.') print(' If looking to expand from the UK (although this would need to be investigated with Brexit), then the EIRE, Netherlands, Germany, and France would be the best places to start advertising') orders_by_country = df.groupby(['Country'], dropna=True).sum().sort_values(by='totalcost')[-6:] orders_by_country_other = df.groupby(['Country'], dropna=True).sum().sort_values(by='totalcost')[:-7]['totalcost'].sum() if verbose: print(orders_by_country) if draw: fig = plt.figure(figsize = (10, 5)) plt.bar( list(['Other'])+list(orders_by_country.index),list(list([orders_by_country_other/1.0e6])+list(orders_by_country['totalcost']/1.0e6))) plt.xlabel('Country') plt.ylabel('Total Revenue') plt.show() print('Total Revenue from >100 orders: %s' %((rev['>500']+rev['100-499'])/sum(rev.values()))) # How many items are ordered per month? Same for revenue. This code draws these distributions print('') print(' How many items are ordered per month? Same for revenue. This code draws these distributions') monthly = df.groupby(pd.Grouper(key = 'InvoiceDate', freq='1M'), dropna=True).sum() itime = pd.date_range('2009-12-01', periods=24, freq='1M') time=[] monthly_rev = [] monthly_tot = [] if verbose: print(monthly['totalcost']) j=0 for d in itime: monthly_tot+=[monthly['Quantity'][j]/1.0e5] monthly_rev+=[monthly['totalcost'][j]/1.0e6] time+=[d] j+=1 # The number of items ordered increased greatly near December for holiday purchases. More staff may be needed to process these orders # The peak number of items ordered is also increasing from Dec 2011 to Dec 2012. April is lower in 2011 than 2010, which might be interesting to investigate further print('') print('The number of items ordered increased greatly near December for holiday purchases. More staff may be needed to process these orders') print(' The peak number of items ordered is also increasing from Dec 2011 to Dec 2012. 
April is lower in 2011 than 2010, which might be interesting to investigate further') if verbose: print(monthly_rev) if draw: plt.plot(list(time),list(monthly_tot)) # beautify the x-labels plt.gcf().autofmt_xdate() plt.ylabel('Number of items purchased [per 100k]') plt.xlabel('Month') plt.show() # Total revenue is also strongly peaked starting in October through January. The difference in April 2010 versus April 2011 is gone, which may be an artifact of the large cancelled orders print('Total revenue is also strongly peaked starting in October through January. The difference in April 2010 versus April 2011 is gone, which may be an artifact of the large cancelled orders') if verbose: print(monthly_rev) if draw: plt.plot(list(time),list(monthly_rev)) # beautify the x-labels plt.gcf().autofmt_xdate() #plt.hist(monthly['Quantity'],bins=100, log=True) plt.ylabel('Total Revenue in Millions of Pounds') plt.xlabel('Month') plt.show() # Drawing the number of items per invoice. Again large numbers of items in a single invoice might be a concern for cancelled orders print('') print('Drawing the number of items per invoice. Again large numbers of items in a single invoice might be a concern for cancelled orders, which show up as negative entries.') if draw: plt.hist(invoice_group['Quantity'],bins=100, log=True) plt.xlabel('Number of Items / Invoice') plt.ylabel('Items purchased') plt.show()
11ef5ded7f0c0778885195411477db277606c4a0
[ "Markdown", "Python" ]
5
Markdown
schaed/ProtonMailTest
76b178b84e26d41643c7c63d8af9054c6918aad1
228d8ae25fd038ba22b5378752de8bd070c98a45
refs/heads/master
<repo_name>sqrt-of-minus-one/YTest-server<file_sep>/src/main/java/ru/jenyaiu90/ytest/mappers/GroupsMapper.java package ru.jenyaiu90.ytest.mappers; import org.springframework.jdbc.core.RowMapper; import ru.jenyaiu90.ytest.entity.Group; import java.sql.ResultSet; import java.sql.SQLException; public class GroupsMapper implements RowMapper<Group> { public Group mapRow(ResultSet rs, int rowNum) throws SQLException { Group result = new Group(); result.setId(rs.getInt("ID")); result.setName(rs.getString("NAME")); result.setAdmin(rs.getInt("ADMIN")); return result; } } <file_sep>/src/main/java/ru/jenyaiu90/ytest/repositories/TestsRepository.java package ru.jenyaiu90.ytest.repositories; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Component; import ru.jenyaiu90.ytest.entity.*; import ru.jenyaiu90.ytest.mappers.*; import java.util.List; @Component public class TestsRepository { @Autowired protected JdbcTemplate jdbc; //Создание теста public int createTest(Test test) { return jdbc.update("INSERT INTO \"TESTS\" (\"NAME\", \"SUBJECT\") VALUES (?, ?)", test.getName(), test.getSubject()); } //Получить последний созданный тест public List<Test> getLast() { return jdbc.query("SELECT * FROM \"TESTS\" ORDER BY \"ID\" DESC LIMIT 1", new TestsMapper()); } //Задать автора теста public int createTestUser(Test test, User user) { return jdbc.update("INSERT INTO \"TESTS_USERS\" (\"USER\", \"TEST\") VALUES (?, ?)", user.getId(), test.getId()); } //Получить тест по id public List<Test> getTest(int id) { return jdbc.query("SELECT * FROM \"TESTS\" WHERE \"ID\" = ?", new TestsMapper(), id); } //Отметить тест как проверенный учителем public int checkAnswer(Answer answer, int points) { return jdbc.update("UPDATE \"ANSWERS\" SET \"POINTS\" = ?, \"IS_CHECKED\" = TRUE WHERE \"ID\" = ?", points, answer.getId()); } //Добавить результат прохождения теста public List<Result> createResult(Test test, 
User user) { jdbc.update("INSERT INTO \"RESULTS\" (\"USER\", \"TEST\") VALUES (?, ?)", user.getId(), test.getId()); return jdbc.query("SELECT * FROM \"RESULTS\" WHERE \"USER\" = ? AND \"TEST\" = ?", new ResultsMapper(), user.getId(), test.getId()); } //Добавить ответ на одно из заданий теста public int createAnswer(Answer answer) { return jdbc.update("INSERT INTO \"ANSWERS\" (\"RESULT\", \"TASK\", \"ANSWER\", \"IMAGE_ANSWER\", \"IS_CHECKED\", \"POINTS\") VALUES (?, ?, ?, ?, ?, ?)", answer.getResult(), answer.getTask(), answer.getAnswer(), answer.getImageAnswer(), answer.getIsChecked(), answer.getPoints()); } //Получить результаты прохождения теста пользователем public List<Answer> getAnswers(User user, Test test) { return jdbc.query("SELECT * FROM \"ANSWERS\" WHERE \"RESULT\" IN (SELECT \"ID\" FROM \"RESULTS\" WHERE \"USER\" = ? AND \"TEST\" = ?)", new AnswersMapper(), user.getId(), test.getId()); } //Получить список тестов, заданных пользователю public List<Test> getTestsFor(User user) { return jdbc.query("SELECT * FROM \"TESTS\" WHERE \"ID\" IN (SELECT \"TEST\" FROM \"SETS\" WHERE \"GROUP\" IN (SELECT \"GROUP\" FROM \"GROUPS_USERS\" WHERE \"USER\" = ?))", //Обожаю SQL за трёхэтажные запросы new TestsMapper(), user.getId()); } //Получить список тестов, созданных пользователем public List<Test> getTestsOf(User user) { return jdbc.query("SELECT * FROM \"TESTS\" WHERE \"ID\" IN (SELECT \"TEST\" FROM \"TESTS_USERS\" WHERE \"USER\" = ?)", new TestsMapper(), user.getId()); } //Получить автора теста public List<User> getAuthorOfTest(Test test) { return jdbc.query("SELECT * FROM \"USERS\" WHERE \"ID\" IN (SELECT \"USER\" FROM \"TESTS_USERS\" WHERE \"TEST\" = ?)", new UsersMapper(), test.getId()); } //Получить результат прохождение теста пользователем public List<Result> getResultOfTest(Test test, User user) { return jdbc.query("SELECT * FROM \"RESULTS\" WHERE \"TEST\" = ? 
AND \"USER\" = ?", new ResultsMapper(), test.getId(), user.getId()); } } <file_sep>/src/main/java/ru/jenyaiu90/ytest/mappers/TestsMapper.java package ru.jenyaiu90.ytest.mappers; import org.springframework.jdbc.core.RowMapper; import ru.jenyaiu90.ytest.entity.Test; import java.sql.ResultSet; import java.sql.SQLException; public class TestsMapper implements RowMapper<Test> { public Test mapRow(ResultSet rs, int rowNum) throws SQLException { Test test = new Test(); test.setId(rs.getInt("ID")); test.setName(rs.getString("NAME")); test.setSubject(rs.getString("SUBJECT")); return test; } } <file_sep>/src/main/java/ru/jenyaiu90/ytest/YtestApplication.java package ru.jenyaiu90.ytest; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication public class YtestApplication { public static void main(String[] args) { SpringApplication.run(YtestApplication.class, args); } } <file_sep>/src/main/java/ru/jenyaiu90/ytest/controllers/UsersController.java package ru.jenyaiu90.ytest.controllers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; import ru.jenyaiu90.ytest.entity.ServerAnswer; import ru.jenyaiu90.ytest.entity.User; import ru.jenyaiu90.ytest.repositories.UsersRepository; import java.util.List; @RestController @RequestMapping("/user") public class UsersController { @Autowired protected UsersRepository usersRep; //Создание пользователя @RequestMapping(value = "/create", method = RequestMethod.POST) public ServerAnswer createUser(@RequestBody User user) { if (usersRep.getUser(user.getLogin()).isEmpty()) //Проверка на наличие зарегистрированного пользователя с таким же логином { usersRep.createUser(user); System.out.println("User " + user.getLogin() + " was created."); return new ServerAnswer(ServerAnswer.OK); } else { System.out.println("Attempt to create user " + user.getLogin() + " was failed."); return new 
ServerAnswer(ServerAnswer.USER_ALREADY_EXISTS); } } //Вход в систему @RequestMapping(value = "/auth", method = RequestMethod.GET) public User signIn(@RequestParam("login") String login, @RequestParam("password") String password) { List<User> user = usersRep.getUser(login); if (!user.isEmpty() && user.get(0).getPassword().equals(password)) //Проверка логина и пароля { System.out.println("User " + login + " signed in"); return user.get(0); } else { System.out.println("User " + login + " couldn`t sign in"); User empty = new User(); empty.setId(0); return empty; } } //Получить пользователя по логину @RequestMapping(value = "/get", method = RequestMethod.GET) public User getUser(@RequestParam("login") String login) { List<User> users = usersRep.getUser(login); if (users.isEmpty()) //Проверка пользователя на существование { System.out.println("A user couldn`t get an information about user " + login + " because this user wasn`t found"); return null; } else { System.out.println("A user has got an information about " + login); users.get(0).setPassword("<PASSWORD>"); return users.get(0); } } //Изменить данные пользователя @RequestMapping(value = "/update", method = RequestMethod.PUT) public ServerAnswer update(@RequestParam("login") String login, @RequestParam("name") String name, @RequestParam("surname") String surname, @RequestParam("email") String email, @RequestParam("phone_number") String phone_number, @RequestParam("old_password") String old_password, @RequestParam("new_password") String new_password) { List<User> users = usersRep.getUser(login); if (users.isEmpty()) //Проверка пользователя на существование { System.out.println("Couldn`t update user " + login + " because this user wasn`t found"); return new ServerAnswer(ServerAnswer.NO_USER); } if (users.get(0).getPassword().equals(old_password)) //Проверка правильности пароля { User user = new User(); user.setLogin(login); user.setName(name); user.setSurname(surname); user.setEmail(email); 
user.setPhone_number(phone_number); user.setImage(null); user.setPassword(<PASSWORD>); usersRep.updateUser(user); System.out.println("User " + user.getLogin() + " was updated"); return new ServerAnswer(ServerAnswer.OK); } else { System.out.println("User " + login + " couldn`t be updated because of wrong password"); return new ServerAnswer(ServerAnswer.PASSWORD); } } } <file_sep>/src/main/java/ru/jenyaiu90/ytest/entity/Group.java package ru.jenyaiu90.ytest.entity; //Группа пользователей public class Group { protected int id; protected String name; protected int admin; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name; } public int getAdmin() { return admin; } public void setAdmin(int admin) { this.admin = admin; } } <file_sep>/src/main/java/ru/jenyaiu90/ytest/entity/Answer.java package ru.jenyaiu90.ytest.entity; //Ответ пользователя на одно из заданий теста public class Answer { protected int id; protected int result; protected int task; protected String answer; protected String imageAnswer; protected boolean isChecked; protected int points; public int getId() { return id; } public void setId(int id) { this.id = id; } public int getResult() { return result; } public void setResult(int result) { this.result = result; } public int getTask() { return task; } public void setTask(int task) { this.task = task; } public String getAnswer() { return answer; } public void setAnswer(String answer) { this.answer = answer; } public String getImageAnswer() { return imageAnswer; } public void setImageAnswer(String image) { imageAnswer = image; } public boolean getIsChecked() { return isChecked; } public void setIsChecked(boolean checked) { isChecked = checked; } public int getPoints() { return points; } public void setPoints(int points) { this.points = points; } } <file_sep>/target/classes/application.properties 
spring.datasource.url=jdbc:postgresql://ec2-54-75-231-215.eu-west-1.compute.amazonaws.com:5432/dfr8lu17trdh12?sslmode=require spring.datasource.username=kcnmnhuqmgjpcl spring.datasource.password=<PASSWORD> spring.datasource.driver-class-name=org.postgresql.Driver
2e20e65ba29a6039a2cee89be442a5ee5c95b3cb
[ "Java", "INI" ]
8
Java
sqrt-of-minus-one/YTest-server
7c4a245e4c5219cdc8b557299de5077d7d882b78
7db8c09f48b320d9c8ced6555c8f5d4043167ded
refs/heads/master
<repo_name>VladGbo/Breweries<file_sep>/Breweries/Breweries/TableCell/TypeOfCell.swift // // TypeOfCell.swift // Breweries // // Created by <NAME> on 06.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation enum TypeOfCell { case mainCell case supportCell } <file_sep>/Breweries/Breweries/CoreData/BreweryEntity+CoreDataProperties.swift // // BreweryEntity+CoreDataProperties.swift // Breweries // // Created by <NAME> on 06.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // // import Foundation import CoreData extension BreweryEntity { @nonobjc public class func fetchRequest() -> NSFetchRequest<BreweryEntity> { return NSFetchRequest<BreweryEntity>(entityName: "BreweryEntity") } @NSManaged public var id: Int32 @NSManaged public var name: String? @NSManaged public var type: String? @NSManaged public var street: String? @NSManaged public var city: String? @NSManaged public var state: String? @NSManaged public var postalCode: String? @NSManaged public var country: String? @NSManaged public var longitude: String? @NSManaged public var latitude: String? @NSManaged public var phone: String? @NSManaged public var website: String? } <file_sep>/Breweries/Breweries/SearchBar/SearchBarVM.swift // // SearchBarVM.swift // Breweries // // Created by <NAME> on 10.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation import UIKit class SearchBarVM: NSObject { var filteredBreweries = [Brewery]() weak var delegate: SearchBarVMDelegate? 
var searchBarIsEmpty: Bool { guard let delegate = self.delegate else { return false } guard let text = delegate.brewerySearchController.searchBar.text else {return false} return text.isEmpty } var isFiltering:Bool { guard let delegate = self.delegate else { return false } return delegate.brewerySearchController.isActive && !searchBarIsEmpty } } extension SearchBarVM: UISearchResultsUpdating { func updateSearchResults(for searchController: UISearchController) { guard let delegate = self.delegate else { return } filterContentForSearchText(text: searchController.searchBar.text!) delegate.breweryBreweriesTableView.reloadData() } private func filterContentForSearchText (text: String) { guard let breweries = BreweriesManager.shared.breweries else { return } filteredBreweries = breweries.filter({ (brewery) -> Bool in return (brewery.name?.lowercased().contains(text.lowercased()) ?? false) }) } } <file_sep>/Breweries/Breweries/Controlleres/ListOfBreweriesVC.swift // // ViewController.swift // Breweries // // Created by <NAME> on 05.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import UIKit class ListOfBreweriesVC: UIViewController { @IBOutlet weak var breweriesTableView: UITableView! 
internal let searchController = UISearchController(searchResultsController: nil) private let tableVM = BreweryTableVM() override func viewDidLoad() { super.viewDidLoad() self.title = "Breweries" self.breweriesTableView.delegate = self.tableVM self.breweriesTableView.dataSource = self.tableVM self.tableVM.searchResultUpdating.delegate = self self.tableVM.delegate = self DispatchQueue.main.async { self.settingSearchController() } let mainNib = UINib(nibName: "MainBreweryTVC", bundle: nil) breweriesTableView.register(mainNib, forCellReuseIdentifier: "MainBreweryTVC") let supportNib = UINib(nibName: "SupportBreweryTVC", bundle: nil) breweriesTableView.register(supportNib, forCellReuseIdentifier: "SupportBreweryTVC") } private func settingSearchController() { self.searchController.searchResultsUpdater = tableVM.searchResultUpdating self.searchController.obscuresBackgroundDuringPresentation = false self.searchController.searchBar.placeholder = "Search" navigationItem.searchController = self.searchController navigationItem.searchController?.isActive = true definesPresentationContext = false self.navigationController?.navigationBar.isTranslucent = true self.navigationController?.navigationBar.backgroundColor = #colorLiteral(red: 0.172368288, green: 0.5335530043, blue: 0.01537404489, alpha: 0.5) } } extension ListOfBreweriesVC: SearchBarVMDelegate { var brewerySearchController : UISearchController { return self.searchController } var breweryBreweriesTableView: UITableView { return self.breweriesTableView } } extension ListOfBreweriesVC: BreweryTableVMDelegate { var navController: UINavigationController { return self.navigationController ?? UINavigationController() } } <file_sep>/Breweries/Breweries/Services/PersistenceService.swift // // CoreDataManager.swift // Breweries // // Created by <NAME> on 06.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. 
// import Foundation import CoreData class PersistenceService { static let shared = PersistenceService() private init () {} private let entityName = "BreweryEntity" var context: NSManagedObjectContext { return persistenceContainer.viewContext } lazy var persistenceContainer: NSPersistentContainer = { let container = NSPersistentContainer(name: "BreweryCD") container.loadPersistentStores { (storeDescription, error) in if let error = error as NSError? { fatalError("Unresolved error \(error), \(error.userInfo)") } } return container }() func save() { if self.context.hasChanges { do { try context.save() } catch { let nserror = error as NSError fatalError("Unresolved error \(nserror), \(nserror.userInfo)") } } } func fetchFromPersistenceStore (complition: @escaping ([Brewery]?, Error?)->Void) { do { let breweriesEntities = try context.fetch(BreweryEntity.fetchRequest()) as [BreweryEntity] let breweries = adaptBreweries(breweries: breweriesEntities) complition(breweries, nil) } catch { complition(nil, error) } } func updateBreweriesCoreData(breweries: [Brewery]) { removeBreweriesFromPersistenceStore() insertBreweriesToPersistenceStore(breweries: breweries) } private func adaptBreweries(breweries: [BreweryEntity]) -> [Brewery] { var res = [Brewery]() for i in breweries { let brewery = Brewery(entity: i) res.append(brewery) } return res } private func removeBreweriesFromPersistenceStore () { let fetchRequest = NSFetchRequest<NSFetchRequestResult>(entityName: entityName) do { let breweriesObj = try context.fetch(fetchRequest) as! [NSManagedObject] for obj in breweriesObj { context.delete(obj) } save() print("remuving from core data was successful") } catch { print("remuving from core data was unsuccessful: \(error.localizedDescription)") } } private func insertBreweriesToPersistenceStore (breweries: [Brewery]) { for i in breweries { let breweryEntity = BreweryEntity(context: context) breweryEntity.id = Int32(i.id ?? 
0) breweryEntity.city = i.city breweryEntity.country = i.country breweryEntity.latitude = i.latitude breweryEntity.longitude = i.longitude breweryEntity.name = i.name breweryEntity.phone = i.phone breweryEntity.postalCode = i.postalCode breweryEntity.state = i.state breweryEntity.street = i.street breweryEntity.type = i.type breweryEntity.website = i.website save() } } } <file_sep>/Breweries/Breweries/TableCell/SupportBreweryCellVM.swift // // SupportBreweryCellVM.swift // Breweries // // Created by <NAME> on 08.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation class SupportBreweryCellVM: BreweryCellProtocol { var nameOfCompany: String var country: String var state: String var city: String var street: String var longitude: String var latitude: String init(nameOfCompany: String, country: String, state: String, city: String, street: String, longitude: String, latitude: String) { self.nameOfCompany = nameOfCompany self.country = country self.state = state self.city = city self.street = street self.longitude = longitude self.latitude = latitude } var type: TypeOfCell { return .supportCell } var nameInBundle: String { return "SupportBreweryTVC" } } <file_sep>/Breweries/Breweries/TableCell/BreweryCellVM.swift // // BreweryCellVm.swift // Breweries // // Created by <NAME> on 08.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. 
// import Foundation class BreweryCellVM { var cellsVM = [BreweryCellProtocol]() private let breweries: [Brewery] init (breweries: [Brewery]) { self.breweries = breweries setDataForCells() } private func setDataForCells() { breweries.forEach { (brewery) in if let name = brewery.name, let phone = brewery.phone, let website = brewery.website { let mainCellVM = MainBeweryCellVM(nameOfCompany: name, phoneNumber: phone, website: website) cellsVM.append(mainCellVM) } if let name = brewery.name, let country = brewery.country, let state = brewery.state, let city = brewery.city, let street = brewery.street, let longitude = brewery.longitude, let latitude = brewery.latitude{ let supportCellVM = SupportBreweryCellVM (nameOfCompany: name, country: country, state: state, city: city, street: street, longitude: longitude, latitude: latitude) cellsVM.append(supportCellVM) } } } } <file_sep>/Breweries/Breweries/TableCell/MainBreweryTVC.swift // // MainBreweryTVC.swift // Breweries // // Created by <NAME> on 06.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import UIKit class MainBreweryTVC: UITableViewCell { @IBOutlet weak var nameOfCompanyLabel: UILabel! @IBOutlet weak var phoneNumberLabel: UILabel! @IBOutlet weak var websiteLabel: UILabel! var cellVM: BreweryCellProtocol? { didSet { guard let cellVM = cellVM as? 
MainBeweryCellVM else { return } self.nameOfCompanyLabel.text = cellVM.nameOfCompany self.phoneNumberLabel.text = "Phone: \(cellVM.phoneNumber)" let hyperLinkWithTitle = "Website: \(cellVM.website)" let attributedString = NSAttributedString.makeHiperLink(for: cellVM.website, in: hyperLinkWithTitle, as: cellVM.website) self.websiteLabel.attributedText = attributedString } } override func awakeFromNib() { super.awakeFromNib() setStyle() } override func setSelected(_ selected: Bool, animated: Bool) { super.setSelected(selected, animated: animated) } private func setStyle() { self.layer.borderColor = CGColor(#colorLiteral(red: 0.172368288, green: 0.5335530043, blue: 0.01537404489, alpha: 1)) self.layer.cornerRadius = 20.0 self.layer.borderWidth = 1.0 self.backgroundColor = #colorLiteral(red: 0.9607108235, green: 0.9608257413, blue: 0.9606716037, alpha: 1) } private func setAttribete(text: String) -> NSMutableAttributedString{ let muteText = NSMutableAttributedString(string: text) muteText.addAttribute(NSAttributedString.Key.foregroundColor, value: UIColor.black, range: NSRange(location:2,length:4)) return muteText } } <file_sep>/Breweries/Breweries/TableCell/BreweryCellProtocol.swift // // BreweryCellProtocol.swift // Breweries // // Created by <NAME> on 06.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation protocol BreweryCellProtocol { var type: TypeOfCell { get } var nameInBundle: String { get } } <file_sep>/Breweries/Breweries/TableCell/MainBeweryCellVM.swift // // MainBeweryVM.swift // Breweries // // Created by <NAME> on 08.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. 
// import Foundation class MainBeweryCellVM: BreweryCellProtocol { var nameOfCompany: String var phoneNumber: String var website: String init(nameOfCompany: String, phoneNumber: String, website: String) { self.nameOfCompany = nameOfCompany self.phoneNumber = phoneNumber self.website = website } var type: TypeOfCell { return .mainCell } var nameInBundle: String { return "MainBreweryTVC" } } <file_sep>/Breweries/Breweries/Models/Brewery.swift // // Brewery.swift // Breweries // // Created by <NAME> on 05.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation struct Brewery: Codable { var id : Int? var name : String? var type : String? var street : String? var city : String? var state : String? var postalCode: String? var country : String? var longitude : String? var latitude : String? var phone : String? var website : String? init(entity: BreweryEntity) { self.id = Int(entity.id) self.name = entity.name self.type = entity.type self.street = entity.street self.city = entity.city self.state = entity.state self.postalCode = entity.postalCode self.country = entity.country self.longitude = entity.longitude self.latitude = entity.latitude self.phone = entity.phone self.website = entity.website } init() {} enum CodingKeys: String, CodingKey { case id case name case type = "brewery_type" case street case city case state case postalCode = "postal_code" case country case longitude case latitude case phone case website = "website_url" } } <file_sep>/Breweries/Breweries/SearchBar/SearchBarVMDelegate.swift // // SearchBarVMDelegate.swift // Breweries // // Created by <NAME> on 10.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. 
// import Foundation import UIKit protocol SearchBarVMDelegate: AnyObject { var brewerySearchController : UISearchController { get } var breweryBreweriesTableView: UITableView { get } } <file_sep>/Breweries/Breweries/CoreData/BreweryEntity+CoreDataClass.swift // // BreweryEntity+CoreDataClass.swift // Breweries // // Created by <NAME> on 06.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // // import Foundation import CoreData @objc(BreweryEntity) public class BreweryEntity: NSManagedObject { } <file_sep>/Breweries/Breweries/Services/NetworkService.swift // // NetworkService.swift // Breweries // // Created by <NAME> on 05.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation class NetworkService { private let api = Bundle.main.object(forInfoDictionaryKey: "API_BASE_URL") as! String typealias NetworkResult = ([Brewery]?, Error?)-> Void static let shared = NetworkService() private init () {} func requestFetchListOfBreweries(complition: @escaping NetworkResult) { let link = "\(api)breweries" guard let url = URL(string:link) else { return } URLSession.shared.dataTask(with: url) { (data, _, error) in if let data = data { do { let jsonDecoder = JSONDecoder() let breweries = try jsonDecoder.decode([Brewery].self, from: data) complition(breweries, nil) } catch { complition(nil, error) } } if let error = error { complition(nil, error) } }.resume() } func requestSearchBreweries(breweries: String, complition: @escaping NetworkResult) { // let link = "\(api)breweries?by_name=\(breweries)".addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) guard let url = URL(string:link!) 
else { return } URLSession.shared.dataTask(with: url) { (data, _, error) in if let data = data { do { let jsonDecoder = JSONDecoder() let breweries = try jsonDecoder.decode([Brewery].self, from: data) complition(breweries, nil) } catch { complition(nil, error) } } if let error = error { complition(nil, error) } }.resume() } } <file_sep>/Breweries/Breweries/Services/BreweriesManager.swift // // BreweriesManager.swift // Breweries // // Created by <NAME> on 06.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation class BreweriesManager { static let shared = BreweriesManager() private init () {} typealias Result = ([Brewery])->Void var breweries: [Brewery]? func fetchBreweries (complitionResult:@escaping Result) { NetworkService.shared.requestFetchListOfBreweries { (breweries, error) in if let breweries = breweries { self.breweries = breweries complitionResult(breweries) PersistenceService.shared.updateBreweriesCoreData(breweries: breweries) } else { PersistenceService.shared.fetchFromPersistenceStore { (persistenceBreweries, error) in if let persistenceBreweries = persistenceBreweries { self.breweries = persistenceBreweries complitionResult(persistenceBreweries) } } } if let _ = error { complitionResult([Brewery]()) } } } } <file_sep>/Breweries/Breweries/TableCell/BreweryTableVM.swift // // BreweryTableVM.swift // Breweries // // Created by <NAME> on 09.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation import UIKit import SafariServices protocol BreweryTableVMDelegate: AnyObject { var navController: UINavigationController { get } } class BreweryTableVM: NSObject{ private var breweries: [Brewery] private var cellsVM: BreweryCellVM var searchResultUpdating = SearchBarVM() weak var delegate: BreweryTableVMDelegate? override init() { breweries = BreweriesManager.shared.breweries ?? 
[Brewery]() cellsVM = BreweryCellVM(breweries: breweries) } func updateCellsModel (breweries: [Brewery]) { cellsVM = BreweryCellVM(breweries: breweries) } } extension BreweryTableVM: UITableViewDataSource, UITableViewDelegate { func numberOfSections(in tableView: UITableView) -> Int { if searchResultUpdating.isFiltering { updateCellsModel(breweries: searchResultUpdating.filteredBreweries) } else { updateCellsModel(breweries: self.breweries) } return cellsVM.cellsVM.count } func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { return 1 } func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat { return 16 } func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView? { let headerView = UIView() headerView.backgroundColor = UIColor.clear return headerView } func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { let model = cellsVM.cellsVM[indexPath.section] switch model.type { case .mainCell: if let cell = tableView.dequeueReusableCell(withIdentifier: model.nameInBundle) as? MainBreweryTVC { DispatchQueue.main.async { cell.cellVM = model } return cell } case .supportCell: if let cell = tableView.dequeueReusableCell(withIdentifier: model.nameInBundle) as? SupportBreweryTVC { DispatchQueue.main.async { cell.cellVM = model } cell.delegate = self return cell } } return UITableViewCell() } func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { let model = cellsVM.cellsVM[indexPath.section] if model.type == .mainCell { guard let mainData = model as? 
MainBeweryCellVM else { return } guard let url = URL(string: mainData.website) else {return } let config = SFSafariViewController.Configuration() config.entersReaderIfAvailable = true let vc = SFSafariViewController(url: url , configuration: config) vc.title = "Website" vc.modalPresentationStyle = .formSheet vc.modalTransitionStyle = .crossDissolve delegate?.navController.pushViewController(vc, animated: true) } } } extension BreweryTableVM: SupportBreweryTVCDelegate { func didPressedOnMap(lat: String, lon: String, title: String, subTitle: String) { let model = BreweryMapVM(latitude: lat, longitude: lon, title: title, subTitle: subTitle) guard let mapVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "BreweryMapVC") as? BreweryMapVC else {return} mapVC.model = model self.delegate?.navController.pushViewController(mapVC, animated: true) } } <file_sep>/Breweries/Breweries/Controlleres/BreweryMapVC.swift // // BreweryMapVC.swift // Breweries // // Created by <NAME> on 05.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import UIKit import MapKit class BreweryMapVC: UIViewController { private var breweryMapView: MKMapView? var model: BreweryMapVM? { didSet { self.title = "Map" setMap() setMapSetting() } } func setMapSetting() { guard let model = model else { return } let latitude = Double(model.latitude) ?? 0 let longitude = Double (model.longitude) ?? 
0 let annotation = MKPointAnnotation() annotation.coordinate = CLLocationCoordinate2D(latitude: latitude, longitude: longitude) annotation.title = model.title annotation.subtitle = model.subTitle let region = MKCoordinateRegion(center: annotation.coordinate, latitudinalMeters: 500, longitudinalMeters: 500) if let breweryMapView = breweryMapView { breweryMapView.addAnnotation(annotation) breweryMapView.region = region } } private func setMap() { let point = CGPoint(x: 0, y: 0) let size = CGSize(width: self.view.frame.width, height: self.view.frame.height) self.breweryMapView = MKMapView(frame: CGRect(origin: point, size: size)) if let breweryMapView = self.breweryMapView { self.view.addSubview(breweryMapView) } } } <file_sep>/Breweries/Breweries/Controlleres/BreweryMapVM.swift // // BreweryMapVM.swift // Breweries // // Created by <NAME> on 11.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import Foundation class BreweryMapVM { var latitude: String var longitude: String var title: String var subTitle: String init(latitude: String, longitude: String, title: String, subTitle: String) { self.latitude = latitude self.longitude = longitude self.title = title self.subTitle = subTitle } } <file_sep>/Breweries/Breweries/TableCell/SupportBreweryTVC.swift // // SupportBreweryTVC.swift // Breweries // // Created by <NAME> on 06.02.2020. // Copyright © 2020 VladislavGorbenko. All rights reserved. // import UIKit import MapKit protocol SupportBreweryTVCDelegate: AnyObject { func didPressedOnMap(lat: String, lon: String, title: String, subTitle: String) } class SupportBreweryTVC: UITableViewCell { @IBOutlet weak var nameOfCompanyLabel: UILabel! @IBOutlet weak var countryLabel: UILabel! @IBOutlet weak var stateLabel: UILabel! @IBOutlet weak var cityLabel: UILabel! @IBOutlet weak var streetLabel: UILabel! @IBOutlet weak var mapButton: UIButton! weak var delegate: SupportBreweryTVCDelegate? private var latitude: String? private var longitude: String? 
var cellVM: BreweryCellProtocol? { didSet { guard let cellVM = cellVM as? SupportBreweryCellVM else { return } self.nameOfCompanyLabel.text = "\(cellVM.nameOfCompany) 2" self.countryLabel.text = "Country: \(cellVM.country)" self.stateLabel.text = "State: \(cellVM.state)" self.cityLabel.text = "City: \(cellVM.city)" self.streetLabel.text = "Street: \(cellVM.street)" self.latitude = cellVM.latitude self.longitude = cellVM.longitude } } override func awakeFromNib() { super.awakeFromNib() setStyle() } override func setSelected(_ selected: Bool, animated: Bool) { super.setSelected(selected, animated: animated) } @IBAction func tappedOnShowMap(_ sender: UIButton) { if let lat = self.latitude, let lon = self.longitude, let title = self.nameOfCompanyLabel.text, let subtitle = self.streetLabel.text{ delegate?.didPressedOnMap(lat: lat, lon: lon, title: title, subTitle: subtitle) } } private func setStyle() { self.layer.borderColor = CGColor(#colorLiteral(red: 0.172368288, green: 0.5335530043, blue: 0.01537404489, alpha: 1)) self.layer.cornerRadius = 20.0 self.layer.borderWidth = 1.0 self.backgroundColor = #colorLiteral(red: 0.9607108235, green: 0.9608257413, blue: 0.9606716037, alpha: 1) mapButton.layer.cornerRadius = 7.0 } }
6642ec762d0e9afb0ea3d0f7080950f99ca9242b
[ "Swift" ]
19
Swift
VladGbo/Breweries
2e6bc6436288762ad03e863a1cf298ba0f99a09e
a2f9134ca8133ddbcd0b131584b270fadfcfda63
refs/heads/master
<file_sep>$(function() { var target = ''; var targets = [ 'A-E-01', 'A-E-02', 'A-E-03', 'A-E-04', 'A-F-05', 'A-F-06', 'A-F-07', 'A-F-08', 'A-G-09', 'A-G-10', 'A-G-11', 'A-G-12', 'A-H-13', 'A-H-14', 'A-H-15', 'A-H-16', 'B-I-17', 'B-I-18', 'B-I-19', 'B-I-20', 'B-J-21', 'B-J-22', 'B-J-23', 'B-J-24', 'B-K-25', 'B-K-26', 'B-K-27', 'B-K-28', 'B-L-29', 'B-L-30', 'B-L-31', 'B-L-32', 'C-M-33', 'C-M-34', 'C-M-35', 'C-M-36', 'C-N-37', 'C-N-38', 'C-N-39', 'C-N-40', 'C-O-41', 'C-O-42', 'C-O-43', 'C-O-44', 'C-P-45', 'C-P-46', 'C-P-47', 'C-P-48', 'D-Q-49', 'D-Q-50', 'D-Q-51', 'D-Q-52', 'D-R-53', 'D-R-54', 'D-R-55', 'D-R-56', 'D-S-57', 'D-S-58', 'D-S-59', 'D-S-60', 'D-T-61', 'D-T-62', 'D-T-63', 'D-T-64', 'A-E-01', 'A-E-02', 'A-E-03', 'A-E-04', 'A-F-05', 'A-F-06', 'A-F-07', 'A-F-08', 'A-G-09', 'A-G-10', 'A-G-11', 'A-G-12', 'A-H-13', 'A-H-14', 'A-H-15', 'A-H-16', 'B-I-17', 'B-I-18', 'B-I-19', 'B-I-20', 'B-J-21', 'B-J-22', 'B-J-23', 'B-J-24', 'B-K-25', 'B-K-26', 'B-K-27', 'B-K-28', 'B-L-29', 'B-L-30', 'B-L-31', 'B-L-32', 'C-M-33', 'C-M-34', 'C-M-35', 'C-M-36', 'C-N-37', 'C-N-38', 'C-N-39', 'C-N-40', 'C-O-41', 'C-O-42', 'C-O-43', 'C-O-44', 'C-P-45', 'C-P-46', 'C-P-47', 'C-P-48', 'D-Q-49', 'D-Q-50', 'D-Q-51', 'D-Q-52', 'D-R-53', 'D-R-54', 'D-R-55', 'D-R-56', 'D-S-57', 'D-S-58', 'D-S-59', 'D-S-60', 'D-T-61', 'D-T-62', 'D-T-63', 'D-T-64', 'A-E-01', 'A-E-02', 'A-E-03', 'A-E-04', 'A-F-05', 'A-F-06', 'A-F-07', 'A-F-08', 'A-G-09', 'A-G-10', 'A-G-11', 'A-G-12', 'A-H-13', 'A-H-14', 'A-H-15', 'A-H-16', 'B-I-17', 'B-I-18', 'B-I-19', 'B-I-20', 'B-J-21', 'B-J-22', 'B-J-23', 'B-J-24', 'B-K-25', 'B-K-26', 'B-K-27', 'B-K-28', 'B-L-29', 'B-L-30', 'B-L-31', 'B-L-32', 'C-M-33', 'C-M-34', 'C-M-35', 'C-M-36', 'C-N-37', 'C-N-38', 'C-N-39', 'C-N-40', 'C-O-41', 'C-O-42', 'C-O-43', 'C-O-44', 'C-P-45', 'C-P-46', 'C-P-47', 'C-P-48', 'D-Q-49', 'D-Q-50', 'D-Q-51', 'D-Q-52', 'D-R-53', 'D-R-54', 'D-R-55', 'D-R-56', 'D-S-57', 'D-S-58', 'D-S-59', 'D-S-60', 'D-T-61', 'D-T-62', 'D-T-63', 'D-T-64' ]; var taskNum = 0; 
var taskStartTime = 0; var taskPath = []; var taskFlag = false; // Initialization // ------------------------------ $('#pilot1-block').height($(window).height()-100); targets.sort(function(){return Math.round(Math.random());}); $(document).keydown(function(event){ if (event.keyCode == 32) { //console.log(taskNum); if (taskNum >= 192) { $('body').css('background', '#EEE'); } else { $('#m-1').addClass('hidden'); $('.selected').removeClass('selected'); taskPath = []; taskFlag = true; taskStartTime = Date.now(); assignNewTask(); } } if (event.keyCode == 187) { $('.user-info').toggleClass('hidden'); } }); // Path Record // ------------------------------ $(document).mousemove(function(e) { if (taskFlag) { taskPath.push({ x: e.pageX, y: e.pageY, t: Date.now() - taskStartTime }); } }); function recordTaskData(name, task, path, time) { $.ajax({ url: '/pilot1', type: 'POST', data: { name: name, task: task, path: path, time: time }, error: function(xhr) { alert('ajax request error'); }, success: function(response) { console.log('success'); } }); } // Task Assignment // ------------------------------ function assignNewTask() { // target = randomTarget(); target = targets.pop(); $('.trigger-text').text(target); } function randomTarget(){ ri = Math.floor(Math.random()*targets.length); return targets[ri]; } // Trigger Menu Selection // ------------------------------ var intend = ''; var timer = 0; var flag = false; function mouseoverHandler() { return function () { var $this = $(this); $('#m-1').removeClass('hidden'); intend = $this.text(); if (!flag) { flag = true; timer = setTimeout( function(){ if (intend == $this.text()) { console.log('select ' + $this.text()); if ($this.text() == target.slice(4,6)) { // console.log(Date.now() - taskStartTime); // console.log(taskPath); $this.addClass('selected'); recordTaskData($('.user-info input').val(), target, JSON.stringify(taskPath), Date.now() - taskStartTime); taskNum++; taskFlag = false; $('.progress').css('width', 
taskNum*100/192+'%'); $('.trigger-text').text('--'); } } }, 1000); } }; } function mouseleaveHandler() { return function () { var $this = $(this); intend = ''; clearTimeout(timer); timer = 0; flag = false; }; } $('.selection').on('mouseover', mouseoverHandler()); $('.selection').on('mouseleave', mouseleaveHandler()); // Trigger 1st Menu // ------------------------------ $('.trigger-btn').on('mouseover', function () { var $this = $(this); if ($this.prop('hoverTimeout')) { $this.prop('hoverTimeout', clearTimeout($this.prop('hoverTimeout'))); } $this.prop('hoverIntent', setTimeout(function() { if (taskFlag) { $('#m-1').removeClass('hidden'); } }, 1000)); }); $('#m-1').on('mouseleave', function () { var $this = $(this); if ($this.prop('hoverIntent')) { $this.prop('hoverIntent', clearTimeout($this.prop('hoverIntent'))); } $this.prop('hoverTimeout', setTimeout(function() { $('#m-1').addClass('hidden'); }, 1000)); }); });<file_sep>function Point(x, y) // constructor { this.X = x; this.Y = y; } $(function() { var taskFlag = 0; var blockPos = {}; var initFlag = 0; var color = ['#E74C3C', '#2ECC71', '#3498DB', '#F1C40F']; var taskNum = 0; var taskPath = []; var task = 'no-guidance'; var taskStartTime = 0; var lastPos; var curPos; var xc = 0; var yc = 0; var tc = 0; var recognizer = new DollarRecognizer; // Initialization // ------------------------------ blockPos.X = $('#pilot3-block').offset().left; blockPos.Y = $('#pilot3-block').offset().top; $(document).keydown(function(event){ if (event.keyCode == 90 && taskNum < 10) { taskFlag = 1; taskStartTime = Date.now(); if (taskNum >= 5) { task = 'with-guidance'; $('#guidance').fadeIn(); } } if (event.keyCode == 88) { commitTask(); $('#guidance').fadeOut(); } if (event.keyCode == 188) { $('.user-info').toggleClass('hidden'); } }); // Commit Task // ------------------------------ function commitTask() { var result = recognizer.Recognize(taskPath); console.log(result); recordTaskData($('.user-info input').val(), task, 
JSON.stringify(taskPath), JSON.stringify(result.Path), result.Name, result.Score, result.Circle, result.Correct, Date.now() - taskStartTime); taskNum++; $('.progress').css('width', taskNum*100/10+'%'); taskFlag = 0; initFlag = 0; taskPath = []; d3.selectAll('#target path').remove(); xc = 0; yc = 0; tc = 0; } // Path Drawing and Dynamic Guiding // ------------------------------ var pi = Math.PI; var target = d3.select('#target'); var guidance = d3.select('#guidance'); var line = d3.svg.line() .x(function(d) { return d.X; }) .y(function(d) { return d.Y; }) .interpolate('basis'); var arc = d3.svg.arc() .innerRadius(function(d) { return d.R; }) .outerRadius(function(d) { return d.R; }) .startAngle(function(d) { return (d.A+90) * (pi/180); }) .endAngle(function(d) { return (d.A+90) * (pi/180) + d.D/d.R; }); $(document).mousemove(function(e) { curPos = new Point(e.pageX - blockPos.X, e.pageY - blockPos.Y); if (!initFlag) { lastPos = new Point(curPos.X, curPos.Y); initFlag = 1; } if (taskFlag) { taskPath.push(curPos); xc += curPos.X; yc += curPos.Y; tc++; target.append('path') .attr({ 'd': line([lastPos, curPos]), 'stroke': '#16A085', 'stroke-width': '5px', 'fill': 'none' }); } lastPos = new Point(curPos.X, curPos.Y); if (tc >= 1) { var a = angle((xc/tc +320)/2, (yc/tc +320)/2, curPos.X, curPos.Y); var r = distance((xc/tc +320)/2, (yc/tc +320)/2, curPos.X, curPos.Y); var offsetX = 300 - r * Math.cos(a * (pi/180)); var offsetY = 300 - r * Math.sin(a * (pi/180)); var guidanceLength = Math.min(100, Math.max(0, distance(taskPath[0].X, taskPath[0].Y, curPos.X, curPos.Y)-50)); d3.select('#guidance path').remove(); guidance.append('path') .attr({ 'd': arc({A: a, R: r, D: guidanceLength}), 'stroke': '#16A085', 'stroke-width': '15px', 'stroke-opacity': '0.3', 'fill': 'none', 'transform': 'translate('+offsetX+','+offsetY+')' }); $('#guidance') .css({ 'left': e.pageX-300, 'top': e.pageY-300 }); } }); function angle(cx, cy, ex, ey) { var dy = ey - cy; var dx = ex - cx; var theta = 
Math.atan2(dy, dx); theta *= 180 / Math.PI; return theta; } function distance(x, y, x0, y0) { return Math.sqrt((x -= x0) * x + (y -= y0) * y); } // Path Record // ------------------------------ function recordTaskData(name, task, path, rpath, rname, rscore, rcircle, rcorrect, time) { $.ajax({ url: '/pilot3', type: 'POST', data: { name: name, task: task, path: path, rpath: rpath, rname: rname, rscore: rscore, rcircle: rcircle, rcorrect: rcorrect, time: time }, error: function(xhr) { alert('ajax request error'); }, success: function(response) { console.log('success'); } }); } });<file_sep>$(function() { var line = d3.svg.line() .x(function(d) { return d.x; }) .y(function(d) { return d.y; }) .interpolate('basis'); var svg = d3.select('#main-svg'); function drawResult(result){ result.forEach(function (data){ if (data.task == 'r') { svg.append('path') .attr({ 'd': line(data.path), 'stroke': '#e74c3c', 'stroke-width': '1px', 'fill': 'none' }); } if (data.task == 'g') { svg.append('path') .attr({ 'd': line(data.path), 'stroke': '#2ecc71', 'stroke-width': '1px', 'fill': 'none' }); } if (data.task == 'b') { svg.append('path') .attr({ 'd': line(data.path), 'stroke': '#3498db', 'stroke-width': '1px', 'fill': 'none' }); } if (data.task == 'y') { svg.append('path') .attr({ 'd': line(data.path), 'stroke': '#f1c40f', 'stroke-width': '1px', 'fill': 'none' }); } }); } $.get('/pilot2_data', function(data){ drawResult(data); }); });<file_sep>$(function() { var readyFlag = 0; var taskFlag = 0; var commitFlag = 0; var blockPos = {}; var taskNum = 0; var taskPath = []; var taskStartTime = 0; var targets = [ 'r', 'g', 'b', 'y', 'r', 'g', 'b', 'y', 'r', 'g', 'b', 'y', 'r', 'g', 'b', 'y', 'r', 'g', 'b', 'y' ]; var task = ''; // Initialization // ------------------------------ targets.sort(function(){return Math.round(Math.random());}); blockPos.x = $('#pilot2-block').offset().left; blockPos.y = $('#pilot2-block').offset().top; $(document).keydown(function(event){ if (event.keyCode == 90) { 
readyFlag = 1; } if (event.keyCode == 88) { if (commitFlag) { commitTask(); resetTask(); } } if (event.keyCode == 67) { resetTask(); } if (event.keyCode == 187) { $('.user-info').toggleClass('hidden'); } }); // Stroke Adjusment // ------------------------------ $(document).mousemove(function(e) { var offsetX = e.pageX - blockPos.x; var offsetY = e.pageY - blockPos.y; // console.log(offsetX); // console.log(offsetY); var relateTop, relateRight, relateBottom, relateLeft; relateTop = (450-distance(offsetX, offsetY, 320, 0))/450; relateRight = (450-distance(offsetX, offsetY, 640, 320))/450; relateBottom = (450-distance(offsetX, offsetY, 320, 640))/450; relateLeft = (450-distance(offsetX, offsetY, 0, 320))/450; $('#pathTop').attr('stroke-opacity', relateTop); $('#pathRight').attr('stroke-opacity', relateRight); $('#pathBottom').attr('stroke-opacity', relateBottom); $('#pathLeft').attr('stroke-opacity', relateLeft); $('#pathTop').attr('stroke-width', relateTop*10); $('#pathRight').attr('stroke-width', relateRight*10); $('#pathBottom').attr('stroke-width', relateBottom*10); $('#pathLeft').attr('stroke-width', relateLeft*10); $('#pathTop').attr('stroke-dashoffset', getDashOffset(relateTop, offsetX, offsetY)); $('#pathRight').attr('stroke-dashoffset', getDashOffset(relateRight, offsetX, offsetY)); $('#pathBottom').attr('stroke-dashoffset', getDashOffset(relateBottom, offsetX, offsetY)); $('#pathLeft').attr('stroke-dashoffset', getDashOffset(relateLeft, offsetX, offsetY)); if (taskFlag) { taskPath.push({ x: offsetX, y: offsetY, t: Date.now() - taskStartTime }); } }); function distance(x, y, x0, y0) { return Math.sqrt((x -= x0) * x + (y -= y0) * y); } function getDashOffset(relateNum, x, y) { if ((x >= 0) && (x <= 640) && (y >= 0) && (y <=640)) { return 640-Math.sqrt(relateNum)*640; } else { return 0; } } // Path Record // ------------------------------ function recordTaskData(name, task, path, time) { $.ajax({ url: '/pilot2', type: 'POST', data: { name: name, task: task, 
path: path, time: time }, error: function(xhr) { alert('ajax request error'); }, success: function(response) { console.log('success'); } }); } // Task Commit // ------------------------------ function commitTask() { recordTaskData($('.user-info input').val(), task, JSON.stringify(taskPath), Date.now() - taskStartTime); taskNum++; taskPath = []; $('.progress').css('width', taskNum*100/20+'%'); } // Task Assign & Reset // ------------------------------ function assignNewTask() { if (taskFlag == 0) { // task = randomTarget(); task = targets.pop(); taskFlag = 1; taskStartTime = Date.now(); } } function resetTask(){ $('.trigger-area').removeClass('triggered r g b y'); readyFlag = 0; taskFlag = 0; commitFlag = 0; task = ''; } function randomTarget(){ ri = Math.floor(Math.random()*targets.length); return targets[ri]; } // Trigger Target // ------------------------------ function mouseoverHandler() { return function () { var $this = $(this); if ((task == 'r') && $this.hasClass('trigger--top')) { $this.addClass('triggered r'); commitFlag = 1; } if ((task == 'g') && $this.hasClass('trigger--right')) { $this.addClass('triggered g'); commitFlag = 1; } if ((task == 'b') && $this.hasClass('trigger--bottom')) { $this.addClass('triggered b'); commitFlag = 1; } if ((task == 'y') && $this.hasClass('trigger--left')) { $this.addClass('triggered y'); commitFlag = 1; } }; } function mouseleaveHandler() { return function () { var $this = $(this); $this.removeClass('triggered r g b y'); commitFlag = 0; }; } $('.trigger--end').on('mouseover', mouseoverHandler()); $('.trigger--end').on('mouseleave', mouseleaveHandler()); // Trigger Start // ------------------------------ $('.trigger--start').on('mouseover', function () { var $this = $(this); if ($this.prop('hoverTimeout')) { $this.prop('hoverTimeout', clearTimeout($this.prop('hoverTimeout'))); } $this.prop('hoverIntent', setTimeout(function() { if (readyFlag) { assignNewTask(); $('.trigger--start').addClass('triggered ' + task); } }, 
1000)); }); });<file_sep>$(function() { var line = d3.svg.line() .x(function(d) { return d.X-100; }) .y(function(d) { return d.Y; }) .interpolate('basis'); var ng = d3.select('#no-guidance'); var wg = d3.select('#with-guidance'); var ngnum = 0; var wgnum = 0; var ngscore = []; var wgscore = []; function drawResult(result){ result.forEach(function (data){ if (data.task == 'no-guidance') { ngscore.push(parseFloat(data.rcircle)); if (data.rcorrect == 'true') { ngnum++; } ng.append('path') .attr({ 'd': line(data.path), 'stroke': '#34495e', 'stroke-width': '1px', 'fill': 'none' }); } if (data.task == 'with-guidance') { wgscore.push(parseFloat(data.rcircle)); if (data.rcorrect == 'true') { wgnum++; } wg.append('path') .attr({ 'd': line(data.path), 'stroke': '#34495e', 'stroke-width': '1px', 'fill': 'none' }); } }); var nr = average(ngscore); var wr = average(wgscore); $('#ngnum').append('<span class="title">[correct task]</span>'+ ngnum + ' / ' + nr.t); $('#wgnum').append('<span class="title">[correct task]</span>'+ wgnum + ' / ' + wr.t); $('#ngscore').append('<span class="title">[mean]</span>'+ nr.mean); $('#wgscore').append('<span class="title">[mean]</span>'+ wr.mean); $('#ngscore').append('<br><span class="title">[std]</span>'+ nr.deviation); $('#wgscore').append('<br><span class="title">[std]</span>'+ wr.deviation); } $.get('/pilot3_data', function(data){ drawResult(data); }); function average(a) { var r = {mean: 0, variance: 0, deviation: 0}, t = a.length; r.t = t; for(var m, s = 0, l = t; l--; s += a[l]); for(m = r.mean = s / t, l = t, s = 0; l--; s += Math.pow(a[l] - m, 2)); return r.deviation = Math.sqrt(r.variance = s / t), r; } });
a64e4b07526f1a36522e2f150839ca0524c5795a
[ "JavaScript" ]
5
JavaScript
nktx/gazemenu
b5ac3d13e1abdf57f4b2a8453d706298f72e3189
51c0a09787dc1f0673e3617101d59dbb7815ba94
refs/heads/master
<repo_name>Sebijk/b1gMail6-Plugins<file_sep>/Openfire/openfire.extension.php <?php /* * Copyright (c) 2007 - 2008, Home of the Sebijk.com * http://www.sebijk.com */ $MODULE_CALL = 'modopenfire'; class modopenfire extends b1gMailModul { // Informationen zum Modul function modopenfire() { $this->titel = 'Jabber Openfire-Integration'; $this->autor = 'Home of the Sebijk.com'; $this->web = 'http://www.sebijk.com'; $this->mail = '<EMAIL>'; $this->version = '1.3'; $this->designedfor = '6.3.1'; $this->admin_pages = true; $this->admin_page_title = 'Openfire'; } // Installation function Install() { global $db; $sql = $db->Query("CREATE TABLE `{pre}mod_openfire` ( `secretkey` varchar(255) NOT NULL default '', `domain` varchar(255) NOT NULL default '' ) ENGINE=MyISAM;"); $sql = new SQLq("INSERT INTO `{pre}mod_openfire` (`secretkey`, `domain`) VALUES ('', 'localhost');"); PutLog("Modul \"Openfire-Integration\" wurde erfolgreich installiert.", PRIO_NOTE, __FILE__, __LINE__); return(true); } // Deinstallation function Uninstall() { global $db; $sql = $db->Query("DROP TABLE {pre}mod_openfire;"); PutLog("Modul \"Openfire-Integration\" wurde erfolgreich deinstalliert.", PRIO_NOTE, __FILE__, __LINE__); return(true); } // Jabber-Registrierungen zum Openfire Server senden function OnSignup($userid, $usermail) { global $vorname, $name, $fullmail, $_REQUEST; $benutzername = btrim($_REQUEST['reg_mail']); $jabber_kennwort = btrim($_REQUEST['reg_pass']); $voller_name = $vorname." 
".$name;

		// Load the module configuration (Openfire domain + User Service secret key).
		$sql = new SQLq("SELECT * FROM {pre}mod_openfire");
		$row = $sql->FetchArray();
		$userservice_secretkey = $row['secretkey'];
		$jabber_domain = $row['domain'];

		// Create the matching Jabber account via Openfire's User Service
		// HTTP API (type=add); all values are URL-encoded into the query string.
		$sendjabber_register = "https://".$jabber_domain.":9091/plugins/userService/userservice?type=add&secret=".$userservice_secretkey."&username=".rawurlencode($benutzername)."&password=".rawurlencode($<PASSWORD>)."&name=".rawurlencode($voller_name)."&email=".rawurlencode($fullmail);
		$http = new HTTPRequest($sendjabber_register);
		$receive_url = $http->DownloadToString();
	}

	/**
	 * Mirrors a b1gMail account deletion to the Openfire server:
	 * looks up the user's mail address, derives the Jabber username from
	 * its local part and calls the User Service HTTP API (type=delete).
	 */
	function OnDeleteUser($id)
	{
		global $db;
		$sql = $db->Query("SELECT email FROM {pre}users WHERE id=?",$id);
		$jabber_row = $sql->FetchArray();
		$jabber_email = $jabber_row['email'];
		$sql = $db->Query("SELECT * FROM {pre}mod_openfire");
		$jabber_row = $sql->FetchArray();
		// Jabber username = local part of the mail address.
		$benutzername = explode("@", $jabber_email);
		$sendjabber_delete = "https://".$jabber_row['domain'].":9091/plugins/userService/userservice?type=delete&secret=".$jabber_row['secretkey']."&username=".rawurlencode($benutzername[0]);
		$http = new HTTPRequest($sendjabber_delete);
		$result = $http->DownloadToString();
	}

	/**
	 * Admin panel page: saves and displays the Openfire settings.
	 *
	 * NOTE(review): the $_POST values are concatenated into the UPDATE
	 * statement without any escaping (SQL injection); they should be escaped
	 * or bound like the parameterized $db->Query() calls above.
	 */
	function AdminHandler()
	{
		// Fix: initialize so "echo $erfolg" below never hits an undefined variable.
		$erfolg = "";
		if (isset($_POST['save']))
		{
			$sql = new SQLq("UPDATE {pre}mod_openfire SET domain='".$_POST['openfire_domain']."',secretkey='".$_POST['openfire_userservice_secretkey']."'");
			$erfolg = "<br /><b>Die Daten wurden erfolgreich gespeichert!</b><br />";
		}
		$sql = new SQLq("SELECT * FROM {pre}mod_openfire");
		$row = $sql->FetchArray();
		// Fix: variable was misspelled "$openfire_userserivce_secretkey", which
		// left the secret-key form field below permanently empty.
		$openfire_userservice_secretkey = $row['secretkey'];
		$openfire_domain = $row['domain'];
?>
<body style="margin: 0px; background-color: #FFFFFF">
<center>
<form style="display:inline;" method="post" name="save" id="save" action="admin.php?action=modulepage&module=<?php echo($this->internal_name); ?>&PHPSESSID=<?php echo(session_id()); ?>">
<?php echo $erfolg; ?>
<!-- fix: removed stray second double quote after the bgcolor value -->
<table width="90%" cellspacing="1" bgcolor="#999999" height="106">
<tr>
<td height="19" colspan="2" background="res/lauf.jpg"> &nbsp;&nbsp;<font color="#666666"><b> Openfire-Integration</b></font></td>
</tr>
<tr>
<td bgcolor="#f5f5f5" height="27" width="20%">Openfire-Domain:</td>
<td bgcolor="#f5f5f5" height="27" width="80%"> <input type="text" name="openfire_domain" size="20" value="<?php echo $openfire_domain;?>" tabindex="1"></td>
</tr>
<tr>
<td bgcolor="#f5f5f5" height="27" width="20%">Secret Key vom User Service Plugin:</td>
<td bgcolor="#f5f5f5" height="27" width="80%"> <input type="text" name="openfire_userservice_secretkey" size="20" value="<?php echo $openfire_userservice_secretkey;?>" tabindex="2"></td>
<tr>
<td bgcolor="#f5f5f5" height="11" width="20%">&nbsp;</td>
<td bgcolor="#f5f5f5" height="11" width="80%"> <input type="submit" value="Speichern" name="save"></tr>
</table>
<p />b1gMail Openfire-Integration &copy; 2007 - 2008, <a href="http://www.sebijk.com" target="_blank">Home of the Sebijk.com</a> </div>
</form>
</center>
</body>
<?php
	}
}
?>
<file_sep>/Joomla Integration/joomla.extension.php
<?php
/*
 * Copyright (c) 2007, Home of the Sebijk.de
 * http://www.sebijk.de
 */

$MODULE_CALL = 'modjoomla';

/**
 * b1gMail module that mirrors new b1gMail signups into a Joomla
 * jos_users table so users can log in to Joomla with the same account.
 */
class modjoomla extends b1gMailModul
{
	// Module metadata shown in the b1gMail admin area.
	function modjoomla()
	{
		$this->titel = 'Joomla Integration (Registrierung)';
		$this->autor = 'Sebijk';
		$this->web = 'http://www.sebijk.de';
		$this->mail = '<EMAIL>';
		$this->version = '1.0';
		$this->designedfor = '6.3.1';
	}

	/**
	 * Signup hook: inserts the freshly registered b1gMail user into Joomla.
	 */
	function OnSignup($userid, $usermail)
	{
		global $db, $vorname, $name, $fullmail, $_REQUEST;

		/** If Joomla is not installed in the same database as b1gMail,
		    the Joomla database can be entered here. **/
		$joomla_db = "";

		$joomla_benutzername = btrim($_REQUEST['reg_mail']);
		$joomla_emailadresse = btrim($fullmail);
		$joomla_kennwort = btrim($_REQUEST['reg_pass']);
		// NOTE(review): unsalted md5() matches old Joomla 1.x only; modern
		// Joomla uses salted hashes.
		$joomla_kennwort = md5($joomla_kennwort);
		$joomla_id = intval($db->InsertId());
		$voller_name = $vorname." 
".$name;

		// Still under development:
		$joomla_lastvisitdate = "";

		// Run the query.
		// NOTE(review): placeholder order maps $joomla_emailadresse to the
		// "username" column and $joomla_benutzername to "email", and the sixth
		// placeholder fills "registerDate" - verify against the intended schema.
		$db->Query("INSERT INTO jos_users (id,name,username,email,password,usertype,block,sendEmail,gid,registerDate,lastvisitDate,activation,params) VALUES (?,?,?,?,?,'users','0','1','18',?,'0000-00-00 00:00:00','','')", $joomla_id, $voller_name, $joomla_emailadresse, $joomla_benutzername, $joomla_kennwort, $joomla_lastvisitdate);

		// Clear variables.
		unset($joomla_db);
		unset($joomla_emailadresse);
		unset($joomla_kennwort);
		unset($joomla_id);
		unset($voller_name);
	}
}
?>
<file_sep>/notizen_erstellen_fixed/notes.extension.php
<?
$MODULE_CALL = 'modNotes';

/**
 * "Notizen" (notes) module: adds a per-user notes page with
 * create/edit/show/delete actions backed by the {pre}notes table.
 */
class modNotes extends b1gMailModul
{
	// Module metadata; registers the "Notizen" user page when logged in.
	function modNotes()
	{
		global $s_loggedin;
		$this->titel = 'Notizen';
		$this->autor = '<NAME>';
		$this->web = '';
		$this->mail = '<EMAIL>';
		$this->version = '1.0';
		$this->designedfor = '6.3.1';
		if($s_loggedin=='yes')
		{
			$this->user_pages = true;
			$this->user_page_array = array(0 => array('title' => 'Notizen', 'link' => 'main.php?action=notes&bmsession='.bmSession_ID()));
		}
	}

	/**
	 * Dispatches main.php?action=notes&do=... requests.
	 * Supported "do" values: del, new, savenew, edit, saveedit, show;
	 * without "do" the note list is rendered.
	 *
	 * NOTE(review): user input is only partially sanitized with str_replace()
	 * before being concatenated into SQL - prefer proper escaping/binding.
	 */
	function FileHandler(&$file, $action)
	{
		global $_REQUEST;
		global $s_userid;
		global $tpl;
		global $FCKeditorBasePath;
		if($file=="main.php" && $action=="notes")
		{
			// Delete a note (id is validated as numeric, ownership enforced via userid).
			if($_REQUEST['do']=="del" && is_numeric($_REQUEST['id']))
			{
				$sql = new SQLq("DELETE FROM {pre}notes WHERE id='".$_REQUEST['id']."' AND userid='".$s_userid."'");
				$sql->FreeClose();
				unset($_REQUEST['do']);
			}
			// Show the empty FCKeditor form for a new note.
			if($_REQUEST['do']=="new")
			{
				$FCKeditorBasePath = "./editor/";
				$fck = new FCKEditor();
				$fck->Value = '';
				$editor=$fck->ReturnFCKeditor("text", "100%", "400px");
				$tpl->assign('page', 'notizen_erstellen.tpl');
				$tpl->assign('editor',$editor);
				$tpl->display('index.tpl');
			}
			// Persist a new note; empty subjects are stored as "-".
			if($_REQUEST['do']=="savenew")
			{
				if(trim($_REQUEST['betreff'])=="")
				{
					$_REQUEST['betreff']="-";
				}
				$sql = new SQLq("INSERT INTO {pre}notes(userid,zeit,betreff,text) 
VALUES('".$s_userid."','".time()."','".str_replace(array("'","<",">"),array("\\'","&lt;","&gt;"),$_REQUEST['betreff'])."','".str_replace("'","\\'",$_REQUEST['text'])."')");
				$sql->FreeClose();
				unset($_REQUEST['do']);
			}
			// Show the edit form pre-filled with an existing note.
			if($_REQUEST['do']=="edit")
			{
				if(is_numeric($_REQUEST['id']))
				{
					$sql = new SQLq("SELECT * FROM {pre}notes WHERE id='".$_REQUEST['id']."' AND userid='".$s_userid."'");
					if($sql->RowCount()>0)
					{
						$row=$sql->FetchArray();
						$FCKeditorBasePath = "./editor/";
						$fck = new FCKEditor();
						$fck->Value = htmlentities($row['text']);
						$editor=$fck->ReturnFCKeditor("text", "100%", "400px");
						$tpl->assign('page', 'notizen_aendern.tpl');
						$tpl->assign('note',array("editor"=>$editor,"betreff"=>$row['betreff'],"id"=>$row['id']));
						$tpl->display('index.tpl');
					}
					else
					{
						unset($_REQUEST['do']);
					}
					$sql->FreeClose();
				}
				else
				{
					unset($_REQUEST['do']);
				}
			}
			// Save an edited note, then fall through to "show".
			if($_REQUEST['do']=="saveedit")
			{
				if(trim($_REQUEST['betreff'])=="")
				{
					$_REQUEST['betreff']="-";
				}
				$notiz_text = $_REQUEST['text'];
				$notiz_text = str_replace("'","\\'",$notiz_text);
				$notiz_text = htmlspecialchars($notiz_text);
				$sql = new SQLq("UPDATE {pre}notes SET betreff='".str_replace(array("'","<",">"),array("\\'","&lt;","&gt;"),$_REQUEST['betreff'])."',text='".$notiz_text."' WHERE id='".str_replace("'","\\'",$_REQUEST['id'])."' AND userid='".$s_userid."'");
				$sql->FreeClose();
				$_REQUEST['do']="show";
			}
			// Render a single note.
			if($_REQUEST['do']=="show")
			{
				if(is_numeric($_REQUEST['id']))
				{
					$sql = new SQLq("SELECT * FROM {pre}notes WHERE userid='".$s_userid."' AND id='".$_REQUEST['id']."'");
					if($sql->RowCount()>0)
					{
						$row=$sql->FetchArray();
						$note=array("id"=>$row['id'],"betreff"=>$row['betreff'],"uhrzeit"=>date("H:i:s",$row['zeit']),"datum"=>date("d.m.Y",$row['zeit']),"text"=>htmlspecialchars($row['text']));
						$tpl->assign('page', 'notizen_anzeigen.tpl');
						$tpl->assign('note',$note);
						$tpl->display('index.tpl');
					}
					else
					{
						unset($_REQUEST['do']);
					}
					$sql->FreeClose();
				}
				else
				{
					unset($_REQUEST['do']);
				}
			}
			// Default view: list all notes of the current user, newest first.
			if(!isset($_REQUEST['do']))
			{
				$sql = new SQLq("SELECT * FROM 
{pre}notes WHERE userid='".$s_userid."' ORDER BY id DESC");
				$notes=array();
				while($row=$sql->FetchArray())
				{
					$notes[]=array("id"=>$row['id'],"betreff"=>$row['betreff'],"uhrzeit"=>date("H:i:s",$row['zeit']),"datum"=>date("d.m.Y",$row['zeit']));
				}
				// Fix: was "$sql->FreeClose;" (a no-op property read) - actually
				// call the method to release the result set.
				$sql->FreeClose();
				$tpl->assign('page', 'notizen.tpl');
				$tpl->assign('notes',$notes);
				$tpl->display('index.tpl');
			}
		}
	}

	// Creates the module's {pre}notes table on installation.
	function Install()
	{
		$sql = new SQLq("CREATE TABLE `{pre}notes` ( `id` int(11) NOT NULL auto_increment, `userid` int(11), `zeit` int(11), `betreff` varchar(255), `text` text, PRIMARY KEY (`id`) );");
		$sql->FreeClose();
	}

	// Drops the module's table on uninstallation.
	function Uninstall()
	{
		$sql = new SQLq("DROP TABLE `{pre}notes`");
		$sql->FreeClose();
	}
}
?>
<file_sep>/checkmail/modules/checkmail.extension.php
<?php
/*
 * Checkmail
 * Original Autor: haggi0505
 * (<EMAIL>)
 *
 * Optimized by Sebijk
 * http://www.sebijk.de
 */

$MODULE_CALL = 'modcheckmail';

/**
 * CheckMail module: adds a small popup page showing the number of
 * unread mails for the logged-in user.
 */
class modcheckmail extends b1gMailModul
{
	// Module metadata; registers the popup link when logged in.
	function modcheckmail()
	{
		global $s_loggedin;
		$this->titel = 'CheckMail';
		$this->autor = 'Sebijk, haggi0505';
		$this->web = 'http://www.sebijk.de';
		$this->mail = '<EMAIL>';
		$this->version = '1.2';
		$this->designedfor = '6.3.1';
		if($s_loggedin=='yes')
		{
			$this->user_pages = true;
			// The link deliberately breaks out of the href attribute to attach
			// target/onclick so the page opens in a small popup window.
			$this->user_page_array = array(0 => array('title' => 'CheckMail', 'link' => 'main.php?action=checkmail&amp;bmsession='.bmSession_ID().'" target="checkmail" onclick="void(window.open(\'main.php?action=checkmail&amp;bmsession='.bmSession_ID().'\',\'checkmail\',\'toolbar=no,width=190,height=280,resizable=yes,scrollbars=no\'));'));
		}
	}

	/**
	 * Renders the checkmail popup: counts unread mails and fills the
	 * checkmailextern.tpl template.
	 */
	function FileHandler(&$file, $action)
	{
		global $_REQUEST, $tpl, $lang_main, $db, $s_userrow, $s_usermail;
		if($file=="main.php" && $action=="checkmail")
		{
			// Count unread mails for this user.
			$sql = $db->Query("SELECT COUNT(*) AS n FROM {pre}mails WHERE gelesen='no' AND user=?", $s_usermail);
			$notread = $sql->FetchArray();
			$notread = intval($notread['n']);
			$sql->FreeClose();
			unset($sql);
			$wtext = str_replace("%%mails%%", $notread, $lang_main['welcometext']);

			// Assign template variables.
			$tpl->assign('notread', $notread);
			$tpl->assign('zeitangabe', gmdate("d M Y, H:i:s", time()));
			$tpl->assign('in_refresh', $s_userrow['in_refresh']);
			$tpl->assign('willkommenstext', $wtext);
			$tpl->display('checkmailextern.tpl');
		}
	}
}
?>
<file_sep>/checkmail/README.md
# CheckMail Version 1.2
Dies ist eine komplett überarbeitete Version von Checkmail von haggi0505. Der Code ist sehr stark optimiert worden und bietet noch einige Verbesserungen an. Mehr dazu findet ihr unter lesen.txt.
<file_sep>/Rechtschreibung/README.md
# Google Rechtschreibprüfung Version 1.0 für b1gMail
Dieses Addon ersetzt die b1gMail-Rechtschreibprüfung (falls installiert) durch die Rechtschreibprüfung von Google. Vorteil daran ist, dass es mehrere Sprachen auswählen kann, als die von b1gMail integrierte Rechtschreibprüfung.
<file_sep>/README.md
# b1gMail6-Plugins
my b1gmail V6 Plugins
d8b67c8ee73ffc7dfa2bf7312d8130ca6240204e
[ "Markdown", "PHP" ]
7
PHP
Sebijk/b1gMail6-Plugins
2cbbbedbaf7b8d1376aa99cac5f68dc7cbfce39d
f678348ebc4411d37ca02ac552c0cc7f7e5668c2
refs/heads/master
<repo_name>DenDiem/BaranovJava01<file_sep>/src/dendiem/com/Main.java package dendiem.com; /** * @author <NAME> * @date 02/02/2019 */ import java.util.ArrayList; /**@TODO * Об’єкт-одинак * Написати клас, організований за шаблоном Singleton. * Написати програму, яка: * - ілюструє звернення до методів екземпляру цього класу; * - створює колекцію з кількома різними екземплярами цього класу * та виводить її вміст на екран (обмежитися однопотоковим варіантом). */ public class Main { static void test1(){ SingletonPoint singletonPoint = SingletonPoint.getInstance(); System.out.println("First create: " + singletonPoint); SingletonPoint tryCreateNewInstance = SingletonPoint.getInstance(); System.out.println("Second create: " +tryCreateNewInstance); System.out.println("getX : " + singletonPoint.getX()); System.out.println("getY : " + singletonPoint.getY()); singletonPoint.setX(100); tryCreateNewInstance.setY(200); System.out.println("setX for singletonPoint = 100"); System.out.println("setY for tryClone = 200"); System.out.println("First create: " + singletonPoint); System.out.println("Second create: " +tryCreateNewInstance); } static void test2()throws Exception{ SingletonPoint singletonPoint = SingletonPoint.getInstance(); System.out.println("create clone"); SingletonPoint tryClone = (SingletonPoint) singletonPoint.clone(); singletonPoint.setX(2); tryClone.setY(3); System.out.println("setX for singletonPoint = 2"); System.out.println("setY for tryClone = 3"); System.out.println("create colections"); ArrayList<SingletonPoint> listSingleton = new ArrayList<>(); listSingleton.add(singletonPoint); listSingleton.add(tryClone); System.out.println("Print colections: "); for (SingletonPoint item: listSingleton) { System.out.println(item); } } public static void main(String[] args)throws Exception { System.out.println("TASK1 *** \nTestring method for Singleton class"); test1(); System.out.println("TASK2 *** \nBreak Singleton and create colections"); test2(); } } 
<file_sep>/src/dendiem/com/Point.java package dendiem.com; public class Point implements Cloneable { private int _x; private int _y; public Point(){ this._x = 0; this._y = 0; } @Override protected Object clone() throws CloneNotSupportedException { return super.clone(); } public Point(int _x, int _y) { this._x = _x; this._y = _y; } public String tellAboutU(){ return "I am Point + "; } public int getX() { return _x; } public void setX(int _x) { this._x = _x; } public int getY() { return _y; } public void setY(int _y) { this._y = _y; } @Override public String toString() { return "Point{" + "_x=" + _x + ", _y=" + _y + '}'; } }
e5450f53f62cd03eec717c03c020e50eb36ea0bc
[ "Java" ]
2
Java
DenDiem/BaranovJava01
2188ca5f947f82d25e5cf74d3c15412385b37bb1
bba53858e5347dfb288f7df320ef29d7647e6682
refs/heads/master
<file_sep><?php
// Band-members page: loads shared includes, renders one bio card per member,
// a timeline image and the fan-club form; any include failure redirects to
// the site error page (see the catch block at the bottom).
ob_start();
try {
include './includes/title.php';
?>
<!--header-->
<?php
$file = './includes/header.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<body>
<!--navigation-->
<?php
$file = './includes/menu.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<!--header image-->
<div class="row">
<div class="container header-img-container">
<img class="rounded-circle header-img" src="./images/Queen3.jpg" alt="circle">
</div>
</div>
<!--freddie info section-->
<div class="container queen-container qc-lg-update">
<div class="row align-items-end">
<div class="col-12">
<div class="card text-dark">
<img class="card-img-top" src="./images/fm12.jpg" alt="freddy" width="100">
<div class="card-body">
<h4 class="card-title"><NAME></h4>
<p class="card-text">Singer-songwriter and musician <NAME> was born <NAME> on September 5, 1946, in Zanzibar, Tanzania. As the frontman of Queen, <NAME> was one of the most talented and innovative singers of the rock era. He spent time in a boarding school in Bombay (now Mumbai), India, where he studied piano. It was not long before this charismatic young man joined his first band, the Hectics</p>
</div>
</div>
</div>
</div>
</div><br>
<!--brian may info Section-->
<div class="container queen-container qc-lg-update">
<div class="row align-items-end">
<div class="col-12">
<div class="card text-dark">
<img class="card-img-top" src="./images/bm4.jpg" width="60" alt="Brian">
<div class="card-body">
<h4 class="card-title"><NAME></h4>
<p class="card-text"><NAME> was born on July 19, 1947, in Hampton, Middlesex, England, to parents Ruth and <NAME>. An imaginative teen, May, with the help of his father, built his own homemade guitar, dubbed "The Red Special." 
The guitar, which was made from makeshift materials including firewood and was played with a six-pence coin for a pick, would later figure prominently in May's musical career. He would go on to play it on every Queen album and live show. </p>
</div>
</div>
</div>
</div>
</div><br>
<!--<NAME> info section-->
<div class="container queen-container qc-lg-update">
<div class="row align-items-end">
<div class="col-12">
<div class="card text-dark">
<img class="card-img-top" src="./images/jd1.jpg" width="60" alt="Icon"><br>
<div class="card-body">
<h4 class="card-title"><NAME></h4>
<!-- fix: typo "heaving influenced" corrected to "heavily influenced" -->
<p class="card-text">Born on August 19, 1951, in Leicester, England, As a child, he developed a passion for electronics while also taking up music, heavily influenced by the Beatles. He started playing guitar with the band the Opposition when he was 14, and eventually switched to bass. In 1970 Deacon met the band's guitarist <NAME> and drummer <NAME>, and was invited to audition for the position of bassist. Deacon got the gig, and thus, with virtuosic singer <NAME> already at the helm, the Queen lineup that would last for two decades was born.</p>
</div>
</div>
</div>
</div>
</div><br>
<!--roger taylor info section-->
<div class="container queen-container qc-lg-update">
<div class="row align-items-end">
<div class="col-12">
<div class="card text-dark">
<img class="card-img-top" src="./images/rt3.jpg" alt="roger">
<div class="card-body">
<h4 class="card-title"><NAME></h4>
<!-- fix: missing space after "drums." -->
<p class="card-text"><NAME> was born on July 26, 1949, in the seaport town of King's Lynn, part of England's Norfolk county. During his youth, Taylor developed a passion for multi-instrumentalism, before turning to drums. Taylor moved to London and studied dentistry and biology for a time, though he would ultimately decide to pursue a career in music. In 1967, he began performing with the rock group Smile, which included guitarist <NAME>. 
</p>
</div>
</div>
</div>
</div>
</div><br>
<!--timeline of band members-->
<div class="container">
<div class="row">
<div class="col-12">
<div class="shadow-lg p-3 mb-5 bg-white rounded">
<div class="card">
<div class="card-body">
<img class="card-img-bottom" src="./images/timelinecopy.png" alt="timeline" width="100">
</div>
</div>
</div>
</div>
</div>
</div><br>
<!--feedback form-->
<?php
$file = './includes/form.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<script src="js/jquery.slim.min.js"></script>
<script src="js/popper.min.js"></script>
<script src="js/bootstrap.min.js"></script>
</body>
</html>
<?php
} catch (Exception $e) {
    ob_end_clean();
    header('Location: http://site12.wdd.francistuttle.edu/projects/queen/error.php');
}
ob_end_flush();
?>
<file_sep><?php
// Logout include: clears the session and its cookie when the logout
// button is submitted, then shows the logout button form.
// run this script only if the logout button has been clicked
if (isset($_POST['logout'])) {
    // empty the $_SESSION array
    $_SESSION = array();
    // invalidate the session cookie
    if (isset($_COOKIE[session_name()])) {
        setcookie(session_name(), '', time()-86400, '/');
    }
    // end session and redirect
    session_destroy();
    // NOTE(review): this still points at a localhost dev path while the rest
    // of the site uses the site12.wdd.francistuttle.edu URLs - confirm the
    // intended deployment URL before changing it.
    header('Location: http://localhost/working_revamp/login.php');
    exit;
}
?>
<form class="text-right" method="post">
<input class="btn btn-dark mr-3 mt-1" name="logout" type="submit" value="Log out">
</form>
<file_sep><?php
// Admin page: lists all albums with edit/delete links for the logged-in admin.
ob_start();
try {
include './includes/title.php';
require_once './includes/header.php';
// create database connection
$conn = dbConnect('read', 'pdo');
$sql = 'SELECT * FROM albums ORDER BY album_id';
$result = $conn->query($sql);
$error = $conn->errorInfo()[2];
?>
<body>
<?php require_once './includes/menu.php'; ?>
<!-- NOTE(review): the login include sets $_SESSION['authenticated'], not
     $_SESSION['username'] - verify which key the login flow actually sets. -->
<h1 class="text-center">Welcome, <?= $_SESSION['username'] ?>!</h1>
<div class="container queen-container qc-lg">
<div class="row">
<div class="col-12">
<div class="col bg-danger text-white qc-sm">
<div class="row">
<h1 class="col-12 text-center">Manage Albums</h1>
</div>
</div>
</div>
</div>
</div>
<?php
if (isset($error)) {
    echo "<p>$error</p>";
} else {
    if (isset($_GET['updated'])) {
        echo '<p>Record updated</p>';
    } elseif (isset($_GET['trans_error'])) {
        // Fix: opening <p> tag was missing, producing unbalanced markup.
        echo "<p>Can't update record because of the following error: ";
        echo htmlentities($_GET['trans_error']) . '</p>';
    }
?>
<div class="container queen-container">
<div class="row">
<div class="col-12">
<table class="table table-hover table-striped table-dark">
<tr>
<th class="text-center">Title</th>
<th class="text-center">Record Label</th>
<th class="text-center">Year Released</th>
<th>&nbsp;</th>
<th>&nbsp;</th>
</tr>
<?php while ($row = $result->fetch()) { ?>
<tr>
<td class="text-center"><?= $row['album_name']; ?></td>
<td class="text-center"><?= $row['record_label']; ?></td>
<td class="text-center"><?= $row['year_released']; ?></td>
<td><a class="linq" href="update.php?album_id=<?= $row['album_id']; ?>">EDIT</a></td>
<td><a class="linq" href="delete.php?album_id=<?= $row['album_id']; ?>">DELETE</a></td>
</tr>
<?php } ?>
</table>
</div>
</div>
</div>
<div class="container text-right">
<h5><a class="linq" href="insert.php">Insert New Album</a></h5>
</div>
<?php } ?>
</body>
</html>
<?php
} catch (Exception $e) {
    ob_end_clean();
    header('Location: http://site12.wdd.francistuttle.edu/projects/queen/error.php');
}
ob_end_flush();
?>
<file_sep><?php
// Login check include: verifies the submitted password against the stored
// hash and starts an authenticated session on success.
// Expects $username, $password and $redirect to be set by the including page.
require_once 'connection.php';
$conn = dbConnect('read', 'pdo');
// get the username's hashed password from the database
$sql = 'SELECT pwd FROM users WHERE username = ?';
// prepare statement
$stmt = $conn->prepare($sql);
// pass the input parameter as a single-element array
$stmt->execute([$username]);
$storedPwd = $stmt->fetchColumn();
// check the submitted password against the stored version
if (password_verify($password, $storedPwd)) {
    $_SESSION['authenticated'] = '<NAME>';
    // get the time the session started
    $_SESSION['start'] = time();
    session_regenerate_id();
    header("Location: $redirect");
    exit;
} else {
    // if not verified, prepare error message
    $error = 'Invalid username or password';
}
<file_sep><?php
// "Thank you" page shown after a successful fan-club signup.
ob_start();
try {
include './includes/title.php';
?>
<!--header-->
<?php
$file = './includes/header.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<!--Navigation-->
<?php
$file = './includes/menu.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<body>
<div class="container-fluid text-center h1 mt-5">
<h1 class="mt-5">Thank you for joining the fan club!</h1>
</div>
</body>
</html>
<?php
} catch (Exception $e) {
    ob_end_clean();
    header('Location: http://site12.wdd.francistuttle.edu/projects/queen/error.php');
}
ob_end_flush();
?>
<file_sep><?php
// Album update page: loads the selected album for editing and applies the
// submitted changes inside a transaction, then redirects back to admin.php.
require_once './includes/header.php';
require_once './includes/utility_funcs.php';
// initialize flags
$OK = false;
$done = false;
$trans_error = false;
// create database connection
$conn = dbConnect('write', 'pdo');
// get details of selected record
if (isset($_GET['album_id']) && !$_POST) {
    // prepare SQL query
    $sql = 'SELECT album_id, album_name, record_label, year_released, album_cover FROM albums WHERE album_id = ?';
    $stmt = $conn->prepare($sql);
    // pass the placeholder value to execute() as a single-element array
    $OK = $stmt->execute([$_GET['album_id']]);
    // bind the results
    $stmt->bindColumn(1, $album_id);
    $stmt->bindColumn(2, $album_name);
    $stmt->bindColumn(3, $record_label);
    $stmt->bindColumn(4, $year_released);
    $stmt->bindColumn(5, $album_cover);
    $stmt->fetch();
}
// if form has been submitted, update record
if (isset($_POST['update'])) {
    $conn->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);
    try {
        $conn->beginTransaction();
        // prepare update query
        $sql = 'UPDATE albums SET album_name = ?, record_label = ?, year_released = ?, album_cover = ? 
                WHERE album_id = ?';
        $stmt = $conn->prepare($sql);
        $stmt->bindParam(1, $_POST['album_name'], PDO::PARAM_STR);
        $stmt->bindParam(2, $_POST['record_label'], PDO::PARAM_STR);
        $stmt->bindParam(3, $_POST['year_released'], PDO::PARAM_INT);
        $stmt->bindParam(4, $_POST['album_cover'], PDO::PARAM_STR);
        $stmt->bindParam(5, $_POST['album_id'], PDO::PARAM_INT);
        // execute query
        $stmt->execute();
        $done = $conn->commit();
    } catch (Exception $e) {
        $conn->rollBack();
        $trans_error = $e->getMessage();
    }
}
// redirect page after updating or if $_GET['article_id'] not defined
if (($done || $trans_error)||(!$_POST && !isset($_GET['album_id']))) {
    $url = 'http://site12.wdd.francistuttle.edu/projects/queen/admin.php';
    if ($trans_error) {
        $url .= "?trans_error=$trans_error";
    }
    header("Location: $url");
    exit;
}
if (isset($stmt)) {
    // get error message (will be null if no error)
    $error = $stmt->errorInfo()[2];
}
?>
<body>
<?php
require_once './includes/menu.php';
if (isset($error)) {
    echo "<p class='warning'>Error: $error</p>";
}
if($album_id == 0) {
?>
<p class="warning">Invalid request: record does not exist.</p>
<?php } else { ?>
<div class="container mt-5 col-6 queen-container qc-lg-update">
<div class="row">
<div class="col-12">
<div class="col bg-danger text-white qc-sm">
<h1 class="text-center">Update Album: "<?= $album_name ?>"</h1>
<form method="post" action="update.php" enctype="multipart/form-data">
<p class="form-group col">
<label for="album_name">Title (<?= $album_name ?>):</label>
<input class="form-control" name="album_name" type="text" id="album_name" value="<?php if (isset($error)) { echo safe($_POST['album_name']); } ?>">
</p>
<p class="form-group col">
<label for="record_label">Record Label (<?= $record_label ?>):</label>
<input class="form-control" name="record_label" type="text" id="record_label" value="<?php if (isset($error)) { echo safe($_POST['record_label']); } ?>">
</p>
<p class="form-group col">
<label for="year_released">Year Released (<?= $year_released ?>):</label>
<input class="form-control" name="year_released" type="text" id="year_released" value="<?php if (isset($error)) { echo safe($_POST['year_released']); } ?>">
</p>
<p class="form-group col">
<input name="album_id" type="hidden" value="<?= $album_id ?>">
<label for="album_cover">Name of Cover Image File (<?= $album_cover ?>):</label>
<input class="form-control" name="album_cover" type="text" id="album_cover" value="<?php if (isset($error)) { echo safe($_POST['album_cover']); } ?>">
</p>
<p class="form-group col">
<input type="submit" name="update" value="Update Album">
</p>
</form>
</div>
</div>
</div>
</div>
<div class="col">
<p>
<h5 class="text-center"><a href="admin.php">&laquo; Back to list</a></h5>
<h5 class="text-center">OR</h5>
<h5 class="text-center"><a href="update_tracks.php?album_id=<?= $album_id; ?>">Edit Tracks &raquo;</a></h5>
</p>
</div>
<?php } ?>
</body>
</html>
<file_sep><?php
// Album delete page: shows a confirmation screen, then removes the album
// and its tracks; redirects back to admin.php when done or cancelled.
require_once './includes/header.php';
require_once './includes/utility_funcs.php';
// create database connection
$conn = dbConnect('write', 'pdo');
// initialize flags
$OK = false;
$deleted = false;
// get details of selected record
if (isset($_GET['album_id']) && !$_POST) {
    // prepare SQL query
    $sql = 'SELECT album_id, album_name, record_label, year_released, album_cover FROM albums WHERE album_id = ?';
    $stmt = $conn->prepare($sql);
    // pass the placeholder value to execute() as a single-element array
    $OK = $stmt->execute([$_GET['album_id']]);
    // assign result array to variables
    $stmt->bindColumn(1, $album_id);
    $stmt->bindColumn(2, $album_name);
    $stmt->bindColumn(3, $record_label);
    $stmt->bindColumn(4, $year_released);
    $stmt->bindColumn(5, $album_cover);
    // fetch the result
    $stmt->fetch();
    $error = $stmt->errorInfo()[2];
}
// if confirm deletion button has been clicked, delete record
if (isset($_POST['delete'])) {
    $sql = 'DELETE FROM albums WHERE album_id = ?';
    $stmt = $conn->prepare($sql);
    $stmt->execute([$_POST['album_id']]);
    // get number of affected rows
    $deleted = $stmt->rowCount();
    if (!$deleted) {
        $error = 'There was a problem deleting the record. ';
        $error .= $stmt->errorInfo()[2];
    }
    $sql2 = 'DELETE FROM tracks WHERE album_id = ?';
    $stmt2 = $conn->prepare($sql2);
    $stmt2->execute([$_POST['album_id']]);
    // get number of affected rows
    $deleted = $stmt2->rowCount();
    if (!$deleted) {
        $error = 'There was a problem deleting the record. ';
        $error .= $stmt2->errorInfo()[2];
    }
}
// redirect the page if deleted, cancel button clicked, or $_GET['article_id'] not defined
if ($deleted || isset($_POST['cancel_delete']) || !isset($_GET['album_id'])) {
    header('Location: http://site12.wdd.francistuttle.edu/projects/queen/admin.php');
    exit;
}
?>
<body>
<?php require_once './includes/menu.php'; ?>
<div class="container text-center mt-5 mb-5">
<h1 class="">Delete Album</h1>
</div>
<?php
if (isset($error)) {
    echo "<p class='warning'>Error: $error</p>";
} elseif (isset($album_id) && $album_id == 0) {
?>
<p class="">Invalid request: record does not exist.</p>
<?php } else { ?>
<div class="container">
<div class="container text-danger text-center">
<h4 class="mb-5">Please confirm that you want to delete the following item. <br>This action cannot be undone.</h4>
</div>
<div class="container text-center">
<h2 class=""><?= safe($album_name) . ' (' . safe($record_label) . ') ' . safe($year_released); ?></h2>
</div>
</div>
<?php } ?>
<div class="container text-center mt-5">
<form method="post" action="delete.php">
<p>
<input class="mr-5 input-warning" name="cancel_delete" type="submit" value="Cancel">
<?php if (isset($album_id) && $album_id > 0) { ?>
<input name="album_id" type="hidden" value="<?= $album_id; ?>">
<?php } ?>
<?php if (isset($album_id) && $album_id > 0) { ?>
<input class="input-warning" type="submit" name="delete" value="Confirm Deletion">
<?php } ?>
</p>
</form>
</div>
</body>
</html>
<file_sep><?php
// Shared <head> include: pulls in the session-timeout guard on admin pages,
// the common utilities/connection, and the fan-club form on public pages.
if (basename($_SERVER['PHP_SELF']) == 'admin.php' || basename($_SERVER['PHP_SELF']) == 'delete.php' || basename($_SERVER['PHP_SELF']) == 'update.php' || basename($_SERVER['PHP_SELF']) == 'insert.php' || basename($_SERVER['PHP_SELF']) == 'update_tracks.php') {
    require_once './includes/session_timeout_db.php';
}
require_once './includes/utility_funcs.php';
require_once './includes/connection.php';
if (basename($_SERVER['PHP_SELF']) !== 'admin.php' && basename($_SERVER['PHP_SELF']) !== 'login.php' && basename($_SERVER['PHP_SELF']) !== 'delete.php' && basename($_SERVER['PHP_SELF']) !== 'update.php' && basename($_SERVER['PHP_SELF']) !== 'insert.php' && basename($_SERVER['PHP_SELF']) !== 'update_tracks.php') {
    require_once './includes/form_insert.php';
}
?>
<!DOCTYPE html>
<html lang="en-us">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<title>Queen <?= $title ?? '' ?></title>
<link rel="stylesheet" href="css/bootstrap.min.css">
<link href="https://fonts.googleapis.com/css?family=Peralta" rel="stylesheet">
</head>
<file_sep><!-- Fan-club signup form include.
     NOTE(review): $error, $suspect, $missing, $errors, $name, $address,
     $birthday, $email, $comments, $mailSent, $message and $headers are all
     expected to be defined by the mail-processing include that runs before
     this file - verify it is always included first. -->
<div class="container" style="background: #C9BF67;padding-top:15px; padding-bottom:15px;">
<div class="row">
<div class="col-12">
<div class="col bg-danger text-white" style="padding-top: 15px; padding-left:25px; padding-bottom: 10px;">
<?php if (isset($error)) { echo "<p>Error: $error</p>"; } ?>
<form method="post">
<h4>JOIN THE FAN CLUB!!</h4>
<?php if (($_POST && $suspect) || ($_POST && isset($errors['mailfail']))) { ?>
<p class="warning">Sorry, your mail could not be sent. Please try later.</p>
<?php } elseif ($missing || $errors) { ?>
<p class="warning">Please fix the item(s) indicated.</p>
<?php } ?>
<div class="form-group col">
<label for="name">Name <?php if (in_array('name', $missing)) { ?> <span class="warning">Please enter your name</span> <?php } ?> </label>
<input type="text" class="form-control" id="name" name="name" placeholder="First and last name" <?php if ($missing || $errors) { echo 'value="' . htmlentities($name) . '"'; } ?>>
</div>
<div class="form-group col">
<label for="address">Address <?php if (in_array('address', $missing)) { ?> <span class="warning">Please enter your address</span> <?php } ?> </label>
<input type="text" class="form-control" id="address" name="address" placeholder="City, State and Zip" <?php if ($missing || $errors) { echo 'value="' . htmlentities($address) . '"'; } ?>>
</div>
<div class="form-group col">
<label for="birthday">Birthday <?php if (in_array('birthday', $missing)) { ?> <span class="warning">Please enter your birthday</span> <?php } ?> </label>
<input type="text" class="form-control" id="birthday" name="birthday" placeholder="01/18/1978" <?php if ($missing || $errors) { echo 'value="' . htmlentities($birthday) . '"'; } ?>>
</div>
<div class="form-group col">
<label for="email">Email <?php if (in_array('email', $missing)) { ?> <span class="warning">Please enter your email</span> <?php } ?> </label>
<input type="text" class="form-control" id="email" name="email" placeholder="<EMAIL>" <?php if ($missing || $errors) { echo 'value="' . htmlentities($email) . '"'; } ?>>
</div>
<div class="form-group col">
<label for="comments">Comments <?php if (in_array('comments', $missing)) { ?> <span class="warning">Please enter your comments</span> <?php } ?> </label>
<input type="text" class="form-control" id="comments" name="comments" placeholder="Comments" <?php if ($missing || $errors) { echo 'value="' . htmlentities($comments) . '"'; } ?>>
</div>
<div class="form-group col" style="margin-top:23px; padding:10px">
<input type="submit" name="submit" value="Join Today">
</div>
</form>
</div>
</div>
</div>
</div>
<pre>
<?php
// Debug output of the generated mail (only after a successful send).
if ($_POST && $mailSent) {
    echo "Message body\n\n";
    echo htmlentities($message) . "\n";
    echo 'Headers: '. htmlentities($headers);
}
?>
</pre>
<file_sep><?php
// Fun-facts page: static trivia cards with images, plus the fan-club form.
// NOTE(review): image paths here use "../images/..." while the other pages
// use "./images/..." - confirm which path is correct for this page's location.
ob_start();
try {
include './includes/title.php';
?>
<!--header-->
<?php
$file = './includes/header.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<body>
<!--navigation-->
<?php
$file = './includes/menu.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<!--top image funfacts 1-5-->
<div class="container" style="background: #C9BF67; padding-top:10px; padding-bottom:15px;">
<div class="row">
<div class="col-12">
<div class="card">
<img class="card-img-top" src="../images/band.jpg" alt="band_logo" style= "width:100%;z-index:0;">
<div class="card-body text-dark">
<h4 class="card-title">Fun Facts</h4>
<p class="card-text"> 1. Not all artists are drop-outs People often think that rock stars dedicate their whole life to music and hence are not much good in studies. 
A big blow to such misconception is the Queen Band. Except <NAME>, every other band member has a post graduate degree.<br> 2. <NAME> aka <NAME>ara When <NAME> was born, he was named <NAME> as he belonged to an Indian family from Gujarat. But later when he was studying in a boarding school, he adopted the name <NAME>. Strangely his name in the passport is <NAME>. <br> 3. Freddie named the band “Queen” Freddie knew that naming a male band “Queen” would raise some eyebrows and would be associated with gay terms. Yet he went ahead with this name as he thought it to be a regal, strong and splendid sounding name.
<br> 4. “I want it all” inspired by Brian’s wife <NAME> had claimed that the main inspiration behind the song “I want it all” was his wife <NAME>. He got the idea when his wife said I want it all and I want it now!
<br> 5. The accidental invention of stand-free mic </p>
</div>
</div>
</div>
</div>
</div><br>
<!--image 2 funfacts 6-8-->
<div class="container" style="background: #C9BF67; padding-top:15px; padding-bottom:15px;">
<div class="row">
<div class="col-12">
<div class="card">
<img class="card-img-top" src="../images/Queen-live.jpg" alt="band_logo" style= "width:100%;">
<div class="card-body text-dark">
<h4 class="card-title">Fun Facts</h4>
<p class="card-text"> 6. “I like to ride my bicycle” but Freddie didn’t like to ride bicycle! Quite contrary to the lyrics of the song “Bicycle Race”, <NAME> didn’t like riding <br> 7. Do you know that the song “Crazy little thing called love” was composed in a bath? And guess what? It was composed by <NAME> himself. Freddie was actually in a hotel and while bathing in a tub he got inspired for this song. He even had the piano brought near his tub so that he can compose the song. <br> 8. Two music videos of Queen was filmed in Roger Taylor’s garden The drummer of Queen, Roger Taylor’s garden is as famous as Roger Taylor himself. This is due to the fact that the videos of “Spread your wings” and “I want it all” were recorded in his garden. He really got inspired by his wife I guess!</p>
</div>
</div>
</div>
</div>
</div><br>
<!--image 3 funfacts 9&10-->
<div class="container" style="background: #C9BF67; padding-top:15px; padding-bottom:15px;">
<div class="row">
<div class="col-12">
<div class="card">
<img class="card-img-top" src="../images/FreddieMJ.jpg" alt="freddiemj" style= "width:100%;">
<div class="card-body text-dark">
<h4 class="card-title">Fun Facts</h4>
<p class="card-text"> 9. MJ insisted to release “Another one bites the dust” At first Queen didn’t plan to release the song “Another one bites the dust”. But when <NAME> heard the song at the backstage of an L.A. concert, he convinced the band to release it. The song became one of the most successful singles of the band and sold over seven million.<br> 10. 
Queen honored by England: In 1999, England honored the Queen band by releasing a postage stamp commemorating <NAME>. But this stamp created unpleasantness among the royal family as the photo chosen for the stamp had <NAME>aylor in background. According to British tradition, the only living people who can appear on their stamps are the member of Royal Family.</p>
<img class="card-img-top" src="../images/stamp.jpg" alt="stamp" style= "width:100%;">
</div>
</div>
</div>
</div>
</div><br>
<!--feedback form-->
<?php
$file = './includes/form.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<script src="js/jquery.slim.min.js"></script>
<script src="js/popper.min.js"></script>
<script src="js/bootstrap.min.js"></script>
</body>
</html>
<?php
} catch (Exception $e) {
    ob_end_clean();
    // Fix: previously redirected to http://localhost/phpsols-4e/error.php
    // (a textbook leftover); now consistent with the error handler used by
    // every other page on this site.
    header('Location: http://site12.wdd.francistuttle.edu/projects/queen/error.php');
}
ob_end_flush();
?>
<file_sep><?php
// Citations page: lists sources for the site's content, embeds a video
// and a quote, plus the fan-club form.
ob_start();
try {
include './includes/title.php';
?>
<!--header-->
<?php
$file = './includes/header.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<!--Navigation-->
<?php
$file = './includes/menu.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<body>
<!--top image-->
<div class="container cite-container1">
<div class="row">
<div class="col-12">
<div class="card text-dark">
<img class="card-img-top" src="./images/cite.jpg" alt="Card image cap">
</div>
<!--cite info-->
<div class="card-body">
<h4 class="card-title">Cite Info</h4>
<p class="card-text"> Album info: <a class="cite-link" href="http://www.icce.rug.nl/~soundscapes/VOLUME03/Queen_anomaly_Appendix1.shtml">Albums</a><br> Funfacts: <a class="cite-link" href="https://ohfact.com/interesting-facts-about-band-queen/">Oh Facts </a><br> Photos:<a class="cite-link" href="https://www.google.com/"> Photos</a><br> 
Personal Info:<a class="cite-link" href="https://en.wikipedia.org"> Member info</a><br> Quote:<a class="cite-link" href="https://www.brainyquote.com/authors/freddie_mercury"> Brainy quote</a><br> video<a class="cite-link" href="https://www.youtube.com/watch?v=A22oy8dFjqc"> Youtube</a><br> </p>
</div>
</div>
</div>
</div><br>
<!--video-->
<div class="container cite-container2">
<div class="row">
<div class="col-12">
<div class="embed-responsive embed-responsive-16by9">
<iframe width="560" height="315" src="https://www.youtube.com/embed/A22oy8dFjqc" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
</div>
</div>
</div>
</div><br>
<!--blockquote-->
<div class="container">
<div class="row">
<div class="col-12">
<div class="shadow-lg p-3 mb-5 bg rounded">
<blockquote class="blockquote blockquote-reverse cite-quote">
<p class="mb-0">We're a bit flashy, but the musics's not one big noise. </p>
<footer class="blockquote-footer"><NAME> <cite title="Source Title">brainyquote</cite> </footer>
</blockquote>
</div>
</div>
</div>
</div>
<!--feedback form-->
<?php
$file = './includes/form.php';
if (file_exists($file) && is_readable($file)) {
    require $file;
} else {
    throw new Exception("$file can't be found");
}
?>
<script src="js/jquery.slim.min.js"></script>
<script src="js/popper.min.js"></script>
<script src="js/bootstrap.min.js"></script>
</body>
</html>
<?php
} catch (Exception $e) {
    ob_end_clean();
    header('Location: http://site12.wdd.francistuttle.edu/projects/queen/error.php');
}
ob_end_flush();
?>
<file_sep><?php require_once './includes/header.php'; require_once './includes/utility_funcs.php'; // initialize flags $OK = false; $done = false; $trans_error = false; // create database connection $conn = dbConnect('write', 'pdo'); // get details of selected record if (isset($_GET['album_id']) && !$_POST) { // prepare SQL query $sql = 'SELECT track_id, album_id, album_name, name FROM tracks WHERE album_id = ?'; $stmt = 
$conn->prepare($sql); // pass the placeholder value to execute() as a single-element array $OK = $stmt->execute([$_GET['album_id']]); // bind the results $stmt->bindColumn(1, $track_id); $stmt->bindColumn(2, $album_id); $stmt->bindColumn(3, $album_name); $stmt->bindColumn(4, $name); $stmt->fetch(); } // if form has been submitted, update record if (isset($_POST['update'])) { $conn->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION); $album_id = $_POST['album_id']; $album_name = $_POST['album_name']; // delete old tracks $sql2 = 'DELETE FROM tracks WHERE album_id = :album_id'; $stmt2 = $conn->prepare($sql2); $stmt2->bindParam(':album_id', $album_id, PDO::PARAM_INT); $stmt2->execute(); // convert string of new tracks into array $tnames = explode(",", safe($_POST['tracks'])); $numTracks = count($tnames); $sql3 = 'INSERT INTO tracks (album_id, album_name, name) VALUES (:a_id, :a_name, :t_name)'; $stmt3 = $conn->prepare($sql3); $stmt3->bindParam(':a_id', $album_id, PDO::PARAM_INT); $stmt3->bindParam(':a_name', $album_name, PDO::PARAM_STR); try { $conn->beginTransaction(); $i = 0; while ($i < $numTracks) { $tname = $tnames[$i]; $stmt3->bindParam(':t_name', $tname, PDO::PARAM_STR); $stmt3->execute(); $i++; } $conn->commit(); } catch (Exception $e){ $conn->rollback(); throw $e; } $url = 'http://site12.wdd.francistuttle.edu/projects/queen/admin.php'; if ($trans_error) { $url .= "?trans_error=$trans_error"; } header("Location: $url"); } ?> <body> <?php require_once './includes/menu.php'; if($album_id == 0) { ?> <p class="warning">Invalid request: record does not exist.</p> <?php } else { ?> <div class="container mt-5 col-6 queen-container qc-lg-update"> <div class="row"> <div class="col-12"> <div class="col bg-danger text-white qc-sm"> <h1 class="text-center">Update Tracks For "<?= $album_name ?>":</h1> <form method="post" action="update_tracks.php" enctype="multipart/form-data"> <p class="form-group col"> <label for="tracks">Enter Track Names (separated by 
commas):</label> <textarea class="form-control" name="tracks" type="text" id="tracks" value="<?php if (isset($error)) { echo safe($_POST['tracks']); } ?>"></textarea> <input name="album_id" type="hidden" value="<?= $album_id ?>"> <input name="album_name" type="hidden" value="<?= $album_name ?>"> </p> <p class=" text-center form-group col"> <input type="submit" name="update" id="update" value="Update Tracks"> <p class="text-center">Warning! <br>This will delete all existing tracks associated with this album and insert the new values</p> </p> </form> </div> </div> </div> </div> <div class="col"> <p> <h5 class="text-center"><a href="admin.php">&laquo; Back to Album List</a></h5> </p> </div> <?php } ?><file_sep> <nav class="navbar sticky-top navbar-dark bg-danger"> <a class="navbar-brand" href="index.php">Band</a> <a class="navbar-brand" href="music.php">Music</a> <a class="navbar-brand" href="fun.php">Fun Facts</a> <a class="navbar-brand" href="cite.php">Cite Info</a> <a class="navbar-brand" href="admin.php">Admin</a> </nav> <?php if (basename($_SERVER['PHP_SELF']) == 'admin.php' || basename($_SERVER['PHP_SELF']) == 'delete.php' || basename($_SERVER['PHP_SELF']) == 'update.php' || basename($_SERVER['PHP_SELF']) == 'insert.php' || basename($_SERVER['PHP_SELF']) == 'update_tracks.php') { require_once './includes/logout.php'; } ?><file_sep><?php ob_start(); try { include './includes/title.php'; require_once './includes/header.php'; $error = ''; if (isset($_POST['login'])) { session_start(); $username = trim($_POST['username']); $password = trim($_POST['pwd']); $_SESSION['username'] = $username; // location to redirect on success $redirect = 'http://localhost/working_revamp/admin.php'; require_once './includes/authenticate_pdo.php'; } ?> <body> <?php require_once './includes/menu.php'; if ($error) { echo "<p>$error</p>"; } elseif (isset($_GET['expired'])) { ?> <p>Your session has expired. 
Please log in again.</p> <?php } ?> <div class="container col-3" style="min-width: 350px; max-width: 400px; background: #C9BF67; padding-top:15px; padding-bottom:15px; margin-top: 150px;"> <div class="row"> <div class="col-12"> <div class="col bg-danger text-white" style="min-width: 300px; max-width: 400px; padding-top: 15px; padding-left: 25px; padding-bottom: 10px;"> <form method="post" action="login.php"> <p class="form-group col"> <label for="username">Username:</label> <input type="text" name="username" id="username" class="form-control"> </p> <p class="form-group col"> <label for="pwd">Password:</label> <input type="password" name="pwd" id="pwd" class="form-control"> </p> <p class="form-group col"> <input name="login" type="submit" value="Log in" class="form-control"> </p> </form> </div> </div> </div> </div> <script src="js/jquery.slim.min.js"></script> <script src="js/popper.min.js"></script> <script src="js/bootstrap.min.js"></script> </body> </html> <?php } catch (Exception $e) { ob_end_clean(); header('Location: http://localhost/phpsols-4e/error.php'); } ob_end_flush(); ?><file_sep><?php ob_start(); try { include './includes/title.php'; ?> <!--header--> <?php $file = './includes/header.php'; if (file_exists($file) && is_readable($file)) { require $file; } else { throw new Exception("$file can't be found"); } // establish connection and build SQL $conn = dbConnect('read', 'pdo'); $sql = 'SELECT a.*, GROUP_CONCAT(t.name) as track_name FROM albums AS a LEFT JOIN tracks AS t ON a.album_id = t.album_id GROUP BY a.album_id'; ?> <body> <!--navigation--> <?php $file = './includes/menu.php'; if (file_exists($file) && is_readable($file)) { require $file; } else { throw new Exception("$file can't be found"); } ?> <div class="container"> <div class="row"> <?php // loop through albums foreach ($conn->query($sql) as $row) { ?> <div class="col-6 mw-330 text-center mt-5 mx-auto"> <img src="./images/<?= $row['album_cover']?>" alt="Card image cap" style="width:100%;"> 
<!--shadow--> <div class="shadow-lg p-3 mb-5 bg-white rounded"> <div class="card-body" style="padding-left:10px;"> <h4 class="card-title"><?= $row['album_name'] ?></h4> <b><?= print_r($row['album_name'] . ' (' . $row['record_label'] . ') (' . $row['year_released'] . ')', true); ?><br><br></b> <ul class="list-group list-group-flush"> <?php // get tracks from long string $tracks = explode(",", $row['track_name']); $numTracks = count($tracks); $i = 0; // loop through tracks while ($i < $numTracks) { ?> <li class="list-group-item"><?php echo $tracks[$i]; $i++; ?></li> <?php } ?> </ul> </div> </div> </div> <?php } ?> </div> </div> <!--feedback form--> <?php $file = './includes/form.php'; if (file_exists($file) && is_readable($file)) { require $file; } else { throw new Exception("$file can't be found"); } ?> <script src="js/jquery.slim.min.js"></script> <script src="js/popper.min.js"></script> <script src="js/bootstrap.min.js"></script> </body> </html> <?php } catch (Exception $e) { ob_end_clean(); header('Location: http://site12.wdd.francistuttle.edu/projects/queen/error.php'); } ob_end_flush(); ?><file_sep><?php ob_start(); try { include './includes/title.php'; require_once './includes/header.php'; if (isset($_POST['submit'])) { // initialize flag $OK = false; // create database connection $conn = dbConnect('write', 'pdo'); // create SQL $sql = 'INSERT INTO albums (album_name, record_label, year_released, album_cover) VALUES (:title, :label, :year, :image)'; // prepare the statement $stmt = $conn->prepare($sql); // bind the parameteres and execute the statement $title = safe($_POST['title']); $label = safe($_POST['label']); $year = safe($_POST['year']); $image = safe($_POST['image']); $stmt->bindParam(':title', $title, PDO::PARAM_STR); $stmt->bindParam(':label', $label, PDO::PARAM_STR); $stmt->bindParam(':year', $year, PDO::PARAM_INT); $stmt->bindParam(':image', $image, PDO::PARAM_STR); // exectue $stmt->execute(); // get album id from inserted album $a_id = 
$conn->lastInsertId(); // convert string of tracks into array $tnames = explode(",", safe($_POST['tracks'])); $numTracks = count($tnames); // insert multiple rows of tracks into tracks table $sql3 = 'INSERT INTO tracks (album_id, album_name, name) VALUES (:a_id, :a_name, :t_name)'; $stmt3 = $conn->prepare($sql3); $stmt3->bindParam(':a_id', $a_id, PDO::PARAM_INT); $stmt3->bindParam(':a_name', $title, PDO::PARAM_STR); try { $conn->beginTransaction(); $i = 0; while ($i < $numTracks) { $tname = $tnames[$i]; $stmt3->bindParam(':t_name', $tname, PDO::PARAM_STR); $stmt3->execute(); $i++; } $conn->commit(); } catch (Exception $e){ $conn->rollback(); throw $e; } $url = 'http://localhost/working_revamp/admin.php'; if ($trans_error) { $url .= "?trans_error=$trans_error"; } header("Location: $url"); } ?> <body> <?php require_once './includes/menu.php'; ?> <div class="container mt-5" style="background: #C9BF67;padding-top:15px; padding-bottom:15px;"> <div class="row"> <div class="col-12"> <div class="col bg-danger text-white" style="padding-top: 15px; padding-left:25px; padding-bottom: 10px;"> <h1>Insert New Album</h1> <form method="post" action="insert.php" enctype="multipart/form-data"> <p class="form-group col"> <label for="title">Title:</label> <input class="form-control" name="title" type="text" id="title" value="<?php if (isset($error)) { echo safe($_POST['title']); } ?>"> </p> <p class="form-group col"> <label for="label">Record Label:</label> <input class="form-control" name="label" type="text" id="label" value="<?php if (isset($error)) { echo safe($_POST['label']); } ?>"> </p> <p class="form-group col"> <label for="year">Year Released:</label> <input class="form-control" name="year" type="text" id="year" value="<?php if (isset($error)) { echo safe($_POST['year']); } ?>"> </p> <p class="form-group col"> <label for="image">Name of Image File:</label> <input class="form-control" name="image" type="text" id="image" value="<?php if (isset($error)) { echo 
safe($_POST['image']); } ?>"> </p> <p class="form-group col"> <label for="tracks">Track Names (separated by commas):</label> <textarea class="form-control" name="tracks" type="text" id="tracks" value="<?php if (isset($error)) { echo safe($_POST['tracks']); } ?>"></textarea> </p> <p class="form-group col"> <input type="submit" name="submit" value="Insert Album"> </p> </form> </div> </div> </div> </div> <script src="js/jquery.slim.min.js"></script> <script src="js/popper.min.js"></script> <script src="js/bootstrap.min.js"></script> </body> </html> <?php } catch (Exception $e) { ob_end_clean(); header('Location: http://localhost/working_revamp/thanks.php'); } ob_end_flush(); ?>
878b4ea3aa1d8eb4163838337ac590224debd16f
[ "PHP" ]
16
PHP
njferrari14/discography
a72bf17c4d66c1d566a5843cf843e69f1e1510a6
581e138cc52af45fe38d6596b8f98e6bf5a22dbd
refs/heads/master
<file_sep>import React from 'react'; import ReactDOM from 'react-dom'; let allTags = [] class Tagcloud extends React.Component { constructor(props){ super(props); this.state = { allTags: [ {id: 1, content: "Ferocious"}, {id: 2, content: "Empathetic"}, {id: 3, content: "Bold"}, {id: 4, content: "Visionary"} ]}; this.addTagEvent = this.addTagEvent.bind(this); } render() { let tags = this.props.tags.map((tag) => { return <TagItem key={tag.id} tag={tag} className="tag-item-container" /> }) return ( <div className="tag-list-container"> {tags} <NewTagItem addTagEvent={this.addTagEvent} /> </div> ) } addTagEvent(tagItem){ allTags.push(tagItem.newTag); this.setState({ allTags }); } } class TagItem extends React.Component { render(){ return <div className="tag-item"> <p>{this.props.tag}<span id="close-x">&times;</span></p> </div> } } class NewTagItem extends React.Component { constructor(props){ super(props); this.onSubmit = this.onSubmit.bind(this); } componentDidMount(){ ReactDOM.findDOMNode(this.refs.tagName).focus(); } render(){ return ( <form onSubmit={this.onSubmit}> <label>Enter a new tag </label> <input type="text" name="newtag" ref="tagName" /> </form> ); } onSubmit(event){ event.preventDefault(); let input = ReactDOM.findDOMNode(this.refs.tagName) let newTag = input.value; this.props.addTagEvent({ newTag }); input.value = ''; } } ReactDOM.render( <Tagcloud tags={allTags}/>, document.getElementById('app') ); export default Tagcloud // GENERATE UUID /* generateUUID(min,max){ // NOT A REAL UUID! min = Math.ceil(min); max = Math.floor(max); return Math.floor(Math.random() * (max - min + 1)) + min; } */
34bde17c501030753f24d1c41dd0ee1b3205fb14
[ "JavaScript" ]
1
JavaScript
supernoir/react-tagcloud
d94c236b0e8b6ec138423208ad276eb323fa9af9
1b8feb48bab6b6335a0e82236ee6f06e34802fb0
refs/heads/master
<file_sep><?php print <<<_html_ <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" /> <title>{$title[0]}</title> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.13.1/css/all.css" integrity="<KEY>" crossorigin="anonymous"/> <link href="https://fonts.googleapis.com/css2?family=M+PLUS+Rounded+1c:wght@400;800&display=swap" rel="stylesheet"/> <link rel="stylesheet" href="src/style.css" /> </head> <body> <header class="header"> <div class="header__title center"> <i class="fas fa-bars BAR"></i> <span class="header__title_title">Web図書管理システム</span> </div> <form action="http://localhost:81/school/24/search.php" method="POST" class="header__form center"> <input class="header__search" type="text" name="keyword" pattern="^[^<>]+$" placeholder="キーワード検索" required /> <input class="header__search__button" type="submit" value="Search" /> </form> <a href="http://localhost:81/school/24/main.php"><i class="fas fa-home"></i></a> </header> <main> <section class="sideBar"> <div class="sidebar__top"> <div class="sidebar__top__icon center"> <i class="fas fa-book-reader"></i> </div> <span>管理 Page</span> </div> <div class="sideBar__link"> <a href="http://localhost:81/school/24/main.php"><i class="fas fa-book"></i><span>All Books</span></a> <a href="http://localhost:81/school/24/add.php"><i class="fas fa-file-alt"></i><span>Add Book</span></a> </div> </section> <section class="bookTable"> <div class="bookTable__title"> <span><i class="fas fa-chevron-right">{$title[1]}</i></span> </div> <div class="bookTable__add"> <form action="{$URL}" method="POST"> <div class="bookTable__add__form"> <ul class="bookTable__add__form__ul"> <li><label>ISBN</label><input type="text" name="isbn" id="isbn" maxlength="13" title="数字10桁又は数字9桁とX, 13桁の数字だけで入力可能" pattern="^([0-9]{9}[0-9xX]|[0-9]{13})$" class="" value="{$arr['b_isbn']}" required /></li> <li><label>図書名</label><input type="text" name="name" 
pattern="^[^<>]+$" value="{$arr['b_name']}" required /></li> <li><label>著者名</label><input type="text" name="author" pattern="^[^<>]+$" value="{$arr['b_author']}" required /></li> <li><label>出版社</label><input type="text" name="pub" pattern="^[^<>]+$" value="{$arr['b_pub']}" required /></li> </ul> <ul class="bookTable__add__form__ul"> <li><label>価格</label><input type="number" name="price" value="{$arr['b_price']}" title="数字だけを入力してください" required /></li> <li><label>出版日</label><input type="date" name="pubdate" value="{$arr['b_pubdate']}" required /></li> <li><label>購入日</label><input type="date" name="buydate" value="{$arr['b_buydate']}" required /></li> <li><label>画像URL</label><input type="text" name="url" value="{$arr['img_url']}" pattern="^[^<>]+$" /></li> </ul> </div> <div class="bookTable__add__buttons"> <input type="submit" value="登録" /> <input type="reset" value="RESET" /> </div> </form> </div> </section> </main> <script src="./js/sidebar.js"></script> <script src="./js/preventIsbn.js"></script> </body> </html> _html_; <file_sep><?php echo $_POST['validate']; <file_sep><?php print <<<_html_ <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" /> <title>Library Project : {$title[0]}</title> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.13.1/css/ all.css"integrity="<KEY>" crossorigin="anonymous"/> <link href="https://fonts.googleapis.com/css2?family=M+PLUS+Rounded+1c:wght@400;800&display=swap" rel="stylesheet"/> <link rel="stylesheet" href="src/style.css" /> </head> <body> <header class="header"> <div class="header__title center"> <i class="fas fa-bars BAR"></i><span class="header__title_title">Web図書管理システム</span> </div> <form action="http://localhost:81/school/24/search.php" method="POST" class="header__form center"> <input class="header__search" name="keyword" type="text" pattern="^[^<>]+$" placeholder="キーワード検索" required> <input class="header__search__button" 
type="submit" value="Search" /> </form> <a href="http://localhost:81/school/24/main.php"><i class="fas fa-home"></i></a> </header> <main> <section class="sideBar"> <div class="sidebar__top"> <div class="sidebar__top__icon center"> <i class="fas fa-book-reader"></i></div><span>管理 Page</span></div> <div class="sideBar__link"> <a href="http://localhost:81/school/24/main.php"><i class="fas fa-book"></i><span>All Books</span></a> <a href="http://localhost:81/school/24/add.php"><i class="fas fa-file-alt"></i><span>Add Book</span></a> </div> </section> <section class="bookTable"> <div class="bookTable__title"> <span class=""><i class="fas fa-chevron-right"> {$title[1]}</i></span> <span>総 {$count}件</span> </div> <div class="bookTable__search"> <span><i class="fas fa-chevron-right">キーワード: <span id="word1" class="bookTable__keywords">{$words}</span>|| <span class="bookTable__keywords"> </span>除外キーワード: <span id="word2" class="bookTable__keywords">{$e_words}</span> </i></span></div> _html_; foreach ($arr as $key => $temp) { $bookNum = $key + 1; print <<<_html_ <div class="bookTable__content"> <div class="table1"><span>{$bookNum}</span> <img src="{$temp['img_url']}" /></div> <div class="table2"> <ul><li>書籍名:<span>{$temp['b_name']}</span></li> <li>著者名:<span>{$temp['b_author']}</span></li> <li>出版社:<span>{$temp['b_pub']}</span></li></ul> </div> <div class="table3"> <ul><li>出版日:<span>{$temp['b_pubdate']}</span></li> <li>購入日:<span>{$temp['b_buydate']}</span></li> <li>ISBN:<span>{$temp['b_isbn']}</span></li></ul> </div> <div class="table4"> <ul><li>価格:<span>{$temp['b_price']}</span>円</li></ul> </div> <div class="table5"> <form action="http://localhost:81/school/24/update.php" id="update{$bookNum}" method="GET"> <input type="hidden" name="update" value={$temp['b_isbn']} /> </form> <button class="bookTable__button__update center" type="submit" form="update{$bookNum}"> <i class="fas fa-edit"></i> </button> <form action="http://localhost:81/school/24/delete.php" method="POST" 
id="delete{$bookNum}"> <input type="hidden" name="delete" value={$temp['b_isbn']} /> </form> <button class="bookTable__button__delete center" type="submit" form="delete{$bookNum}"> <i class="fas fa-trash-alt"></i> </button> </div> </div> _html_; } print <<< html </section> </main> <script src="./js/noimg.js"></script> <script src="./js/sidebar.js"></script> <script src="./js/searchWord.js"></script> </body> </html> html; <file_sep><?php $a = "rewr dasda eqweq -dsadas -good"; $b = "-kana kana かな 仮名 -幹事 -かな"; $temp = preg_split("/[\s,]+/", $a, -1, PREG_SPLIT_NO_EMPTY); preg_match_all('/[-]\w+/', $a, $matches, PREG_PATTERN_ORDER); $pattern = "/[-]+[ぁ-ゔ]+|[-]+[ァ-ヴー]+[々〆〤]+|[-]+[一-龥]+|[-]\w+/"; $result = preg_replace($pattern, "", $b); $result2 = mb_ereg_replace($pattern, "", $b); var_dump($result); ?><file_sep><?php function paintToHtml($arr, $query, $words){ $count = count($arr); require_once 'paintToHtml.php'; } function fetchToDb($conn, $query){ $result = pg_query($conn, $query); if(!$result){ print "--------------------"; print "データの取得に失敗しました。Query文にエラーがあります。"; print "--------------------"; exit(1); }else{ $arr = pg_fetch_all($result); } return $arr; } function makeQuery($keyword){ $query = "SELECT img_url, b_isbn, b_name, b_author, b_pub, b_price, b_pubdate, b_buydate FROM zenglab2008 LEFT JOIN book_image ON b_isbn = isbn WHERE "; $count = count($keyword); foreach ($keyword as $key => $value){ $term = "'%".pg_escape_string($value)."%'"; $query .= "((lower(b_isbn) LIKE {$term} or lower(b_Name) LIKE {$term} or lower(b_author) LIKE {$term} or lower(b_pub) LIKE {$term}))"; $query .= ($key !== $count-1)?" 
OR " : ""; } $words = "[".implode(", ", $keyword)."]"; return array($query, $words); } function validateForm(){ if(strlen(trim($_POST['keyword'])) === 0 || strlen(trim($_POST['keyword']))=== null){ print "キーワードを入力してください。"; exit(1); } else { $temp = htmlspecialchars(strtolower($_POST['keyword'])); // 미리 대문자를 소문자로 엔코딩 $convertedKey = mb_convert_kana($temp, 's', 'utf-8'); // 일본어전각스페이스를 반각스페이스로 $keyToArr = preg_split("/[\s,]+/", $convertedKey, -1, PREG_SPLIT_NO_EMPTY); return $keyword = array_unique($keyToArr); } } function connectToDb(){ //You must replace your own code belog one; //$connect_query = 'host=localhost user=zeng dbname=bookdb_1981117'; $connect_query = 'host=localhost port=5432 dbname=bookdb_1981117 user=postgres password=<PASSWORD>'; $conn = pg_connect($connect_query); if(!$conn){ print "<hr>\n"; print "bookdbデータベースの接続に失敗しました。"; print preg_last_error($conn); print "<hr>\n"; exit(1); } return $conn; } function init(){ $conn = connectToDb(); $keyword = validateForm(); list($query, $words) = makeQuery($keyword); $result = fetchToDb($conn, $query); paintToHtml($result, $query, $words); pg_close($conn); } init(); ?><file_sep><?php echo $_GET['update']; <file_sep><?php //$connect_query = 'host=localhost user=zeng dbname=bookdb_1981117'; //you must put in this part instead of your own codes $connect_query = 'host=localhost port=5432 dbname=bookdb_1981117 user=postgres password=<PASSWORD>'; $conn = pg_connect($connect_query); $query = "SELECT img_url, b_isbn, b_name, b_author, b_pub, b_price, b_pubdate, b_buydate FROM zenglab2008 LEFT JOIN book_image ON b_isbn = isbn WHERE b_price >= 2000 AND b_price <5000"; if(!$conn){ print "<hr>\n"; print "bookdbデータベースへの接続に失敗しました。<br/>\n"; echo pg_last_error($con); print "<hr>\n"; exit(1); } $result = pg_query($conn, $query); $arr = pg_fetch_all($result); $count = count($arr); ?> <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" /> 
<title>Enshu20_pj2_02</title> <link rel="stylesheet" href="style.css" /> </head> <body> <header class="header center"> <div class="center"> <span class="header__title">図書データ一覧</span> </div> </header> <main class="main"> <div class="count"><span>総件数:<?= $count;?>件</span></div> <br /> <section class="grid"> <?php function paintToHtml($arr){ foreach ($arr as $num => $temp){ $setNum = $num+1; print <<<_html_ <div class="content"> <div class="book-image"> <span class="book-number">{$setNum}</span> <img src="{$temp['img_url']}"/> </div> <div class="book-contents"> <span>図書名:<span class="book-contents__title">{$temp['b_name']} </span></span> <span>著者名:{$temp['b_author']}</span> <span>出版社:{$temp['b_pub']}</span> <span>購入日:{$temp['b_buydate']}</span> <span>出版日:{$temp['b_pubdate']}</span> <span>価格:{$temp['b_price']}円</span> <span>ISBN:{$temp['b_isbn']}</span> </div> </div> _html_; } } //this function is ensurance for fucking situations /*function paintToHtml($arr){ foreach ($arr as $num => $temp){ $setNum = $num+1; print "<div class='content'>"; print "<div class='book-image'>"; print "<span class='book-number'>{$setNum}</span>"; print "<img src='".$temp['img_url']."'/></div>"; print "<div class='book-contents'>"; print "<span>図書名:<span class='book-contents__title'>".$temp['b_name']."</span></span>"; print "<span>著者名:{$temp['b_author']}</span>"; print "<span>出版社:{$temp['b_pub']}</span>"; print "<span>購入日:{$temp['b_buydate']}</span>"; print "<span>出版日:{$temp['b_pubdate']}</span>"; print "<span>価格:{$temp['b_price']}円</span>"; print "<span>ISBN:{$temp['b_isbn']}</span></div></div>"; } }*/ paintToHtml($arr); pg_close($conn); ?> </section> </main> <script src="noimg.js"></script> </body> </html> <file_sep>const project1 = document.querySelector(".leftBar__project1"), project2 = document.querySelector(".leftBar__project2"); const contents1 = document.querySelectorAll(".contents1"), contents2 = document.querySelectorAll(".contents2"); const HIDE = "hide"; function handler2() { 
contents2.forEach((list) => { list.classList.toggle(HIDE); }); } function handler1() { contents1.forEach((list) => { list.classList.toggle(HIDE); }); } function init() { project1.addEventListener("click", handler1); project2.addEventListener("click", handler2); } init(); <file_sep><!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>演習20(作成者:ベキゴン)</title> </head> <body> <h2>図書データ一覧</h2> <h3>データベースの全てのデータを表示するPHPプログラム。データベースのデータを一行づつ取り出し、デーブルにして表示する</h3> <?php //$con = pg_connect("host=localhost user=zeng dbname=bookdb_1981117"); //you must change this part instead of own yours $con = pg_connect('host=localhost port=5432 dbname=bookdb_1981117 user=postgres password=<PASSWORD>'); if(!$con){ print "<hr>\n"; print "bookdbデータベースへの接続に失敗しました。<br/>\n"; echo pg_last_error($con); print "<hr>\n"; exit(1); } $isbnToImgUrl = array(); $img_sql = "select isbn, img_url from book_image"; $img_result = pg_query($con, $img_sql); if(!$img_result){ print "<hr>\n"; print "book_imageへの問い合わせ文「".$img_sql."」の実行(データの検索)が失敗しました。<br>\n"; print pg_last_error($con); print "<hr>\n"; exit(1); } $img_rows = pg_num_rows($img_result); for($i = 0; $i < $img_rows; $i++){ $img_row = pg_fetch_array($img_result, $i); $isbnToImgUrl[$img_row['isbn']] = $img_row['img_url']; } $sql = "select * from zenglab2008"; $result = pg_query($con, $sql); if(!$result){ print "<hr>\n"; print "bookdbへ問い合わせ文「".$sql."」の実行(データの選択)が失敗しました。<br>\n"; print pg_last_error($con); print "<hr>\n"; exit(1); } $rows= pg_num_rows($result); ?> <table border="1" style="vertical-align : top; background-color:white;font-size:16px;width:100%;"> <tr align="left"><td colspan="2"> <?php print $rows;?> 件</td></tr> <tr style="text-align:center; background-color:#cccccc;"><td style="width:150px;">図書画像</td><td>図書データ</td></tr> <?php for($i = 0; $i < $rows; $i++){ $row = pg_fetch_array($result, $i); $book_img = isset($isbnToImgUrl[$row['b_isbn']]) ? 
$isbnToImgUrl[$row['b_isbn']] : "noimg.jpg"; print '<tr valign="middle">' . "\n"; print '<td style="text-align:center;"><img style="height:150px;" src="'.$book_img.'"></td>'."\n"; print "<td><ul>"; print "<li>ISBN: ".$row['b_isbn']."</li>\n"; print "<li>図書名:".$row['b_name']."</li>\n"; print "<li>著者:".$row['b_author']."</li>\n"; print "<li>出版社:".$row['b_pub']."</li>\n"; print "<li>価格:&yen;".number_format($row['b_price'])."</li>\n"; print "<li>出版日付:".$row['b_pubdate'].str_repeat("&nbsp;", 40-strlen($row['b_pubdate']))."購入日付:".$row['b_buydate']."</li>\n"; print "</ul></td>\n"; print "</tr>\n"; } print "</table>\n"; pg_close($con); ?> </body> </html><file_sep>const pjBtn2 = document.querySelectorAll(".contents"); const subject = document.querySelectorAll(".text"); const docu = document.querySelector(".rightContents"); const endOfPage = docu.offsetHeight; function handler(event) { const id = event.target.id; const target = `docu${id}`; subject.forEach((list) => { const subId = list.id; if (subId === target) { const coord = list.offsetTop; window.scrollTo(0, coord); } }); } function init() { pjBtn2.forEach((list) => { list.addEventListener("click", handler); }); window.scrollTo(0, endOfPage); } init(); <file_sep><?php if ($_SERVER['REQUEST_METHOD'] === 'POST') { function goToHome() { print "<script>alert('本データのが削除されました')</script>"; //이부분은 서버로 이동시 반드시 학교 서버에 맞춰서 바꿀것. 
print "<script>window.location.assign('http://localhost:81/school/24/main.php');</script>"; exit(1); } function deleteRow($conn, $bookStatus, $bookImage) { $result1 = pg_query($conn, $bookImage); if (!$result1) { print "Query文かDBの接続にエラーがあります。"; exit(1); } else { sleep(1); } $result2 = pg_query($conn, $bookStatus); if (!$result2) { print "Query文かDBの接続にエラーがあります。"; exit(1); } else { goToHome(); } pg_close($conn); } function makeQuery() { $target = $_POST['delete']; $bookStatus = "DELETE FROM zenglab2008 WHERE b_isbn = '{$target}'"; $bookImage = "DELETE FROM book_image WHERE isbn = '{$target}'"; return array($bookStatus, $bookImage); } function connectToDb() { require 'db.php'; $temp = new Db; $conn = $temp->connectDb(); if (!$conn) { print "<hr>\n"; print "bookdbデータベースの接続に失敗しました。"; print preg_last_error($conn); print "<hr>\n"; exit(1); } return $conn; } function init() { $conn = connectToDb(); list($bookStatus, $bookImage) = makeQuery(); deleteRow($conn, $bookStatus, $bookImage); } init(); } <file_sep>const img = document.querySelectorAll("img"); const errors = { error1: '<img src="noimg.jpg">', error2: '<img src="">', }; function init() { img.forEach((list) => { const temp = list.outerHTML; if (temp === errors.error1 || temp === errors.error2) { list.src = "https://upload.wikimedia.org/wikipedia/commons/thumb/4/41/Noimage.svg/130px-Noimage.svg.png"; } }); } init(); <file_sep><?php function paintToHtml($arr) { if (!$arr) { $count = 0; } else { $count = count($arr); } $title = ["Main page", "全図書データ"]; require_once 'paintToHtml.php'; } function fetchToDb($conn) { $query = "SELECT img_url, b_isbn, b_name, b_author, b_pub, b_price, b_pubdate, b_buydate FROM zenglab2008 LEFT JOIN book_image ON b_isbn = isbn"; $result = pg_query($conn, $query); if (!$result) { print "--------------------"; print "データの取得に失敗しました。Query文にエラーがあります。"; print "--------------------"; exit(1); } else { $arr = pg_fetch_all($result); } pg_close($conn); return $arr; } function connectToDb() { require 
'db.php'; $temp = new Db; $conn = $temp->connectDb(); if (!$conn) { print "<hr>\n"; print "bookdbデータベースの接続に失敗しました。"; print preg_last_error($conn); print "<hr>\n"; exit(1); } return $conn; } function init() { $conn = connectToDb(); $arr = fetchToDb($conn); paintToHtml($arr); } init(); <file_sep><?php //필요한건 접속 데이터 취득 html화 function paintToHtml($array = array()){ } function fetchFromDb($conn, $query){ $temp = pg_query($conn, $query); if(!$temp){ print "query文にエラーがあります"; echo pg_last_error($temp); }else{ $result = pg_fetch_all($temp); return $result; } } function connectToDb(){ $connect_query = 'host=localhost port=5432 dbname=bookdb_1981117 user=postgres password=<PASSWORD>'; $conn = pg_connect($connect_query); if(!$conn){ print "bookdbデータベースへの接続に失敗しました。<br/>"; echo pg_last_error($conn); exit(1); }else{ return $conn; } } function init(){ $conn = connectToDb(); $query = "SELECT img_url, b_isbn, b_name, b_author, b_pub, b_price, b_pubdate, b_buydate FROM zenglab2008 LEFT JOIN book_image ON b_isbn = isbn WHERE b_price >= 3000"; $result = fetchFromDb($conn, $query); paintToHtml($result); } init(); ?><file_sep><?php try { $con = pg_connect('host=localhost port=543 dbname=bookdb_1981117 user=postgres password=<PASSWORD>'); // $fetch = pg_query($con, "select count(*) from zenglab2008"); // $temp = pg_fetch_array($fetch); // var_dump($temp); }catch (Exception $e){ // echo "다음과 같은 오류가 발생했습니다".$e->getMessage(); // echo pg_last_error($con); } ?><file_sep>const searchWord1 = document.querySelector("#word1"), searchWord2 = document.querySelector("#word2"), searchTag = document.querySelector(".bookTable__search"); const VANISH = "vanish"; function handler() { const word1 = searchWord1.innerText, word2 = searchWord2.innerText; if (word1 === "" && word2 === "") { searchTag.classList.add(VANISH); } else { searchTag.classList.remove(VANISH); } } function init() { handler(); } init(); <file_sep><?php function paintToHtml($arr, $words, $e_words) { if (!$arr) { $count = 0; } else { 
$count = count($arr); } $title = ["Search page", "図書検索データ"]; require_once 'paintToHtml.php'; } function fetchToDb($conn, $query) { $result = pg_query($conn, $query); if (!$result) { print "--------------------"; print "データの取得に失敗しました。Query文にエラーがあります。"; print "--------------------"; exit(1); } else { $arr = pg_fetch_all($result); } return $arr; } function makeQuery($keyword, $e_words = array()) { $query = "SELECT img_url, b_isbn, b_name, b_author, b_pub, b_price, b_pubdate, b_buydate FROM zenglab2008 LEFT JOIN book_image ON b_isbn = isbn WHERE "; $count = count($keyword); foreach ($keyword as $key => $value) { $term = "'%" . pg_escape_string($value) . "%'"; $query .= "((lower(b_isbn) LIKE {$term} or lower(b_Name) LIKE {$term} or lower(b_author) LIKE {$term} or lower(b_pub) LIKE {$term})) "; if ($e_words) { foreach ($e_words as $word) { $e_term = "'%" . pg_escape_string($word) . "%'"; $e_query .= " AND ((lower(b_isbn) NOT LIKE ({$e_term}) and (lower(b_name) NOT LIKE ({$e_term}) and (lower(b_author) NOT LIKE ({$e_term}) and (lower(b_pub) NOT LIKE ({$e_term}))))))"; } $query .= $e_query; } $query .= ($key !== $count - 1) ? " OR " : ""; } $words = "[" . implode(", ", $keyword) . "]"; return array($query, $words); } function validateForm() { if (strlen(trim($_POST['keyword'])) === 0 || strlen(trim($_POST['keyword'])) === null) { print "キーワードを入力してください。"; exit(1); } else { //최초에는 '/[-]\w+/' 정규표현식을 썼으나 일본어 단어를 잡아내지 못하는 단점이 있었음 //그래서 내 나름대로 궁리한 끝에 -가 붙은 일본어와 한자, 영단어까지 모두 잡아내는 표현식을 만듬. 
$pattern = "/[-]+[ぁ-ゔ]+|[-]+[ァ-ヴー]+[々〆〤]+|[-]+[一-龥]+|[-]\w+/"; $temp = htmlspecialchars(strtolower($_POST['keyword'])); // 미리 대문자를 소문자로 엔코딩 $convertedKey = mb_convert_kana($temp, 's', 'utf-8'); // 일본어전각스페이스를 반각스페이스로 $keyToArr = preg_split("/[\s,]+/", $convertedKey, -1, PREG_SPLIT_NO_EMPTY); //$tempArr= preg_replace("/[-]\w+/", "", $keyToArr); $tempArr = preg_replace($pattern, "", $keyToArr); //preg_match_all('/[-]\w+/', $convertedKey, $except_word, PREG_PATTERN_ORDER); preg_match_all($pattern, $convertedKey, $except_word, PREG_PATTERN_ORDER); foreach ($except_word as $temp) { $exception_array = str_replace("-", "", $temp); foreach ($temp as $value) { $exception_word .= "[" . $value . "] "; } } $keyword = array_unique($tempArr); $unseted = array(); for ($i = 0; $i < count($keyword); $i++) { if ($keyword[$i] === "") { unset($keyword[$i]); } else { $unseted[] = $keyword[$i]; } } return array($unseted, $exception_array, $exception_word); } } function connectToDb() { require 'db.php'; $temp = new Db; $conn = $temp->connectDb(); if (!$conn) { print "<hr>\n"; print "bookdbデータベースの接続に失敗しました。"; print preg_last_error($conn); print "<hr>\n"; exit(1); } return $conn; } function init() { $conn = connectToDb(); list($keyword, $exception_array, $exception_word) = validateForm(); list($query, $query_words) = makeQuery($keyword, $exception_array); $result = fetchToDb($conn, $query); paintToHtml($result, $query_words, $exception_word); } init(); <file_sep><?php if ($_SERVER['REQUEST_METHOD'] === "POST") { function goToHome() { print "<script>alert('本データが追加されました')</script>"; //이부분은 서버로 이동시 반드시 학교 서버에 맞춰서 바꿀것. 
print "<script>window.location.assign('http://localhost:81/school/24/main.php');</script>"; exit(1); } function addToDb($conn, $bookStatus, $bookImage) { var_dump($bookStatus); $result1 = pg_query($conn, $bookStatus); if (!$result1) { print "Query文かDBの接続にエラーがあります。"; var_dump($result1); exit(1); } else { sleep(1); } $result2 = pg_query($conn, $bookImage); if (!$result2) { print "Query文かDBの接続にエラーがあります"; //var_dump($result2); exit(1); } else { goToHome(); } } function makeQuery($keywords) { $isbn = $keywords['isbn']; $name = $keywords['name']; $author = $keywords['author']; $pub = $keywords['pub']; $price = $keywords['price']; $pubdate = $keywords['pubdate']; $buydate = $keywords['buydate']; $url = $keywords['url']; $bookStatus = "INSERT INTO zenglab2008(b_isbn, b_name, b_author, b_pub, b_price, b_pubdate, b_buydate) VALUES('{$isbn}', '{$name}', '{$author}', '{$pub}', {$price}, '{$pubdate}', '{$buydate}')"; $bookImage = "INSERT INTO book_image(isbn, img_url) VALUES('{$isbn}', '{$url}')"; return array($bookStatus, $bookImage); } function validate_form($data) { $confirmed_data = array(); foreach ($data as $key => $value) { $value = mb_convert_kana($value, 's', "UTF-8"); $value = htmlentities(trim($value), ENT_QUOTES, "UTF-8"); if ($key !== "url") { //이번에는 책의 이미지가 필요해서 url이미지의 필터링을 헤제하지만, 이건 굉장히 위험한 방법이다. 
$value = preg_replace("/[\s\t\'\;\=]+/", "", $value); // 탭이나 특수문자 제거 $value = preg_replace("/\s{1,}1\=(.*)+/", "", $value); //공백뒤에 1=1 등 을 제거 $value = preg_replace("/\s{1,}(or|and|null|where|limit)/i", "", $value); //공백이후 and or null where등 sql명령어 제거 } if ($key === "price") { $value = intval($value); } $confirmed_data[$key] = $value; } return $confirmed_data; } function connectToDb() { require 'db.php'; $temp = new Db; $conn = $temp->connectDb(); if (!$conn) { print "<hr>\n"; print "bookdbデータベースの接続に失敗しました。"; print preg_last_error($conn); print "<hr>\n"; exit(1); } return $conn; } function init() { $conn = connectToDb(); $data = validate_form($_POST); list($bookStatus, $bookImage) = makeQuery($data); addToDb($conn, $bookStatus, $bookImage); pg_close($conn); } init(); } else { function paintToHtml() { $title = ["Add page", "図書データ登録"]; $URL = "http://localhost:81/school/24/add.php"; require_once "paintToHtml2.php"; } paintToHtml(); } <file_sep><?php function connectToDb() { require 'db.php'; $temp = new Db; $conn = $temp->connectDb(); if (!$conn) { print "<hr>\n"; print "bookdbデータベースの接続に失敗しました。"; print preg_last_error($conn); print "<hr>\n"; exit(1); } return $conn; } $conn = connectToDb(); if ($_SERVER['REQUEST_METHOD'] === 'POST') { function goToHome() { print "<script>alert('本データのが変更されました')</script>"; //이부분은 서버로 이동시 반드시 학교 서버에 맞춰서 바꿀것. 
print "<script>window.location.assign('http://localhost:81/school/24/main.php');</script>"; exit(1); } function updateToDb($conn, $bookStatus, $bookImage) { $result1 = pg_query($conn, $bookStatus); if (!$result1) { print "Query文かDBの接続にエラーがあります。"; exit(1); } else { sleep(1); } $result2 = pg_query($conn, $bookImage); if (!$result2) { print "Query文かDBの接続にエラーがあります。"; exit(1); } else { goToHome(); } } function makeQuery($keywords) { $isbn = $keywords['isbn']; $name = $keywords['name']; $author = $keywords['author']; $pub = $keywords['pub']; $price = $keywords['price']; $pubdate = $keywords['pubdate']; $buydate = $keywords['buydate']; $url = $keywords['url']; $bookStatus = "UPDATE zenglab2008 SET b_isbn='{$isbn}', b_name='{$name}', b_author='{$author}', b_pub='{$pub}', b_price={$price}, b_pubdate='{$pubdate}', b_buydate='{$buydate}' WHERE b_isbn = '{$isbn}'"; $bookImage = "UPDATE book_image SET isbn='{$isbn}', img_url='{$url}' WHERE isbn = '{$isbn}'"; return array($bookStatus, $bookImage); } function validate_form($data) { $confirmed_data = array(); foreach ($data as $key => $value) { $value = mb_convert_kana($value, 's', "UTF-8"); $value = htmlentities(trim($value), ENT_QUOTES, "UTF-8"); if ($key !== "url") { //이번에는 책의 이미지가 필요해서 url이미지의 필터링을 헤제하지만, 이건 굉장히 위험한 방법이다. 
$value = preg_replace("/[\s\t\'\;\=]+/", "", $value); // 탭이나 특수문자 제거 $value = preg_replace("/\s{1,}1\=(.*)+/", "", $value); //공백뒤에 1=1 등 을 제거 $value = preg_replace("/\s{1,}(or|and|null|where|limit)/i", "", $value); //공백이후 and or null where등 sql명령어 제거 } if ($key === "price") { $value = intval($value); } $confirmed_data[$key] = $value; } return $confirmed_data; } function init() { global $conn; $data = validate_form($_POST); list($bookStatus, $bookImage) = makeQuery($data); updateToDb($conn, $bookStatus, $bookImage); pg_close($conn); } init(); } else { function paintToHtml($arr) { $title = ["Update page", "図書データ変更"]; $URL = "http://localhost:81/school/24/update.php"; require_once "paintToHtml2.php"; // var_dump($arr); } function fetchToDb($conn, $query) { $result = pg_query($conn, $query); if (!$result) { print "--------------------"; print "データの取得に失敗しました。Query文にエラーがあります。"; print "--------------------"; exit(1); } else { $arr = pg_fetch_assoc($result); } pg_close($conn); return $arr; } function makeQuery() { $targetNum = $_GET['update']; $query = "SELECT img_url, b_isbn, b_name, b_author, b_pub, b_price, b_pubdate, b_buydate FROM zenglab2008 LEFT JOIN book_image ON b_isbn = isbn WHERE b_isbn='{$targetNum}'"; return $query; } function init() { global $conn; $query = makeQuery(); $arr = fetchToDb($conn, $query); paintToHtml($arr); } init(); } <file_sep><?php function query($conn) { $temp = "%PHP%"; $temp = pg_escape_string($temp); $query = "SELECT * FROM zenglab2008 WHERE b_name LIKE" . 
$temp; /* $temp = pg_query($conn, $query); $result = pg_fetch_all($temp); */ var_dump($query); } function connectDb() { //You must replace your own code belog one; //$connect_query = 'host=localhost user=zeng dbname=bookdb_1981117'; $conn = pg_connect('host=localhost port=5432 dbname=bookdb_1981117 user=postgres password=<PASSWORD>'); if ($error = pg_last_error($conn)) { print $error; }; return $conn; } function init() { $conn = connectDb(); query($conn); } init(); <file_sep>const toggle = document.querySelectorAll(".text__subTitle__content__title"); const SHOW = "show"; function init() { toggle.forEach((list) => { list.addEventListener("click", function () { const panel = this.nextElementSibling; panel.classList.toggle(SHOW); }); }); } init(); <file_sep><?php //데이터베이스 접속 클래스. class Db { function connectDb() { //You must replace your own code belog one; //$connect_query = 'host=localhost user=zeng dbname=bookdb_1981117'; $conn = pg_connect('host=localhost port=5432 dbname=bookdb_1981117 user=postgres password=<PASSWORD>'); if ($error = pg_last_error($conn)) { print $error; }; return $conn; } } <file_sep><?php try{ $db = new PDO('pgsql:host=localhost; port=5432; dbname=bookdb_1981117','postgres','angels'); $db->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION); }catch (PDOException $e){ print "다음과 같은 에러가 발생했습니다." . 
$e->getMessage(); } function test($db){ $temp = $db->query("select * from zenglab2008"); $data = $temp->fetchAll(PDO::FETCH_ASSOC); echo count($data); } test($db); ?><file_sep><?php print <<<_html_ <!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" /> <title>Enshu21_pj2_21_01</title> <link rel="stylesheet" href="style.css" /> </head> <body> <header class="header center"> <div class="center"> <span class="header__title">図書データ一覧</span> </div> </header> <main class="main"> <div class="count"><span>総件数:<?= $count;?>件</span></div> <br /> <section class="grid"> _html_; foreach ($arr as $num => $temp){ $setNum = $num+1; print <<<_html_ <div class="content"> <div class="book-image"> <span class="book-number">{$setNum}</span> <img src="{$temp['img_url']}"/> </div> <div class="book-contents"> <span>図書名:<span class="book-contents__title">{$temp['b_name']} </span></span> <span>著者名:{$temp['b_author']}</span> <span>出版社:{$temp['b_pub']}</span> <span>購入日:{$temp['b_buydate']}</span> <span>出版日:{$temp['b_pubdate']}</span> <span>価格:{$temp['b_price']}円</span> <span>ISBN:{$temp['b_isbn']}</span> </div> </div> _html_; } print <<<_html_ </section> </main> <script src="noimg.js"></script> </body> </html> _html_; ?> <file_sep># SCHOOL-Subject
926d221d704fd5b47677135af6f2d3e81d3fc83d
[ "JavaScript", "Markdown", "PHP" ]
25
PHP
vanti113/SCHOOL-Subject
abf9808c673b7e777ddacea4e09b5dd6e1063e1c
21eb19b0b0875708b7d58cd783ad1440aa56f047
refs/heads/master
<repo_name>fengziboy/beta<file_sep>/util/DebianPackager.py import os # For file path correction import hashlib # sha256 for Release file import re # regex import json # Used to parse various JSON files from subprocess import call # call dpkg-deb from pydpkg import Dpkg # Retrieve data from DEBs from util.PackageLister import PackageLister from util.DpkgPy import DpkgPy class DebianPackager(object): """ DebianPackager处理功能性回购和交易 带DPKG DEB和DPKG扫描包。 """ def __init__(self, version): super(DebianPackager, self).__init__() self.version = version self.root = os.path.dirname(os.path.abspath(__file__)) + "/../" self.PackageLister = PackageLister(self.version) def CompileRelease(self, repo_settings): """ 从Tweak_数据对象编译控制文件 对象报告设置:报告设置的对象。 """ release_file = "Origin: " + repo_settings['name'] + "\n" release_file += "Label: " + repo_settings['name'] + "\n" release_file += "Suite: stable\n" release_file += "Version: ©2019\n" release_file += "Codename: IOS\n" release_file += "Architectures: iphoneos-arm\n" release_file += "Components: main\n" release_file += "Description: " + repo_settings['description'] + "\n" return release_file def CompileControl(self, tweak_data, repo_settings): """ 从Tweak_数据对象编译控制文件 对象调整数据:“调整发布”对象的单个索引。 对象报告设置:报告设置的对象. """ subfolder = PackageLister.FullPathCname(self, repo_settings) control_file = "Architecture: iphoneos-arm\n" # Mandatory properties include name, bundle id, and version. 
control_file += "Package: " + tweak_data['bundle_id'] + "\n" control_file += "Name: " + tweak_data['name'] + "\n" control_file += "Version: " + tweak_data['version'] + "\n" # Known properties control_file += "Depiction: https://" + repo_settings['cname'] + subfolder + "/depiction/web/" + tweak_data['bundle_id'] \ + ".html\n" control_file += "Homepage: https://" + repo_settings['cname'] + subfolder + "/depiction/web/" + tweak_data['bundle_id'] \ + ".html\n" control_file += "SileoDepiction: https://" + repo_settings['cname'] + subfolder + "/depiction/native/" \ + tweak_data['bundle_id'] + ".json\n" control_file += "ModernDepiction: https://" + repo_settings['cname'] + subfolder + "/depiction/native/" \ + tweak_data['bundle_id'] + ".json\n" control_file += "Icon: https://" + repo_settings['cname'] + subfolder + "/assets/" + tweak_data['bundle_id'] + "/icon.png\n" try: if repo_settings['maintainer']['email']: control_file += "Maintainer: " + repo_settings['maintainer']['name'] + " <" \ + repo_settings['maintainer']['email'] + ">\n" except Exception: control_file += "Maintainer: " + repo_settings['maintainer']['name'] + ">\n" # Optional properties try: if tweak_data['tagline']: control_file += "Description: " + tweak_data['tagline'] + "\n" except Exception: control_file += "描述:一个很棒的包!\n" try: if tweak_data['Support']: control_file += "Support: " + tweak_data['Support'] + "\n" except Exception: control_file += "描述:支持不支持!\n" try: if tweak_data['Installed-Size']: control_file += "Installed-Size: " + tweak_data['Installed-Size'] + "\n" except Exception: control_file += "描述:插件的大小!\n" try: if tweak_data['section']: control_file += "Section: " + tweak_data['section'] + "\n" except Exception: control_file += "Section: Unknown\n" try: if tweak_data['pre_dependencies']: control_file += "Pre-Depends: " + tweak_data['pre_dependencies'] + "\n" except Exception: pass try: if tweak_data['dependencies']: control_file += "Depends: firmware (>=" + tweak_data['works_min'] + "), " + 
tweak_data[ 'dependencies'] + "\n" except Exception: control_file += "Depends: firmware (>=" + tweak_data['works_min'] + ")\n" try: if tweak_data['conflicts']: control_file += "Conflicts: " + tweak_data['conflicts'] + "\n" except Exception: pass try: if tweak_data['replaces']: control_file += "Replaces: " + tweak_data['replaces'] + "\n" except Exception: pass try: if tweak_data['provides']: control_file += "Provides: " + tweak_data['provides'] + "\n" except Exception: pass try: if tweak_data['build_depends']: control_file += "Build-Depends: " + tweak_data['build_depends'] + "\n" except Exception: pass try: if tweak_data['recommends']: control_file += "Recommends: " + tweak_data['recommends'] + "\n" except Exception: pass try: if tweak_data['suggests']: control_file += "Suggests: " + tweak_data['suggests'] + "\n" except Exception: pass try: if tweak_data['enhances']: control_file += "Enhances: " + tweak_data['enhances'] + "\n" except Exception: pass try: if tweak_data['breaks']: control_file += "Breaks: " + tweak_data['breaks'] + "\n" except Exception: pass try: if tweak_data['developer']: try: if tweak_data['developer']['email']: control_file += "Author: " + tweak_data['developer']['name'] + " <" + tweak_data['developer'][ 'email'] + ">\n" except Exception: control_file += "Author: " + tweak_data['developer']['name'] + "\n" except Exception: control_file += "Author: Unknown\n" try: if tweak_data['sponsor']: try: if tweak_data['sponsor']['email']: control_file += "Sponsor: " + tweak_data['sponsor']['name'] + " <" + tweak_data['sponsor'][ 'email'] + ">\n" except Exception: control_file += "Sponsor: " + tweak_data['sponsor']['name'] + ">\n" except Exception: pass # other_control try: if tweak_data['other_control']: for line in tweak_data['other_control']: control_file += line + "\n" except Exception: pass return control_file def CreateDEB(self, bundle_id, recorded_version): """ 根据存储在“temp”文件夹中的信息创建DEB。 字符串束ID:要压缩的包的束ID。 """ # TODO: Find a Python-based method to safely 
delete all DS_Store files. call(["find", ".", "-name", ".DS_Store", "-delete"], cwd=self.root + "temp/" + bundle_id) # Remove .DS_Store. Kinda finicky. for file_name in os.listdir(self.root + "temp/" + bundle_id): if file_name.endswith(".deb"): # Check if the DEB is a newer version deb = Dpkg(self.root + "temp/" + bundle_id + "/" + file_name) if Dpkg.compare_versions(recorded_version, deb.version) == -1: # Update package stuff package_name = PackageLister.BundleIdToDirName(self, bundle_id) with open(self.root + "Packages/" + package_name + "/silica_data/index.json", "r") as content_file: update_json = json.load(content_file) update_json['version'] = deb.version changelog_entry = input("这个插件 \"" + update_json['name'] + "\" 有新版本可用 (" + recorded_version + " -> " + deb.version + "). 此版本中的更改?\n(添加多行" + " 使用换行符 [\\n] 并使用有效的标记语法.): " ) try: update_json['changelog'].append({ "version": deb.version, "changes": changelog_entry }) except Exception: update_json['changelog'] = { "version": deb.version, "changes": changelog_entry } return_str = json.dumps(update_json) print("更新包index.json...") PackageLister.CreateFile(self, "Packages/" + package_name + "/silica_data/index.json", return_str) pass DpkgPy.extract(self, self.root + "temp/" + bundle_id + "/" + file_name, self.root + "temp/" + bundle_id) os.remove(self.root + "temp/" + bundle_id + "/" + file_name) os.remove(self.root + "temp/" + bundle_id + "/control") else: # TODO: Update DpkgPy to generate DEB files without dependencies (for improved win32 support) call(["dpkg-deb", "-b", "-Zgzip", self.root + "temp/" + bundle_id], cwd=self.root + "temp/") # Compile DEB def CheckForSilicaData(self): """ 确保Silica_数据文件存在,如果不存在,请尝试使用尽可能多的数据创建一个。 如果有一个DEB文件,它将从其控制文件中获取数据。它还将自动更新版本号。 如果没有deb文件,它将使用文件夹的名称,版本1.0.0,尝试猜测一些依赖项, 并添加一些占位符数据。 :return: """ for folder in os.listdir(self.root + "Packages"): if folder.lower() != ".ds_store": if not os.path.isdir(self.root + "Packages/" + folder + "/silica_data"): print("好像是包裹 \"" + folder + "\" 
未配置。让我们把它成立!") is_deb = False deb_path = "" try: for file_name in os.listdir(self.root + "Packages/" + folder): if file_name.endswith(".deb"): is_deb = True deb_path = self.root + "Packages/" + folder + "/" + file_name except Exception: print("\033[91m- 配置错误! -\n" "请将.deb文件放在自己的文件夹中。这个 \"Packages\" 目录" " 应该由多个文件夹组成,每个文件夹包含单个包的数据.\n" "请解决此问题并重试.\033[0m") quit() # T他的将是我们包的默认脚手架。最后,我会把它阉割成 # 基本要素;也有点像是对我的参考. output = { "bundle_id": "co.shuga.silica.unknown", "name": "Unknown Package", "version": "1.0.0", "tagline": "An unknown package.", "Installed-Size": "未知.", "homepage": "https://sileo-cydia.github.io", "developer": { "name": "Unknown", "email": "<EMAIL>" }, "section": "Themes", "works_min": "8.0", "works_max": "12.12", "featured": "false" } if is_deb: print("从DEB中提取数据...") deb = Dpkg(deb_path) output['name'] = deb.headers['Name'] output['bundle_id'] = deb.headers['Package'] try: output['tagline'] = deb.headers['Description'] except Exception: output['tagline'] = input("包裹的简要描述是什么?") try: output['Installed-Size'] = deb.headers['Installed-Size'] except Exception: output['Installed-Size'] = input("包裹的大小 ") try: output['Support'] = deb.headers['Support'] except Exception: output['Support'] = input("描述/支持/不支持 ") try: remove_email_regex = re.compile('<.*?>') output['developer']['name'] = remove_email_regex.sub("", deb.headers['Author']) except Exception: output['developer']['name'] = input("这个包裹是谁做的?这可能是你的名字. ") output['developer']['email'] = input("作者的电子邮件地址是什么? 
") output['email'] = "<EMAIL>" try: output['sponsor']['name'] = deb.headers['Sponsor'] except Exception: pass try: output['dependencies'] = deb.headers['Depends'] except Exception: pass try: output['section'] = deb.headers['Section'] except Exception: pass try: output['version'] = deb.headers['Version'] except Exception: output['version'] = "1.0.0" try: output['conflicts'] = deb.headers['Conflicts'] except Exception: pass try: output['replaces'] = deb.headers['Replaces'] except Exception: pass try: output['provides'] = deb.headers['Provides'] except Exception: pass try: output['build_depends'] = deb.headers['Build-Depends'] except Exception: pass try: output['recommends'] = deb.headers['Recommends'] except Exception: pass try: output['suggests'] = deb.headers['Suggests'] except Exception: pass try: output['enhances'] = deb.headers['Enhances'] except Exception: pass try: output['breaks'] = deb.headers['Breaks'] except Exception: pass try: output['suggests'] = deb.headers['Suggests'] except Exception: pass # These still need data. output['works_min'] = input("软件包的最低iOS版本是什么? ") output['works_max'] = input("该软件包适用的最高iOS版本是什么? ") output['featured'] = input("你的回购协议上应该有这个套餐吗?(真/假)(true/false) ") print("都做完了!请查看生成的 \"index.json\" 文件,并考虑填充" " \"silica_data\" 包含说明、屏幕截图和图标的文件夹.") # Extract Control file from DEB DpkgPy.control_extract(self, deb_path, self.root + "Packages/" + folder + "/silica_data/scripts/") # Remove the Control; it's not needed. os.remove(self.root + "Packages/" + folder + "/silica_data/scripts/Control") if not os.listdir(self.root + "Packages/" + folder + "/silica_data/scripts/"): os.rmdir(self.root + "Packages/" + folder + "/silica_data/scripts/") else: print("正在估计依赖项…") # Use the filesystem to see if Zeppelin, Anemone, LockGlyph, XenHTML, and similar. # If one of these are found, set it as a dependency. # If multiple of these are found, use a hierarchy system, with Anemone as the highest priority, # for determining the category. 
output['dependencies'] = "" output['section'] = "Themes" if os.path.isdir(self.root + "Packages/" + folder + "/Library/Zeppelin"): output['section'] = "Themes (Zeppelin)" output['dependencies'] += "com.alexzielenski.zeppelin, " if os.path.isdir(self.root + "Packages/" + folder + "/Library/Application Support/LockGlyph"): output['section'] = "Themes (LockGlyph)" output['dependencies'] += "com.evilgoldfish.lockglypgh, " if os.path.isdir(self.root + "Packages/" + folder + "/var/mobile/Library/iWidgets"): output['section'] = "Widgets" output['dependencies'] += "com.matchstic.xenhtml, " if os.path.isdir(self.root + "Packages/" + folder + "/Library/Wallpaper"): output['section'] = "Wallpapers" if os.path.isdir(self.root + "Packages/" + folder + "/Library/Themes"): output['section'] = "Themes" output['dependencies'] += "com.anemonetheming.anemone, " if output['dependencies'] != "": output['dependencies'] = output['dependencies'][:-2] repo_settings = PackageLister.GetRepoSettings(self) # Ask for name output['name'] = input("我们应该如何命名这个包? ") # Automatically generate a bundle ID from the package name. domain_breakup = repo_settings['cname'].split(".")[::-1] only_alpha_regex = re.compile('[^a-zA-Z]') machine_safe_name = only_alpha_regex.sub("",output['name']).lower() output['bundle_id'] = ".".join(str(x) for x in domain_breakup) + "." + machine_safe_name output['tagline'] = input("包裹的简要描述是什么? ") output['Installed-Size'] = input("包裹的大小? ") # I could potentially default this to what is in settings.json but attribution may be an issue. output['developer']['name'] = input("这个包裹是谁做的?这很可能是你的名字。 ") output['developer']['email'] = input("作者的电子邮件地址是什么? ") output['works_min'] = input("W该软件包的最低iOS版本是什么? ") output['works_max'] = input("该软件包的最高iOS版本是什么? 
") output['featured'] = input("你的回购协议上应该有这个套餐吗?(true/false) ") PackageLister.CreateFolder(self, "Packages/" + folder + "/silica_data/") PackageLister.CreateFile(self, "Packages/" + folder + "/silica_data/index.json", json.dumps(output)) def CompilePackages(self): """ Creates a Packages.bz2 file. """ # TODO: Update DpkgPy to generate DEB files without dependencies (for improved win32 support) call(["dpkg-scanpackages", "-m", "."], cwd=self.root + "docs/", stdout=open(self.root + "docs/Packages", "w")) # For this, we're going to have to run it and then get the output. From here, we can make a new file. call(["bzip2", "Packages"], cwd=self.root + "docs/") def SignRelease(self): """ 签署release以创建release.gpg。还为packages.bz2在发行版中添加哈希. """ with open(self.root + "docs/Packages.bz2", "rb") as content_file: bzip_raw = content_file.read() bzip_sha256_hash = hashlib.sha256(bzip_raw).hexdigest() bzip_size = os.path.getsize(self.root + "docs/Packages.bz2") with open(self.root + "docs/Release", "a") as text_file: text_file.write("\nSHA256:\n " + str(bzip_sha256_hash) + " " + str(bzip_size) + " Packages.bz2") key = "Silica MobileAPT Repository" # Most of the time, this is acceptable. call(["gpg", "-abs", "-u", key, "-o", "Release.gpg", "Release"], cwd=self.root + "docs/") def PushToGit(self): """ 提交并将repo推送到Git服务器(可能是GitHub). """ # TODO: use GitPython instead of calling Git directly. call(["git", "add", "."], cwd=self.root) call(["git", "commit", "-am", "通过二氧化硅更新回购内容"], cwd=self.root) call(["git", "push"], cwd=self.root) <file_sep>/util/DpkgPy.py import arpy import tarfile class DpkgPy: """ dpkgpy是一个python库,设计用于在纯python中创建和操作debian包。 除了其他Python库之外,它没有依赖项。 (c)2019 Shuga控股公司。版权所有! 
""" def __init__(self): super(DpkgPy, self).__init__() def extract(self, input_path, output_path): """ 从DEB文件中提取数据。 :param input_path:要提取的DEB文件路径的字符串。 :param output_path:放置提取的DEB的文件路径字符串。文件夹必须已经存在。 :返回:有关提取是否成功或失败的布尔值。 """ try: root_ar = arpy.Archive(input_path) root_ar.read_all_headers() try: data_bin = root_ar.archived_files[b'data.tar.gz'] data_tar = tarfile.open(fileobj=data_bin) data_tar.extractall(output_path) except Exception: data_theos_bin = root_ar.archived_files[b'data.tar.lzma'] data_theos_bin.seekable = lambda: True data_theos_tar = tarfile.open(fileobj=data_theos_bin, mode='r:xz') data_theos_tar.extractall(output_path) # This is an actual Python/lzma implementation bug from the looks of it. control_bin = root_ar.archived_files[b'control.tar.gz'] control_tar = tarfile.open(fileobj=control_bin) control_tar.extractall(output_path) return True except Exception: return False def control_extract(self, input_path, output_path): """ 仅从DEB中提取控制文件 :param input_path:要提取的DEB文件路径的字符串。 :param output_path:放置提取的DEB的文件路径字符串。文件夹必须已经存在。 :返回:有关提取是否成功或失败的布尔值。 """ try: root_ar = arpy.Archive(input_path) root_ar.read_all_headers() control_bin = root_ar.archived_files[b'control.tar.gz'] control_tar = tarfile.open(fileobj=control_bin) control_tar.extractall(output_path) return True except Exception: return False # TODO: Add support for the creation of DEB files without any dependencies, allowing for improved Windows support. 
<file_sep>/ms/msxq/描述文件编辑.sh #!/bin/sh perl -p -i -e "s/插件名字/FUUpdateBadge「去设置红点」」/g" cydia副本 sileo副本.json perl -p -i -e "s/插件的描述文字/禁止设置里面的升级系统提示红点显示在你的屏幕上/g" cydia副本 sileo副本.json #支持系统 perl -p -i -e "s/0000FF/000000/g" cydia副本 sileo副本.json perl -p -i -e "s/IOS:11-12/IOS:11-12/g" cydia副本 sileo副本.json perl -p -i -e "s/1.0.1/1.0/g" cydia副本 sileo副本.json #更新时间 perl -p -i -e "s/2019-01-21/2019-05-12/g" sileo副本.json perl -p -i -e "s/暂时没有历史版本/2019-05-12更新支持A12/g" cydia副本 #截图 #perl -p -i -e "s/1.png/lowbatterybanner.png/g" sileo副本.json #perl -p -i -e "s/2.png/slices22.png/g" sileo副本.json #perl -p -i -e "s/3.png/slices23.png/g" sileo副本.json #perl -p -i -e "s/4.png/slices23.png/g" sileo副本.json #perl -p -i -e "s/ipX.gif/slices21.png/g" cydia副本 mv cydia副本 fuupdatebadge mv sileo副本.json fuupdatebadge.json cp cydia样本 cydia副本 cp sileo样本.json sileo副本.json
3a22ef1ecd1e5ddb5d7e4bca97ac7b5183374140
[ "Python", "Shell" ]
3
Python
fengziboy/beta
dbfe708242df5882a4ec76574b8fe658a7a91da5
b89c4f54ab6a5ac7843fdf99db34cd135af7f6ad
refs/heads/master
<repo_name>Dmytro-K/task2_9<file_sep>/var9/var9/var9.c #define _CRT_SECURE_NO_WARNINGS #include <stdio.h> #include <stdlib.h> #include <string.h> #include <wchar.h> #include "WordList.h" #define BUF_LEN 256 //typedef struct _WordList WordList; // //struct _WordList //{ // WordPair pair; // WordList *next; //}; int main() { int num; char buffer[BUF_LEN], origBuffer[BUF_LEN], transBuffer[BUF_LEN]; WordList *pairs; WordList *curPair; //WordList *tmpPair; size_t sizeOrig, sizeTrans; pairs = malloc( sizeof( *pairs ) ); puts("Enter words(space separator):"); curPair = &pairs; while( 1 ) { gets_s( buffer, BUF_LEN ); if( buffer[0] ) { sscanf( buffer, "%s%*[ \t]%s", origBuffer, transBuffer ); //tmpPair = (WordList*)malloc( sizeof( WordList ) ); /*sizeOrig = strlen( origBuffer ); sizeTrans = stdlen( transBuffer );*/ /*tmpPair->pair.original = malloc( sizeOrig ); tmpPair->pair.translate = malloc( sizeTrans );*/ /*memcpy( tmpPair->pair.original, origBuffer, sizeOrig ); memcpy( tmpPair->pair.translate, transBuffer, sizeTrans );*/ insertNode( pairs, origBuffer, transBuffer ); /*curPair->next = tmpPair; curPair = curPair->next; curPair->next = 0;*/ } } //printf("%s", buffer); getchar(); }<file_sep>/var9/var9/WordList.c #include <stdlib.h> //#include <string.h> #include <wchar.h> typedef struct _WordPair { wchar_t *original; wchar_t *translate; } WordPair; typedef struct _WordNode WordNode; struct _WordNode { WordPair pair; WordNode *parent; WordNode *left; WordNode *right; }; void wreplacestr( wchar_t **dst, wchar_t **src ) { size_t size; if( *dst ) { free( *dst ); } size = wcslen( *src ); *dst = malloc( ( size + 1 ) * sizeof( wchar_t ) ); wmemcpy( *dst, *src, size ); ( *dst )[size] = 0; } WordNode* findNode( WordNode *node, wchar_t *key ) { int cmpRes; WordNode *curNode = node; if( node == NULL || key == NULL ) { return NULL; } while( 1 ) { if( curNode == NULL ) { return NULL; } cmpRes = wcscmp( key, curNode->pair.original ); if( cmpRes == 0 ) { return curNode; } if( cmpRes > 0 
) { curNode = curNode->right; } else if( cmpRes < 0 ) { curNode = curNode->left; } } } void insertNode( WordNode *node, wchar_t *key, wchar_t *value ) { int cmpRes; WordNode *curNode = node; size_t size; if( node == NULL || key == NULL || value == NULL ) { return; } while( 1 ) { cmpRes = wcscmp( key, curNode->pair.original ); if( cmpRes == 0 ) { wreplacestr( &curNode->pair.translate, &value ); return; } else if( cmpRes > 0 ) { if( curNode->right ) { curNode = curNode->right; } else { curNode->right = (WordNode*)malloc( sizeof( WordNode ) ); wreplacestr( &curNode->right->pair.original, &key ); wreplacestr( &curNode->right->pair.translate, &value ); curNode->right->left = NULL; curNode->right->right = NULL; curNode->right->parent = curNode; return; } } else if( cmpRes < 0 ) { if( curNode->left ) { curNode = curNode->left; } else { curNode->left = (WordNode*)malloc( sizeof( WordNode ) ); wreplacestr( &curNode->left->pair.original, &key ); wreplacestr( &curNode->left->pair.translate, &value ); curNode->left->left = NULL; curNode->left->right = NULL; curNode->left->parent = curNode; return; } } } } int removeNode( WordNode *node, wchar_t *key ) { int cmpRes; WordNode *curNode; WordNode *child; WordNode *tmpNode; if( node == NULL || key == NULL ) { return NULL; } curNode = node; while( 1 ) { cmpRes = wcscmp( key, curNode->pair.original ); if( cmpRes == 0 ) { break; } if( cmpRes > 0 ) { if( curNode->right ) { curNode = curNode->right; } else { return 0; } } else if( cmpRes < 0 ) { if( curNode->left ) { curNode = curNode->left; } else { return 0; } } } while( 1 ) { if( curNode->left == NULL && curNode->right == NULL ) { if( curNode->parent ) { if( curNode->parent->left == curNode ) { curNode = curNode->parent; freeNode( curNode->left ); curNode->left = NULL; } else { curNode = curNode->parent; freeNode( curNode->right ); curNode->right = NULL; } return 1; } } else if( curNode->left == NULL || curNode->right == NULL ) { child = curNode->right ? 
curNode->right : curNode->left; child->parent = curNode->parent; if( curNode->parent->left == curNode ) { curNode->left = child; } else { curNode->right = child; } if( curNode->pair.original ) { free( curNode->pair.original ); } if( curNode->pair.translate ) { free( curNode->pair.translate ); } free( curNode ); return 1; } else { if( curNode->right->left == NULL ) { curNode->pair.original = curNode->right->pair.original; curNode->pair.translate = curNode->right->pair.translate; tmpNode = curNode->right; curNode->right = curNode->right->right; free( tmpNode ); return 1; } else { curNode->pair.original = curNode->right->left->pair.original; curNode->pair.translate = curNode->right->left->pair.translate; //tmpNode = curNode->right; curNode = curNode->right->left; //free( tmpNode ); } } } } void freeNode( WordNode *node ) { if( node->pair.original ) { free( node->pair.original ); } if( node->pair.translate ) { free( node->pair.translate ); } free( node ); } void destroyBranch( WordNode *node ) { if( node->left ) { destroyBranch( node->left ); node->left = NULL; } if( node->right ) { destroyBranch( node->right ); node->right = NULL; } freeNode( node ); } <file_sep>/var9/var9/WordList.h #ifndef WORD_LIST_H #define WORD_LIST_H typedef struct _WordPair { wchar_t *original; wchar_t *translate; } WordPair; typedef struct _WordNode WordNode, WordList; struct _WordNode { WordPair pair; WordNode *parent; WordNode *left; WordNode *right; }; void wreplacestr( wchar_t **dst, wchar_t **src ); void freeNode( WordNode *node ); WordNode* findNode( WordNode *node, wchar_t *key ); void insertNode( WordNode *node, wchar_t *key, wchar_t *value ); int removeNode( WordNode *node, wchar_t *key ); void freeNode( WordNode *node ); #endif /* WORD_LIST_H */
2298e4cdb54be89a3ac3653280188967160ed2ba
[ "C" ]
3
C
Dmytro-K/task2_9
ba945c9b8a4a3d7d099ca7326514df1a985591b9
f093535517051a4b2cf7e1fbbb1f3a6307900cc4
refs/heads/master
<repo_name>AllanSmithee83/as-webrtc-project<file_sep>/public/js/webrtc.js /* connect to server */ io = io.connect(); var myName = ""; var theirName = ""; var myUserType = ""; var configuration = { 'iceServers': [{ 'url': 'stun:stun.l.google.com:19302' }] }; var rtcPeerConn; var mainVideoArea = document.querySelector("#mainVideoTag"); var smallVideoArea = document.querySelector("#smallVideoTag"); var dataChannel; var dataChannelOptions = { ordered: true, //no guaranteed delivery, unreliable but faster maxRetransmitTime: 1000, //milliseconds }; /*define signal handler*/ io.on('signal', function(data) { if (data.user_type == "caller2" && data.command == "joinroom") { console.log("The caller2 is here!"); if (myUserType == "caller1") { theirName = data.user_name; document.querySelector("#messageOutName").textContent = theirName; document.querySelector("#messageInName").textContent = myName; } //Switch to the doctor listing document.querySelector("#requestCaller2Form").style.display = 'none'; document.querySelector("#waitingForCaller2").style.display = 'none'; document.querySelector("#caller2Listing").style.display = 'block'; } else if (data.user_type == "caller1" && data.command == "call") { console.log("Caller1 is calling"); if (myUserType == "caller2") { theirName = data.user_name; document.querySelector("#messageOutName").textContent = theirName; document.querySelector("#messageInName").textContent = myName; } document.querySelector("#caller2Signup").style.display = 'none'; document.querySelector("#videoPage").style.display = 'block'; } else if (data.user_type == 'signaling') { if (!rtcPeerConn) startSignaling(); var message = JSON.parse(data.user_data); if (message.sdp) { rtcPeerConn.setRemoteDescription(new RTCSessionDescription(message.sdp), function () { // if we received an offer, we need to answer if (rtcPeerConn.remoteDescription.type == 'offer') { rtcPeerConn.createAnswer(sendLocalDesc, logError); } }, logError); } else { rtcPeerConn.addIceCandidate(new 
RTCIceCandidate(message.candidate)); } } }); //see - http://www.html5rocks.com/en/tutorials/webrtc/infrastructure/ //example http://www.w3.org/TR/webrtc/#simple-peer-to-peer-example function startSignaling() { console.log("starting signaling..."); rtcPeerConn = new webkitRTCPeerConnection(configuration); dataChannel = rtcPeerConn.createDataChannel('textMessages', dataChannelOptions); dataChannel.onopen = dataChannelStateChanged; rtcPeerConn.ondatachannel = receiveDataChannel; //define several handlers we need for webrtc connection to be establish (when we have icecandidate ready to sent our peer , event haneler fired -sent that candidate over socketio chanell to anyone on it ) //how we can connect our peer directly, use stun or turn servers as nessessery to setablish direct p2p connection through firewalls and routers. // send any ice candidates to the other peer rtcPeerConn.onicecandidate = function (evt) { if (evt.candidate) io.emit('signal',{"user_type":"signaling", "command":"icecandidate", "user_data": JSON.stringify({ 'candidate': evt.candidate })}); console.log("completed sending an ice candidate..."); }; // let the 'negotiationneeded' event trigger offer generation rtcPeerConn.onnegotiationneeded = function () { console.log("on negotiation called"); rtcPeerConn.createOffer(sendLocalDesc, logError); }; // once remote stream arrives, show it in the main video element rtcPeerConn.onaddstream = function (evt) { console.log("going to add their stream..."); mainVideoArea.src = URL.createObjectURL(evt.stream); }; // get a local stream, show it in our video tag and add it to be sent navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia; navigator.getUserMedia({ 'audio': false, 'video': true }, function (stream) { console.log("going to display my stream..."); smallVideoArea.src = URL.createObjectURL(stream); rtcPeerConn.addStream(stream); }, logError); } function sendLocalDesc(desc) { 
rtcPeerConn.setLocalDescription(desc, function () { console.log("sending local description"); io.emit('signal',{"user_type":"signaling", "command":"SDP", "user_data": JSON.stringify({ 'sdp': rtcPeerConn.localDescription })}); }, logError); } function logError(error) { } //////MUTE/PAUSE STREAMS CODE////// var muteMyself = document.querySelector("#muteMyself"); var pauseVideo = document.querySelector("#pauseVideo"); muteMyself.addEventListener('click', function(ev){ console.log("muting/unmuting myself"); var streams = rtcPeerConn.getLocalStreams(); for (var stream of streams) { for (var audioTrack of stream.getAudioTracks()) { if (audioTrack.enabled) { muteMyself.innerHTML = "Unmute" } else { muteMyself.innerHTML = "Mute Myself" } audioTrack.enabled = !audioTrack.enabled; } console.log("Local stream: " + stream.id); } ev.preventDefault(); }, false); pauseVideo.addEventListener('click', function(ev){ console.log("pausing/unpausing my video"); var streams = rtcPeerConn.getLocalStreams(); for (var stream of streams) { for (var videoTrack of stream.getVideoTracks()) { if (videoTrack.enabled) { pauseVideo.innerHTML = "Start Video" } else { pauseVideo.innerHTML = "Pause Video" } videoTrack.enabled = !videoTrack.enabled; } console.log("Local stream: " + stream.id); } ev.preventDefault(); }, false); /////////////Data Channels Code/////////// var messageholder = document.querySelector("#messageholder"); var myMessage = document.querySelector("#myMessage"); var sendMessage = document.querySelector("#sendMessage"); var receivedFileName; var receivedFileSize; var fileBuffer = []; var fileSize = 0; var fileTransferring = false; function dataChannelStateChanged() { if (dataChannel.readyState === 'open') { console.log("Data Channel open"); dataChannel.onmessage = receiveDataChannelMessage; } } function receiveDataChannel(event) { console.log("Receiving a data channel"); dataChannel = event.channel; dataChannel.onmessage = receiveDataChannelMessage; } function 
receiveDataChannelMessage(event) { console.log("From DataChannel: " + event.data); if (fileTransferring) { //Now here is the file handling code: fileBuffer.push(event.data); fileSize += event.data.byteLength; fileProgress.value = fileSize; //Provide link to downloadable file when complete if (fileSize === receivedFileSize) { var received = new window.Blob(fileBuffer); fileBuffer = []; downloadLink.href = URL.createObjectURL(received); downloadLink.download = receivedFileName; downloadLink.appendChild(document.createTextNode(receivedFileName + "(" + fileSize + ") bytes")); fileTransferring = false; //Also put the file in the text chat area var linkTag = document.createElement('a'); linkTag.href = URL.createObjectURL(received); linkTag.download = receivedFileName; linkTag.appendChild(document.createTextNode(receivedFileName)); var div = document.createElement('div'); div.className = 'message-out'; div.appendChild(linkTag); messageHolder.appendChild(div); } } else { appendChatMessage(event.data, 'message-out'); } } function receiveDataChannelMessage(event) { console.log("From DataChannel: " + event.data); appendChatMessage(event.data, 'message-out'); } sendMessage.addEventListener('click', function(ev){ dataChannel.send(myMessage.value); appendChatMessage(myMessage.value, 'message-in'); myMessage.value = ""; ev.preventDefault(); }, false); function appendChatMessage(msg, className) { var div = document.createElement('div'); div.className = className; div.innerHTML = '<span>' + msg + '</span>'; messageholder.appendChild(div); } /////////////File Transfer/////////// var sendFile = document.querySelector("input#sendFile"); var fileProgress = document.querySelector("progress#fileProgress"); var downloadLink = document.querySelector('a#receivedFileLink'); io.on('files', function(data) { receivedFileName = data.filename; receivedFileSize = data.filesize; console.log("File on it's way is " + receivedFileName + " (" + receivedFileSize + ")"); fileTransferring = true; }); 
sendFile.addEventListener('change', function(ev){ var file = sendFile.files[0]; console.log("sending file " + file.name + " (" + file.size + ") ..."); io.emit('files',{"filename":file.name, "filesize":file.size}); appendChatMessage("sending " + file.name, 'message-in'); fileTransferring = true; fileProgress.max = file.size; var chunkSize = 16384; var sliceFile = function(offset) { var reader = new window.FileReader(); reader.onload = (function() { return function(e) { dataChannel.send(e.target.result); if (file.size > offset + e.target.result.byteLength) { window.setTimeout(sliceFile, 0, offset + chunkSize); } fileProgress.value = offset + e.target.result.byteLength; }; })(file); var slice = file.slice(offset, offset + chunkSize); reader.readAsArrayBuffer(slice); }; sliceFile(0); fileTransferring = false; }, false); <file_sep>/public/js/app.js //Variable declarations for the high level screens of our single page app var landingPageDiv = document.querySelector("#landingPage"); var caller1EntryDiv = document.querySelector("#caller1Entry"); var caller2SignupDiv = document.querySelector("#caller2Signup"); var videoPageDiv = document.querySelector("#videoPage"); //Variable declarations for other controls used on the signup pages and necessary for app flow var caller1Name = document.querySelector("#caller1Name"); var caller2Name = document.querySelector("#caller2Name"); var enterAsCaller1 = document.querySelector("#enterAsCaller1"); var requestCaller2 = document.querySelector("#requestCaller2"); var requestCaller2Form = document.querySelector("#requestCaller2Form"); var waitingForCaller2 = document.querySelector("#waitingForCaller2"); var waitingForCaller2Progress = document.querySelector("#waitingForCaller2Progress"); var caller2SignupForm = document.querySelector("#caller2SignupForm"); var caller2SignupButton = document.querySelector("#caller2SignupButton"); var waitingForCaller1 = document.querySelector("#waitingForCaller1"); var caller2Listing = 
document.querySelector("#caller2Listing"); var callCaller2 = document.querySelector("#callCaller2"); var enterAsCaller2 = document.querySelector("#enterAsCaller2"); //Enter the application as a caller1 and toggle the div's enterAsCaller1.addEventListener('click', function(ev){ landingPageDiv.style.display = 'none'; caller1EntryDiv.style.display = 'block'; caller2SignupDiv.style.display = 'none'; videoPageDiv.style.display = 'none'; myUserType = "caller1" requestCaller2Form.style.display = 'block'; waitingForCaller2.style.display = 'none'; caller2Listing.style.display = 'none'; ev.preventDefault(); }, false); //For the caller1 after they enter their basic information they will need to wait for a caller2 to arrive at this point //Signaling code will trigger an update to this view once a caller2 has arrived requestCaller2.addEventListener('click', function(ev){ requestCaller2Form.style.display = 'none'; waitingForCaller2.style.display = 'block'; caller2Listing.style.display = 'none'; //The caller1 joins the signaling room in socket.io caller1UserName = caller1Name.value || 'no name'; myName = caller1UserName; io.emit('signal', {"user_type": "caller1", "user_name": caller1UserName, "user_data": "no data", "command": "joinroom"}); console.log("caller1 " + caller1UserName + " has joined."); ev.preventDefault(); }, false); /* //This code should be removed, it is only for clickable prototype purposes //This allows you to click on the caller1 progress bar and advance to the //video screen without a caller2. 
waitingForCaller2Progress.addEventListener('click', function(ev){ requestCaller2Form.style.display = 'none'; waitingForCaller2.style.display = 'none'; caller2Listing.style.display = 'block'; ev.preventDefault(); }, false); */ //Enter the application as a caller2 and progress to the sign up form enterAsCaller2.addEventListener('click', function(ev){ landingPageDiv.style.display = 'none'; caller1EntryDiv.style.display = 'none'; caller2SignupDiv.style.display = 'block'; videoPageDiv.style.display = 'none'; myUserType = "caller2" caller2SignupForm.style.display = 'block'; waitingForCaller1.style.display = 'none'; ev.preventDefault(); }, false); //Allows the caller2 to "sign up" by entering their name caller2SignupButton.addEventListener('click', function(ev){ caller2SignupForm.style.display = 'none'; waitingForCaller1.style.display = 'block'; //The caller2 joins the signaling room in socket.io caller2UserName = caller2Name.value || 'no name'; myName = caller2UserName; io.emit('signal', {"user_type": "caller2", "user_name": caller2UserName, "user_data": "no data", "command": "joinroom"}); console.log("caller2 " + caller2UserName + " has joined."); ev.preventDefault(); }, false); //Once a caller2 has arrived on the caller2 listing view,a caller1 calls them from this button callCaller2.addEventListener('click', function(ev){ landingPageDiv.style.display = 'none'; caller1EntryDiv.style.display = 'none'; videoPageDiv.style.display = 'block'; //Send a signal that the caller1 is calling caller1UserName = caller1Name.value || 'no name'; io.emit('signal', {"user_type": "caller1", "user_name": caller1UserName, "user_data": "calling caller2", "command": "call"}); console.log("caller1 " + caller1UserName + " is calling."); //Kick off the WebRTC signaling //Setup the RTC Peer Connection object if (!rtcPeerConn) startSignaling(); ev.preventDefault(); }, false); <file_sep>/README.md Simple WebRTC project Learning webRTc buillding simple webRTC app: -signaling with socket.io -share 
files using WebRTC Data Channel 1.npm install 2.node server localhost:3000
83a522755a81f9d754fb6e1d357b517cb3d3d512
[ "JavaScript", "Markdown" ]
3
JavaScript
AllanSmithee83/as-webrtc-project
8f3246f095accdc00e9c872d15badf33e70dc831
987d16750231c7d6d4335b8409dc97564233b6fb
refs/heads/master
<file_sep>window.onkeyup = function(e) { var key = e.keyCode ? e.keyCode : e.which; if (key == 38) { //up arrow player.posY *= friction; }else if (key == 37) { // left arrow player.posX *= friction; player.velY += gravity; }else if (key == 39) { // right arrow player.posX /= friction; player.velY += gravity; } //check the player does not go outside the window if (player.posX >= width-player.width) { player.posX = width-player.width; } else if (player.posX <= 0) { player.posX = 0; } //jumping if (keys[38] || keys[32]) { // up arrow or space if(!player.jumping){ player.jumping = true; player.velY = -player.speed*2; } } }; <file_sep>//checks collision between player and cereal var dir = collisionCheck(player, boxes[i].cerealDrawn); for (var i = 0; i < boxes.length; i++) { if (boxes[i].cereal && (dir === "l" || dir === "r" || dir === "t" || dir === "b") ){ boxes.cerealDrawn = undefined; //or delete boxes.cerealDrawn; } } <file_sep>// button cancel animation for pause - resumer button] var pauseResume = ""; var pauseResume = requestAnimationFrame (update); function startStopAnimation (e){ if(this.textContent === "Pause"){ cancelAnimationFrame(pauseResume); this.textContent = "Resume"; } else { requestAnimationFrame (update); this.textContent = "Pause"; } } var pauseButton = getElementById('pause'); pauseButton.addEventListener('click',startStopAnimation, false); <file_sep>(function() { var requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame; window.requestAnimationFrame = requestAnimationFrame; })(); window.cancelAnimationFrame = window.cancelAnimationFrame || window.mozCancelAnimationFrame || function(requestID){ clearInterval(requestID); }; //==background image ==== var bg = new Image(); bg.src = "images/cereal-game-bg2.jpg"; bg.height= 690; var bg2 = new Image(); bg2.src = "images/cereal-game-bg1.jpg"; bg2.height = 690; //==end background image ==== 
//==platform image ==== var platform = new Image(); platform.src = "images/platforms.png"; platform.width = 170; platform.height = 20; //==end platform image === var avatar_image = new Image(); avatar_image.src = 'images/spoon.png'; //creation of cereal images var heart_image = new Image(); heart_image.src = 'images/heart.png'; var horse_paw_image = new Image(); horse_paw_image.src = 'images/horse-paw.png'; var moon_image = new Image(); moon_image.src = 'images/moon.png'; var rainbow_image = new Image(); rainbow_image.src = 'images/rainbow.png'; var lucky_charms_image = new Image(); lucky_charms_image.src = 'images/lucky-charms.png'; //score var score = 0; var dir; //marshmallow array var marshmallow_array = [heart_image,horse_paw_image,moon_image,rainbow_image,lucky_charms_image]; var isPaused = false; var randomCereal; var randomMarshmallow; var randomImage; //cereals actually on the canvas var cerealsOnCanvas = []; //variable declarations var canvas = document.getElementById('killer-canvas'), ctx = canvas.getContext('2d'), width = 500, height = 700, player = { x : width/2, //place the player in the middle of the x axis y : height - 100, // place the player just 5px above the y axis height : 50, // height of the player width : 50, //width of the player velX : 0, //velocityX velY: 0, //velocityY speed: 5, jumping: false, grounded: false, //avatar: avatar() }, cereal = { width: 50, height: 50, }, keys = []; friction = 0.8; gravity= 0.3; var alertBox = false; //collision boxes var boxes =[]; var outlineInCanvas = []; var outlineInCanvasBottom = []; //difference between canvas width and box width var boxCreationLimit = parseInt(canvas.width - (boxes.width/2)); //below we push 3 boxes into the array. 
//below limit outlineInCanvasBottom.push({ x:0, y:700, width: width, height: 50 }); //right limit outlineInCanvas.push({ x:500, y:0, width: 10, height: height }); //left limit outlineInCanvas.push({ x:0, y:0, width: 0, height: height }); //6 initial boxes boxes.push({ x:330, y:10, width: 170, height: 20, cereal: false, cerealDrawn : "", cerealW : 40, cerealH : 40 }); boxes.push({ x:80, y:170, width: 170, height: 20, cereal: false, cerealDrawn : "", cerealW : 40, cerealH : 40 }); boxes.push({ x:320, y:250, width: 170, height: 20, cereal: false, cerealDrawn : "", cerealW : 40, cerealH : 40 }); boxes.push({ x:20, y:550, width: 170, height: 20, cereal: false, cerealDrawn : "", cerealW : 40, cerealH : 40 }); boxes.push({ x: 300, y: 480, width: 170, height: 20, cereal: false, cerealDrawn : "", cerealW : 40, cerealH : 40 }); boxes.push({ x: 170, y:380, width: 170, height: 20, pform: "", cereal: false, cerealDrawn : "", cerealW : 40, cerealH : 40 }); canvas.width = width; canvas.height = height; //setInterval oop that push the platforms down var boxScroll = setInterval (function (){ actualScrolling(); }, 30); function boxScrollBottom(){ setTimeout(function(){ actualScrollingBottom(); }, 5000); } boxScrollBottom(); //platform scrolling logic function actualScrolling(){ for(var i=0; i<boxes.length; i++){ boxes[i].y += 1; if (score > 40){ boxes[i].y += 1.9; }else if (score > 70) { boxes[i].y += 2; } } } //bottomplatform logic function actualScrollingBottom(){ setInterval (function (){ outlineInCanvasBottom[0].y+=1; console.log('canvas is scrolling'); }, 100); } // pause function $('#pause').on('click',function(){ isPaused= !isPaused; if (isPaused){ $("#pause").text("Resume"); clearInterval(boxScroll); } else { $("#pause").text("Pause"); boxScroll = setInterval (function (){ actualScrolling(); }, 30); } console.log('pause click'); }); function update(){ if(player.y > 715){ alert("You suck!"); location.reload(true); } //checking player and boxes[i] y position for(i=0; i < 
boxes.length; i++ ){ if (boxes[i].y > height) { boxes.splice(i,1); // we get rid of the box outside the canvas var number = Math.floor(Math.random() * (5 - 1 + 1 )) + 1; if (number == 4){ randomCereal = true; randomMarshmallow = Math.floor(Math.random()* marshmallow_array.length); randomImage = (marshmallow_array[randomMarshmallow]); } else { randomCereal = false; } boxes.push({ x: Math.floor(Math.random() * (width - 170 - 1 + 1 )) + 1, // x: width - 170, y: Math.floor(Math.random() * player.velY-3) + 1, width:boxes[i].width, height:boxes[i].height, cereal : randomCereal, pform : platform, cerealDrawn : randomImage, cerealW: 40, cerealH: 40 }); } } // check keys if (keys[38] || keys[32] || keys[87]) { // up arrow or space if(!player.jumping && player.grounded){ player.jumping = true; player.grounded = false; // we're not on the ground anymore player.velY = -player.speed * 2; } } if (keys[39] || keys[68]) { // right arrow if (player.velX < player.speed) { player.velX++; } } if (keys[37]|| keys[65]) { // left arrow if (player.velX > -player.speed) { player.velX--; } } player.velX *= friction; player.velY += gravity; //ctx.clearRect(0,0,width,height); //this logic sets the filling rules for the boxes //ctx.fillStyle = "black"; ctx.beginPath(); //background panning logic activation animateBackground(); player.grounded = false; for(var i=0; i < boxes.length; i++) { ctx.drawImage(platform, boxes[i].x, boxes[i].y, boxes[i].width, boxes[i].height); if (boxes[i].cereal){ if(boxes[i].cerealDrawn !== ""){ ctx.drawImage(boxes[i].cerealDrawn,boxes[i].x+70, boxes[i].y-40, 40, 40); } } // //player - cerealcollision check var dir = cerealCollisionCheck(player, boxes[i]); if(dir) { boxes[i].cereal = false; score += 1; console.log(score); $("#score").text(score); } dir = collisionCheck(player, boxes[i]); if (dir === "l" || dir === "r") { player.velX = 0; player.jumping = false; } else if (dir === "b") { player.grounded = true; player.jumping = false; } else if (dir === "t") { 
player.velY *= -1; } } for(var x=0; x < boxes.length; x++) { // ctx.rect(boxes[i].x, boxes[i].y, boxes[i].width, boxes[i].height); var dir = collisionCheck(player, boxes[x]); if (dir === "l" || dir === "r") { player.velX = 0; player.jumping = false; } else if (dir === "b") { player.grounded = true; player.jumping = false; } else if (dir === "t") { player.velY *= -1; } } // draw the outline and check for collisions for(var n=0; n < outlineInCanvas.length; n++) { ctx.rect(outlineInCanvas[n].x, outlineInCanvas[n].y, outlineInCanvas[n].width, outlineInCanvas[n].height); dir = collisionCheck(player, outlineInCanvas[n]); if (dir === "l" || dir === "r") { player.velX = 0; player.jumping = false; } else if (dir === "b") { player.grounded = true; player.jumping = false; } else if (dir === "t") { player.velY *= -1; } } // draw the outline and check for collisions at bottom ctx.rect(outlineInCanvasBottom[0].x, outlineInCanvasBottom[0].y, outlineInCanvasBottom[0].width, outlineInCanvasBottom[0].height); dir = collisionCheck(player, outlineInCanvasBottom[0]); if (dir === "l" || dir === "r") { player.velX = 0; player.jumping = false; } else if (dir === "b") { player.grounded = true; player.jumping = false; } else if (dir === "t") { player.velY *= -1; } if(player.grounded){ player.velY = 0; } player.x += player.velX; player.y += player.velY; //ctx.drawImage(avatar_image,player.x,player.y, player.width, player.height); ctx.fill(); //fill is a method that fills the current drawing path with platforms. 
ctx.drawImage(avatar_image,player.x,player.y, player.width, player.height); requestAnimationFrame(update); } //====end of update function===== //====backgorund function===== // var canvas = document.getElementById('killer-background'); // var ctx = canvas.getContext('2d'); // var cW = ctx.canvas.width; // var cH = ctx.canvas.height; function Background(){ this.x = 0; this.y = 0; this.y2 = -bg2.height; this.w = bg.width; this.h = bg.height; this.scrollBackground = function(){ if (this.y > bg.height) { this.y = -bg.height; } if (this.y2 > bg2.height) { this.y2 = -bg2.height; } this.y ++; this.y2++; }; } var background = new Background(); var background2 = new Background(); function animateBackground(){ console.log('animate function'); ctx.drawImage(bg, background.x, background.y); ctx.drawImage(bg2, background.x, background.y2); // ctx.save(); // ctx.clearRect(0,0, canvas.width, canvas.height); // background.render(); if (!isPaused){ background.scrollBackground(); } } //====end backgorund function===== //collision check function function collisionCheck(shapeA, shapeB){ //creation of the vector vX = (shapeA.x + (shapeA.width/2)) - (shapeB.x + (shapeB.width/2)) ; vY = (shapeA.y + (shapeA.height/2)) - (shapeB.y + (shapeB.height/2)) ; //sun of the haplf widths and Heights halfWidths = (shapeA.width/2) + (shapeB.width/2); halfHeights = (shapeB.height/2) + (shapeB.height/2); colDir = null; // if the x and y vector are less than the half width or half height, they we must be inside the object, causing a collision if (Math.abs(vX) < halfWidths && Math.abs(vY) < halfHeights) { // figures out on which side we are colliding (top, bottom, left, or right) var oX = halfWidths - Math.abs(vX), oY = halfHeights - Math.abs(vY); if (oX >= oY) { if (vY > 0) { colDir = "t"; // top shapeA.y += oY; } else { colDir = "b"; //bottom shapeA.y -= oY; } } else { if (vX > 0) { colDir = "l"; //left shapeA.x += oX; } else { colDir = "r"; //right shapeA.x -= oX; } } } return colDir; } // ceral 
collision check constructor function cerealCollisionCheck(shapeA, shapeB){ //creation of the vector vX = (shapeA.x + (shapeA.width/2)) - (shapeB.x + (shapeB.cerealW/2)) ; vY = shapeA.y + shapeA.height - (shapeB.y + (shapeB.cerealH/2)); //sun of the haplf widths and Heights halfWidths = (shapeA.width/2) + (shapeB.cerealW/2); halfHeights = (shapeB.cerealH/2) + (shapeB.cerealH/2); colDir = null; if (shapeB.cereal) { // if the x and y vector are less than the half width or half height, they we must be inside the object, causing a collision if (Math.abs(vX) < halfWidths && Math.abs(vY) < halfHeights - 20 ) { console.debug(); // figures out on which side we are colliding (top, bottom, left, or right) var oX = halfWidths - Math.abs(vX), oY = halfHeights - Math.abs(vY); if (oX >= oY) { if (vY > 0) { colDir = "t"; // top shapeA.y += oY; } else { colDir = "b"; //bottom shapeA.y -= oY; } } else { if (vX > 0) { colDir = "l"; //left shapeA.x += oX; } else { colDir = "r"; //right shapeA.x -= oX; } } } } return colDir; } //refresh button $("#restart").on('click', function(){ console.log("restart button has been clicked!"); location.reload(true); }); document.body.addEventListener("keydown", function(e) { var permittedKeys = [38,32,87,39,68,37,65]; if ( permittedKeys.includes(e.keyCode)){ e.preventDefault(); } keys[e.keyCode] = true; }); document.body.addEventListener("keyup", function(e) { keys[e.keyCode] = false; }); window.addEventListener("load",function(){ update(); });
27629157c41a0eafc8a27bd2c74927d59aaf9de0
[ "JavaScript" ]
4
JavaScript
iamlydial/cereal-killer-game
68c328cb87f184343e47b7ef86f4305cf1839f8b
58f381ac311be3ed6b35b6ca622b7786807a79a8
refs/heads/master
<repo_name>kawa1228/pwa-sample<file_sep>/src/plugins/axios/index.js import axios from "axios"; // axiosをプラグイン化してみる const install = (Vue, config = {}) => { const client = axios.create(config); // Vue prototype $_axios Object.defineProperty(Vue.prototype, "$_axios", { get() { return client; } }); }; // install Object export export default { install };
320f096f1d11b8f97eda9f142f2fd18283511cd0
[ "JavaScript" ]
1
JavaScript
kawa1228/pwa-sample
105858c3abfa0feddb41c5ade679b886e159652c
d39d0ac9f40560eb9cb30d83af38bdae065a25cf
refs/heads/master
<repo_name>ziakhan82/ticketsystem-razuhmd<file_sep>/ClassLibraryTicketSystem/Brobizz.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace ClassLibraryTicketSystem { public class Brobizz { public bool MyBrobizz { get; set; } /// <summary> /// Constructor to initialize the property /// </summary> /// <param name="myBrobizz"></param> public Brobizz(bool myBrobizz) { MyBrobizz = myBrobizz; } /// <summary> /// Empty Constructor /// </summary> public Brobizz() { } } } <file_sep>/ClassLibraryTicketSystemTests/CarTests.cs using Microsoft.VisualStudio.TestTools.UnitTesting; using ClassLibraryTicketSystem; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace ClassLibraryTicketSystem.Tests { /// <summary> /// Tests for Car class methods /// </summary> [TestClass()] public class CarTests { /// <summary> /// Test with no Brobizz /// </summary> [TestMethod()] public void PriceTest() { //Arrange Car car = new Car(); Brobizz brobizz = new Brobizz(false); //Act var result = car.Price(brobizz); //Assert Assert.AreEqual(240, result); } /// <summary> /// Method to test the type of the vehicle /// </summary> [TestMethod()] public void VehicleTypeTest() { //Arrange Car car = new Car(); //Act var result = car.VehicleType(); //Assert Assert.AreEqual("Car", result); } /// <summary> /// Test with Brobizz discount /// </summary> [TestMethod()] public void PriceTest1() { //Arrange Car car = new Car(); Brobizz brobizz = new Brobizz(true); //Act var result = car.Price(brobizz); //Assert Assert.AreEqual(228, result); } } }<file_sep>/OresundBronTicketSystemTests/OresundMCTests.cs using ClassLibraryTicketSystem; using Microsoft.VisualStudio.TestTools.UnitTesting; using OresundBronTicketSystem; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace OresundBronTicketSystem.Tests { 
/// <summary> /// Tests for Oresund MC class methods /// </summary> [TestClass()] public class OresundMCTests { /// <summary> /// Test with no Brobizz /// </summary> [TestMethod()] public void PriceTest() { //Arrange OresundMC omc = new OresundMC(); Brobizz brobizz = new Brobizz(false); //Act var result = omc.Price(brobizz); //Assert Assert.AreEqual(210, result); } /// <summary> /// Test to check vehicle type for Oresund MC /// </summary> [TestMethod()] public void VehicleTypeTest() { //Arrange OresundMC omc = new OresundMC(); //Act var result = omc.VehicleType(); //Assert Assert.AreEqual("Oresund MC", result); } /// <summary> /// Test with Brobizz discount /// </summary> [TestMethod()] public void PriceTest1() { //Arrange OresundMC omc = new OresundMC(); Brobizz brobizz = new Brobizz(true); //Act var result = omc.Price(brobizz); //Assert Assert.AreEqual(73, result); } } }<file_sep>/ClassLibraryTicketSystem/MC.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace ClassLibraryTicketSystem { /// <summary> /// The MC class inherites from VehicleBaseClass /// </summary> public class MC : VehicleBaseClass { /// <summary> /// Constructor to initialize all the properties /// </summary> /// <param name="licenseplate">Initializes Licenseplate property</param> /// <param name="date">Initializes Date property</param> public MC(string licenseplate, DateTime date) : base(licenseplate, date) { Licenseplate = licenseplate; Date = date; } /// <summary> /// Empty constructor /// </summary> public MC() { } /// <summary> /// Price() method returns a fixed price 125. 
Discount applies for Brobizz /// </summary> /// <returns>Double</returns> public override double Price(Brobizz brobizz) { if (brobizz.MyBrobizz == true) { return 125 - 125 * .05; } return 125; } /// <summary> /// VehicleType() method returns "MC" /// </summary> /// <returns>String</returns> public override string VehicleType() { return "MC"; } } } <file_sep>/ClassLibraryTicketSystem/Car.cs using System; namespace ClassLibraryTicketSystem { /// <summary> /// The Car class inherites from VehicleBaseClass /// </summary> public class Car : VehicleBaseClass { /// <summary> /// Constructor to initialize all the properties /// </summary> /// <param name="licenseplate">Initializes Licenseplate property</param> /// <param name="date">Initializes Date property</param> public Car(string licenseplate, DateTime date) : base(licenseplate, date) { Licenseplate = licenseplate; Date = date; } /// <summary> /// Empty constructor /// </summary> public Car() : base() { } /// <summary> /// Price() method returns a fixed price 240. Discount applies for Brobizz /// </summary> /// <returns>Double</returns> public override double Price(Brobizz brobizz) { if(brobizz.MyBrobizz == true) { return 240 - 240 * .05; } return 240; } /// <summary> /// VehicleType() method returns "Car" /// </summary> /// <returns>String</returns> public override string VehicleType() { return "Car"; } } } <file_sep>/ClassLibraryTicketSystem/VehicleBaseClass.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace ClassLibraryTicketSystem { /// <summary> /// VehicleBaseClass is an abstract class. It has two properties: Licenseplate, Date. It has two abstract methods: Price(), VehicleType(). 
/// </summary> public abstract class VehicleBaseClass { public string Licenseplate { get; set; } public DateTime Date { get; set; } protected VehicleBaseClass(string licenseplate, DateTime date) { Licenseplate = licenseplate; Date = date; } protected VehicleBaseClass() { } public abstract double Price(Brobizz brobizz); public abstract string VehicleType(); } } <file_sep>/StoreBaeltTicketLibrary/WeekendDiscountForCar.cs using ClassLibraryTicketSystem; using System; namespace StoreBaeltTicketLibrary { /// <summary> /// Weekend discount class only for Car /// </summary> public class WeekendDiscountForCar { public bool Weekend { get; set; } /// <summary> /// Constructor to initialize the property /// </summary> /// <param name="weekend"></param> public WeekendDiscountForCar(bool weekend) { Weekend = weekend; } /// <summary> /// Empty constructor /// </summary> public WeekendDiscountForCar() { } /// <summary> /// Method to return the price after weekend discount and Brobizz discount /// </summary> /// <returns>double</returns> public double GetWeekendBrobizzPrice() { Car car = new Car(); Brobizz noBrobizz = new Brobizz(false); var priceNoBrobizz = car.Price(noBrobizz); priceNoBrobizz = priceNoBrobizz - priceNoBrobizz * .2; var priceWeekendBrobizz = priceNoBrobizz - priceNoBrobizz * .05; return priceWeekendBrobizz; } /// <summary> /// Method to return the price after only weekend discount /// </summary> /// <returns>double</returns> public double GetWeekendNoBrobizzPrice() { Car car = new Car(); Brobizz noBrobizz = new Brobizz(false); var priceWeekendNoBrobizz = car.Price(noBrobizz); priceWeekendNoBrobizz = priceWeekendNoBrobizz - priceWeekendNoBrobizz * .2; return priceWeekendNoBrobizz; } } } <file_sep>/ClassLibraryTicketSystem/LicenseplateFunctionality.cs using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace ClassLibraryTicketSystem { /// <summary> /// This class checks the length of the 
numberplate. If it is longer than 7 character, it throws ArgumentException /// </summary> public class LicenseplateFunctionality { public LicenseplateFunctionality() { } /// <summary> /// Checks licenseplate length and creates Car /// </summary> /// <param name="licenseplate"></param> /// <param name="date"></param> /// <returns></returns> public Car CheckAndCreateCar(string licenseplate, DateTime date) { if(licenseplate.Length < 8) { Car car = new Car(licenseplate, date); return car; } else throw new ArgumentException("Licenseplate is longer than 7 character !!!"); } /// <summary> /// Checks licenseplate length and creates MC /// </summary> /// <param name="licenseplate"></param> /// <param name="date"></param> /// <returns></returns> public MC CheckAndCreateMC(string licenseplate, DateTime date) { if (licenseplate.Length < 8) { MC mc = new MC(licenseplate, date); return mc; } else throw new ArgumentException("Licenseplate is longer than 7 character !!!"); } } } <file_sep>/StoreBaeltTicketLibraryTests/WeekendDiscountForCarTests.cs using Microsoft.VisualStudio.TestTools.UnitTesting; using StoreBaeltTicketLibrary; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace StoreBaeltTicketLibrary.Tests { [TestClass()] public class WeekendDiscountForCarTests { /// <summary> /// Test for the method to get the price after weekend and Brobizz discount /// </summary> [TestMethod()] public void GetWeekendBrobizzPriceTest() { //Arrange WeekendDiscountForCar weekendDiscountForCar = new WeekendDiscountForCar(true); //Act var priceWeekendBrobizz = weekendDiscountForCar.GetWeekendBrobizzPrice(); //Assert Assert.AreEqual(182.4, priceWeekendBrobizz); } /// <summary> /// Test for the method to get the price after only weekend discount /// </summary> [TestMethod()] public void GetWeekendNoBrobizzPriceTest() { //Arrange WeekendDiscountForCar weekendDiscountForCar = new WeekendDiscountForCar(true); //Act var 
priceWeekendNoBrobizz = weekendDiscountForCar.GetWeekendNoBrobizzPrice(); //Assert Assert.AreEqual(192, priceWeekendNoBrobizz); } } }<file_sep>/ClassLibraryTicketSystemTests/MCTests.cs using Microsoft.VisualStudio.TestTools.UnitTesting; using ClassLibraryTicketSystem; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace ClassLibraryTicketSystem.Tests { /// <summary> /// Tests for MC class methods /// </summary> [TestClass()] public class MCTests { /// <summary> /// Test with no Brobizz /// </summary> [TestMethod()] public void PriceTest() { //Arrange MC mc = new MC(); Brobizz brobizz = new Brobizz(false); //Act var result = mc.Price(brobizz); //Assert Assert.AreEqual(125, result); } /// <summary> /// Test to check vehicle type for MC /// </summary> [TestMethod()] public void VehicleTypeTest() { //Arrange MC mc = new MC(); //Act var result = mc.VehicleType(); //Assert Assert.AreEqual("MC", result); } /// <summary> /// Test with Brobizz discount /// </summary> [TestMethod()] public void PriceTest1() { //Arrange MC mc = new MC(); Brobizz brobizz = new Brobizz(true); //Act var result = mc.Price(brobizz); //Assert Assert.AreEqual(118.75, result); } } }<file_sep>/OresundBronTicketSystem/OresundMC.cs using ClassLibraryTicketSystem; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace OresundBronTicketSystem { // <summary> /// The OresundMC class inherites from VehicleBaseClass /// </summary> public class OresundMC : VehicleBaseClass { /// <summary> /// Empty constructor /// </summary> public OresundMC() { } /// <summary> /// Constructor to initialize all the properties /// </summary> /// <param name="licenseplate">Initializes Licenseplate property</param> /// <param name="date">Initializes Date property</param> public OresundMC(string licenseplate, DateTime date) : base(licenseplate, date) { } /// <summary> /// Price() method 
returns a regular price 210. Discount applies for Brobizz /// </summary> /// <returns>Double</returns> public override double Price(Brobizz brobizz) { if (brobizz.MyBrobizz == true) { return 73; } return 210; } /// <summary> /// VehicleType() method returns "Oresund MC" /// </summary> /// <returns>String</returns> public override string VehicleType() { return "Oresund MC"; } } } <file_sep>/OresundBronTicketSystem/OresundCar.cs using ClassLibraryTicketSystem; using System; namespace OresundBronTicketSystem { /// <summary> /// The OresundCar class inherites from VehicleBaseClass /// </summary> public class OresundCar : VehicleBaseClass { /// <summary> /// Empty constructor /// </summary> public OresundCar() : base() { } /// <summary> /// Constructor to initialize all the properties /// </summary> /// <param name="licenseplate">Initializes Licenseplate property</param> /// <param name="date">Initializes Date property</param> public OresundCar(string licenseplate, DateTime date) : base(licenseplate, date) { } /// <summary> /// Price() method returns a regular price 410. 
Discount applies for Brobizz /// </summary> /// <returns>Double</returns> public override double Price(Brobizz brobizz) { if (brobizz.MyBrobizz == true) { return 161; } return 410; } /// <summary> /// VehicleType() method returns "Oresund car" /// </summary> /// <returns>String</returns> public override string VehicleType() { return "Oresund car"; } } } <file_sep>/OresundBronTicketSystemTests/OresundCarTests.cs using ClassLibraryTicketSystem; using Microsoft.VisualStudio.TestTools.UnitTesting; using OresundBronTicketSystem; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace OresundBronTicketSystem.Tests { /// <summary> /// Tests for Oresund Car class methods /// </summary> [TestClass()] public class OresundCarTests { /// <summary> /// Test with no Brobizz /// </summary> [TestMethod()] public void PriceTest() { //Arrange OresundCar car = new OresundCar(); Brobizz brobizz = new Brobizz(false); //Act var result = car.Price(brobizz); //Assert Assert.AreEqual(410, result); } /// <summary> /// Method to test the type of the vehicle /// </summary> [TestMethod()] public void VehicleTypeTest() { //Arrange OresundCar car = new OresundCar(); //Act var result = car.VehicleType(); //Assert Assert.AreEqual("Oresund car", result); } /// <summary> /// Test with Brobizz discount /// </summary> [TestMethod()] public void PriceTest1() { //Arrange OresundCar car = new OresundCar(); Brobizz brobizz = new Brobizz(true); //Act var result = car.Price(brobizz); //Assert Assert.AreEqual(161, result); } } }<file_sep>/ClassLibraryTicketSystemTests/LicenseplateFunctionalityTests.cs using Microsoft.VisualStudio.TestTools.UnitTesting; using ClassLibraryTicketSystem; using System; using System.Collections.Generic; using System.Linq; using System.Text; using System.Threading.Tasks; namespace ClassLibraryTicketSystem.Tests { [TestClass()] public class LicenseplateFunctionalityTests { /// <summary> /// This test is for a 
longer than 7 character licenseplate of car. /// </summary> [TestMethod()] public void CheckAndCreateCarTest() { //Arrange string message = null; LicenseplateFunctionality numberplate = new LicenseplateFunctionality(); //Act try { Car car = numberplate.CheckAndCreateCar("CAR12345", DateTime.Now); } catch (Exception e) { message = e.Message; } //Assert Assert.AreEqual("Licenseplate is longer than 7 character !!!", message); } /// <summary> /// This test is for 7 character licenseplate of car. /// </summary> [TestMethod()] public void CheckAndCreateCarTest1() { //Arrange string message = null; Car car = new Car(); LicenseplateFunctionality numberplate = new LicenseplateFunctionality(); //Act try { car = numberplate.CheckAndCreateCar("CAR1234", DateTime.Now); } catch (Exception e) { message = e.Message; } //Assert Assert.AreEqual("CAR1234", car.Licenseplate); } /// <summary> /// This test is for a longer than 7 character licenseplate of MC. /// </summary> [TestMethod()] public void CheckAndCreateMCTest() { //Arrange string message = null; LicenseplateFunctionality numberplate = new LicenseplateFunctionality(); //Act try { MC mc = numberplate.CheckAndCreateMC("MC123456", DateTime.Now); } catch (Exception e) { message = e.Message; } //Assert Assert.AreEqual("Licenseplate is longer than 7 character !!!", message); } /// <summary> /// This test is for 7 character licenseplate of MC /// </summary> [TestMethod()] public void CheckAndCreateMCTest1() { //Arrange string message = null; MC mc = new MC(); LicenseplateFunctionality numberplate = new LicenseplateFunctionality(); //Act try { mc = numberplate.CheckAndCreateMC("MC12345", DateTime.Now); } catch (Exception e) { message = e.Message; } //Assert Assert.AreEqual("MC12345", mc.Licenseplate); } } }
65f7c29706a2ceb2656186d752525d2610d7d447
[ "C#" ]
14
C#
ziakhan82/ticketsystem-razuhmd
612db06da0a187343ec6be674abf0a5f7baaeb1d
9c56762f858ba8627a54d3abc6012b39e49ac48f
refs/heads/master
<repo_name>reffyferdiyatno/test<file_sep>/index.php <!DOCTYPE html> <html> <head> <title>Company</title> <link rel="stylesheet" type="text/css" href="css1/style.css"> </head> <body> <div class="box-form"> <marquee><h3>Company</h3></marquee><hr><br> <center><img src="img/profil/CM.png"width="200" height="200"></center> <?php if(isset($_GET['err1'])){ echo '<div class="alert-error">Maaf, input nomor telepon harus angka</div>'; } ?> <form action="proses-daftar.php" method="post"> Nama Lengkap :<br> <input type="text" name="nama" required/><br><br> No. Telepon :<br> <input type="text" name="telp" required/><br><br> Email :<br> <input type="email" name="email" required/><br><br> Alamat Lengkap :<br> <textarea name="alamat" rows="5" cols="50"></textarea><br></br><br></br> <center><input type="submit" name="daftar" value="Daftar"></center> </form> </div> </body> </html>
ccb268d193cff17bde4121c8962399262ffeb3f1
[ "PHP" ]
1
PHP
reffyferdiyatno/test
94fc47a3ec252cdf945ca9480f49c78b9ec31f6a
945ef891a62cb7f4ef2627a096c0d06267fc53b2
refs/heads/master
<file_sep>// // LogbookComposer.swift // htmlUPDATE // // Created by <NAME> on 2/2/18. // Copyright © 2018 AnnaDev7. All rights reserved. // import UIKit class LogbookComposer: NSObject { // Referance HTML Templates let pathToLogbookHTMLTemplate = Bundle.main.path(forResource: "flightRECORD_BODYTEMP", ofType: "html") let pathToLogbookHTMLLineTemplate = Bundle.main.path(forResource: "flightRECORD_LINE", ofType: "html") // hard programmed for now //let captNAME:String = "DENTON" override init() { super.init() } // generate line Entries func lineEntry(captain:String) -> String! { do { // Load the logbook HTML template code into a String variable. var HTMLContentLine = try String(contentsOfFile: pathToLogbookHTMLLineTemplate!) // Replace all the placeholders with real values except for the items. // The capt Name. HTMLContentLine = HTMLContentLine.replacingOccurrences(of: "#CAPTAIN#", with: captain) // Set the items. //HTMLContent = HTMLContent.replacingOccurrences(of: "#ITEMS#", with: allItems) // The HTML code is ready. return HTMLContentLine } catch { print("Unable to open and use line HTML template files.") } return nil } // take the HTML Template and pass in ENTRY data to give back HTML String func renderLogbook(entry:String) -> String! { // Store the invoice number for future use. //self.captNAME = entry do { // Load the logbook HTML template code into a String variable. var HTMLContent = try String(contentsOfFile: pathToLogbookHTMLTemplate!) // Replace all the placeholders with real values except for the items. // The capt Name. HTMLContent = HTMLContent.replacingOccurrences(of: "#LINEENTRY#", with: entry) // Set the items. //HTMLContent = HTMLContent.replacingOccurrences(of: "#ITEMS#", with: allItems) // The HTML code is ready. return HTMLContent } catch { print("Unable to open and use HTML template files.") } return nil } } <file_sep>// // PreviewViewController.swift // htmlUPDATE // // Created by <NAME> on 31/1/18. // Copyright © 2018 AnnaDev7. 
All rights reserved. // import UIKit class PreviewViewController: UIViewController, UIWebViewDelegate { @IBOutlet weak var webPreview: UIWebView! // import logbookComposer var var logbookComposer:LogbookComposer! var htmlContent:String! var slideCDs:[String]! override func viewDidLoad() { super.viewDidLoad() // Do any additional setup after loading the view. } override func viewWillAppear(_ animated: Bool) { createLogbookAsHTML() //displayFileAsHTML() } override func viewDidAppear(_ animated: Bool) { super.viewDidAppear(animated) //webPreview.transform = CGAffineTransform(translationX: 0, y: -28) } override func didReceiveMemoryWarning() { super.didReceiveMemoryWarning() // Dispose of any resources that can be recreated. } /* // MARK: - Navigation // In a storyboard-based application, you will often want to do a little preparation before navigation override func prepare(for segue: UIStoryboardSegue, sender: Any?) { // Get the new view controller using segue.destinationViewController. // Pass the selected object to the new view controller. } */ // displays the given file in the UIWebView // CHECK TEMPLATES func displayFileAsHTML() { // set url path and sets up a request let url = Bundle.main.url(forResource: "flightRECORD_BODYTEMP", withExtension:"html") let request = URLRequest(url: url!) 
// load the html file to the webView webPreview.loadRequest(request) // rotates the webView 90 deg clockwise webPreview.transform = CGAffineTransform(rotationAngle: (CGFloat(Double.pi/2))) // scales html content to fit the WebView webPreview.scalesPageToFit = true } /////// SETUP DATA ENTRY // set the dummy entry data func setData() { slideCDs = ["DENTON", "WATSON", "MILLER", "JONES"] } func correctDisplay() { var t = CGAffineTransform.identity t = t.rotated(by: (CGFloat(Double.pi/2))) t = t.translatedBy(x: 0, y: 0) //t = CGAffineTransform.init(rotationAngle: (CGFloat(Double.pi/2))) //t = CGAffineTransform.init(translationX: 0, y: 35) webPreview.transform = t //let centreX = NSLayoutConstraint(item: webPreview, attribute: .centerX, relatedBy: .equal, toItem: view, attribute: .centerX, multiplier: 1.0, constant: 0) //let centreY = NSLayoutConstraint(item: webPreview, attribute: .centerY, relatedBy: .equal, toItem: view, attribute: .centerY, multiplier: 1.0, constant: 50) //view.addConstraints([centreX, centreY]) self.edgesForExtendedLayout = [] } // Load HTML template and change its default values func createLogbookAsHTML() { // creates object to work with logbookComposer = LogbookComposer() // some dummy HARD data setData() // line data var lineEntry = "" // add line entries for each leg for i in 0..<slideCDs.count { lineEntry += logbookComposer.lineEntry(captain: slideCDs[i]) } // then add blank lines for remainder to make up a full page let reqBlanks = 46 - slideCDs.count //add blank line entries for _ in 0..<reqBlanks { lineEntry += logbookComposer.lineEntry(captain: "") } // change HTML file with inputs and set the htmlContent var AND Display it on the WEBVIEW if let logbookHTML = logbookComposer.renderLogbook(entry: lineEntry) { // load HTML String into the webview webPreview.loadHTMLString(logbookHTML, baseURL: NSURL(string: logbookComposer.pathToLogbookHTMLTemplate!)! 
as URL) htmlContent = logbookHTML // rotates the webView 90 deg clockwise //webPreview.transform = CGAffineTransform(rotationAngle: (CGFloat(Double.pi/2))) // TEST ONLY Scale webView to fit bounds //webPreview.frame = self.view.bounds // move the webview below nav bar //let leftCont = NSLayoutConstraint(item: webPreview, attribute: .leftMargin, relatedBy: .equal, toItem: view, attribute: .leftMargin, multiplier: 1.0, constant: 0) //let rightCont = NSLayoutConstraint(item: webPreview, attribute: .rightMargin, relatedBy: .equal, toItem: view, attribute: .rightMargin, multiplier: 1.0, constant: 0) //let topCont = NSLayoutConstraint(item: webPreview, attribute: .topMargin, relatedBy: .equal, toItem: view, attribute: .topMargin, multiplier: 1.0, constant: 0) //let botCont = NSLayoutConstraint(item: webPreview, attribute: .bottomMargin, relatedBy: .equal, toItem: view, attribute: .bottomMargin, multiplier: 1.0, constant: 0) //view.addConstraints([leftCont, rightCont, topCont, botCont]) // scales html content to fit the WebView webPreview.scalesPageToFit = true correctDisplay() } } }
1abaf5f291538f0b2441afa0d408cba85c1c216d
[ "Swift" ]
2
Swift
DevAnna7/htmlUPDATE
ba979eb7b1bdc5139265cd55b5aa612ee01e30ac
b85ac587b52c0b0732a42f149c52d1bce2fecff9
refs/heads/main
<repo_name>AmirhoseinDelavar/LinkStateProtocol<file_sep>/README.md # LinkStateProtocol dijkstra algorithm with link state packet connecting routers together, used main function to initiallize tcp threads for manager connection to routers. <file_sep>/Manager.py import heapq import os import sys import time import threading import socket from multiprocessing import Process from multiprocessing import set_start_method PROJECT_PATH = 'D:/Uni-Courses/Network/Final-Pro/' MAX_COST = 99 PORTS_START = 65432 # shared vars routers_cnt = 0 manager_tcp_shared = [] routers_acked_l = threading.Lock() routers_safe_l = threading.Lock() routers_acked = 0 routers_safe = 0 log_f = 0 man_order = {} def router_tranmiter(soc,f,ft,n_udp_port,socs,n_udp_ip): packet = list(soc.recvfrom(1024)[0]) if packet[1] == id: f.write('Packet rec from' + str(packet[2]) + ' ' + '\n') f.flush() else: next_id = ft[packet[1]][0] print(next_id) next_port = PORTS_START - next_id - 1 next_index = n_udp_port.index(next_port) socs[next_index].sendto(bytes(packet), (n_udp_ip[next_index], next_port)) f.write('Transit Packet ' + str(packet[2]) + ' to ' + str(packet[1]) + '\n') f.flush() def dijkstra(graph, src, dest, visited, distances, predecessors): """ calculates a shortest path tree routed in src """ # ending condition if src == dest: # We build the shortest path and display it path = [] pred = dest while pred != None: path.append(pred) pred = predecessors.get(pred, None) # reverses the array, to display the path nicely path.pop() path.reverse() return path else: # if it is the initial run, initializes the cost if not visited: distances[src] = 0 # visit the neighbors for neighbor in graph[src]: if neighbor not in visited: new_distance = distances[src] + graph[src][neighbor] if new_distance < distances.get(neighbor, float('inf')): distances[neighbor] = new_distance predecessors[neighbor] = src # mark as visited visited.append(src) # now that all neighbors have been visited: recurse # select the non visited node 
with lowest distance 'x' # run Dijskstra with src='x' unvisited = {} for k in graph: if k not in visited: unvisited[k] = distances.get(k, float('inf')) x = min(unvisited, key=unvisited.get) return dijkstra(graph, x, dest, visited, distances, predecessors) def router(id): udp_ip = '127.0.0.' + str(id + 2) udp_port = PORTS_START - id - 1 neighbors = [] n_udp_port = [] n_udp_ip = [] # list of udp sockets socs = [] # distance vector for topology d_dic = {} # forwarding table ft = {} f = open('router{0}.txt'.format(id), 'a') s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) try: # connect with tcp to Manager s.connect(('127.0.0.1', PORTS_START + id)) # send UDP port to manager s.sendall(bytes(PORTS_START - id - 1)) # get connectivity table neighbors = list(s.recv(1024)) f.write('Connectivity Table: ' + repr(list(neighbors)) + '\n') f.flush() # cal neighbors udp_port for index, val in enumerate(neighbors): if MAX_COST > val > 0: n_udp_port.append(PORTS_START - index - 1) n_udp_ip.append('127.0.0.' 
+ str(index + 2)) f.write('UDPS TO Connect: ' + str(n_udp_port) + '\n') f.flush() # setup router server udp soc = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM) soc.bind((udp_ip, udp_port)) # send ready sig to manager s.sendall(b'ready') status = s.recv(1024) f.write('Status From Manager: ' + repr(str(status)) + '\n') f.flush() # setup udp connections for ip, port in zip(n_udp_ip, n_udp_port): soc_t = socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM) # Listen for incoming datagrams soc_t.sendto(str.encode('ack from {0} to {1}'.format(id, PORTS_START - port - 1)), (ip, port)) socs.append(soc_t) for port in n_udp_port: f.write(str(soc.recvfrom(1024)[0]) + '\n') f.flush() # tell manager all neighbors are ok s.sendall(b'Acked') status = s.recv(1024) f.write('Status From Manager: ' + repr(str(status)) + '\n') f.flush() # send LSP to all neighbors d_dic[id] = list(neighbors) for soc_t, ip, port in zip(socs, n_udp_ip, n_udp_port): t_list = neighbors.copy() t_list.append(id) soc_t.sendto(bytes(t_list), (ip, port)) f.write('Send LSP To Neighbors\n') f.flush() # receive and resend LSP to all neighbors while True: resp = list(soc.recvfrom(1024)[0]) neigh_id = resp.pop() if neigh_id not in d_dic.keys(): d_dic[neigh_id] = resp resp.append(neigh_id) for soc_t, ip, port in zip(socs, n_udp_ip, n_udp_port): soc_t.sendto(bytes(resp), (ip, port)) if len(d_dic.items()) == routers_cnt: break # log network topology f.write(str(d_dic) + '\n') f.flush() # build graph graph = {} for key in d_dic.keys(): graph[key] = {} for index, val in enumerate(d_dic.get(key)): if index >= routers_cnt: break if MAX_COST > val > 0: graph[key][index] = val # log graph network topology f.write(str(graph) + '\n') f.flush() # create SPT for i in range(routers_cnt): if i == id: ft[id] = [id] else: ft[i] = dijkstra(graph, id, i,[],{},{}) f.write(str(ft)) f.flush() # save SPT f_spt = open('routerSPT{0}.txt'.format(id), 'w') f_spt.write(str(ft)) f_spt.flush() f_spt.close() # resend 
received packet # send or quit order from manager # newt = threading.Thread(target=router_tranmiter, args=(soc,f,ft,n_udp_port,socs,n_udp_ip,), daemon=True) # newt.start() s.settimeout(0.1) soc.settimeout(0.1) while True: try: order = s.recv(1024) if int(bytes.decode(order, encoding='UTF-8')) == MAX_COST: f.write('Quit' + str(id) + '\n') f.flush() break else: dest_id = int(bytes.decode(order, encoding='UTF-8')) next_id = ft[dest_id][0] next_port = PORTS_START - next_id - 1 next_index = n_udp_port.index(next_port) packet = [223, dest_id, id] socs[next_index].sendto(bytes(packet), (n_udp_ip[next_index], next_port)) f.write('Start Transmission Packet ' + str(id) + ' to ' + str(dest_id) + '\n') f.flush() except Exception as e: pass try: packet = list(soc.recvfrom(1024)[0]) if packet[1] == id: f.write('Packet rec from' + str(packet[2]) + ' ' + '\n') f.flush() else: next_id = ft[packet[1]][0] next_port = PORTS_START - next_id - 1 next_index = n_udp_port.index(next_port) socs[next_index].sendto(bytes(packet), (n_udp_ip[next_index], next_port)) f.write('Transit Packet ' + str(packet[2]) + ' to ' + str(packet[1]) + '\n') f.flush() except Exception as e: pass # closing thread soc.close() s.close() f.close() except Exception as e: print(e.with_traceback()) def manager_tcp(i): global routers_acked global manager_tcp_shared global routers_acked_l global routers_safe global routers_safe_l global man_order s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.bind(('127.0.0.1', PORTS_START + i)) s.listen() conn, addr = s.accept() # save log for Starting TCP to Router log_f.write('Starting TCP connection R {0}\n'.format(i)) log_f.flush() try: udp_port = conn.recv(1024) conn.sendall(bytes(manager_tcp_shared[i])) # save log for Connectivity Table to Router log_f.write('Connectivity Table R {0}\n'.format(i)) log_f.flush() # send safety message ready_sig = conn.recv(1024) routers_safe_l.acquire() routers_safe -= 1 routers_safe_l.release() while routers_safe != 0: pass 
conn.sendall(b'safe') log_f.write('Safe for R {0}\n'.format(i)) log_f.flush() # acked and ready for routing ready_sig = conn.recv(1024) routers_acked_l.acquire() routers_acked -= 1 routers_acked_l.release() while routers_acked != 0: pass conn.sendall(b'route!!') log_f.write('Ready to Route Sent for R {0}\n'.format(i)) log_f.flush() # send order to dest while i not in man_order.keys(): pass conn.sendall(str(man_order[i]).encode()) log_f.write('Send Order R {0} msg {1}\n'.format(i, man_order[i])) log_f.flush() time.sleep(5.005) # send quit conn.sendall(str(man_order[i]).encode()) log_f.write('Send Order R {0} msg {1}\n'.format(i, man_order[i])) log_f.flush() except Exception as e: print(e.with_traceback()) def main(): global manager_tcp_shared global routers_acked global routers_cnt global routers_safe global man_order # keeping distances d = [] # routers ip and ports ips = [] tcp_ports = [] # threads list t = [] # TCP Sockets to Routers tcp_s = [] # packets send list orders = [] # log file for manager global log_f log_f = open('manager.txt', 'a') with open('config.txt') as f: routers_cnt = int(f.readline()) routers_acked = routers_cnt routers_safe = routers_cnt d = [[MAX_COST if i != j else 0 for i in range(routers_cnt)] for j in range(routers_cnt)] for line in f.readlines(): inputs = line.split(sep=' ') d[int(inputs[0])][int(inputs[1])] = int(inputs[2]) d[int(inputs[1])][int(inputs[0])] = int(inputs[2]) # save log for reading config log_f.write('Read Config\n') log_f.flush() # fill connectity tables for routers manager_tcp_shared = [d[i] for i in range(routers_cnt)] # instantiate routers for i in range(routers_cnt): # save log for creating routers log_f.write('Created Router {0}\n'.format(i)) log_f.flush() newt = threading.Thread(target=router, args=(i,), daemon=True) newt.start() t.append(newt) ips.append('127.0.0.' 
+ str(i + 2)) tcp_ports.append(str(PORTS_START + i)) # connect with tcp to routers for i in range(routers_cnt): newt = threading.Thread(target=manager_tcp, args=(i,), daemon=True) newt.start() tcp_s.append(newt) # read packet sending orders with open('orders.txt') as f_orders: for line in f_orders.readlines(): inputs = line.strip().split(sep=' ') orders.append([int(inputs[0]), int(inputs[1])]) # send relative packets to routers for order in orders: man_order[order[0]] = order[1] print(man_order) time.sleep(5) # send quit order for order in orders: man_order[order[0]] = MAX_COST time.sleep(0.005) while True: pass if __name__ == '__main__': main()
528941811a9d8c0456d3fc59952d44fce5fb1e81
[ "Markdown", "Python" ]
2
Markdown
AmirhoseinDelavar/LinkStateProtocol
e970a7d688cce2673b4c7339c86a043991a08491
243723e2b67ee0e8faffdb53e7356eb248b82914
refs/heads/master
<file_sep><?php //заглушка echo '404 Страница не найдена'; <file_sep><?php namespace blog; //Абстрактный класс для работы с записями //от него наследуются комментарии и блоги abstract class abstractText { protected $ref; //референс записи protected $author; //автор записи protected $date; //дата создания записи protected $text; //текст записи protected $conn; //дескриптор соединения abstract public function addOne();//функция для сохранения одной записи //проверка на пустое занчение protected function isEmpty($temp) { return (mb_strlen($temp)) ? 0 : 10; } //очистка данных protected function allClear($temp) { $temp = strip_tags($temp); $temp = htmlspecialchars($temp); return $temp; } } <file_sep><?php //основной файл для работы с конкретной блоговой записью //Подключаем хедер require_once 'view/layouts/main.php'; //Подключаем все классы require_once 'controller/absractText.php'; require_once 'controller/Blog.php'; require_once 'controller/Comment.php'; //проверка на наличие переменной id в урле if (isset($_GET['id'])) { $blog_id = $_GET['id']; //Если есть, то присваиваем ее значение переменной $data = blog\Blog::getOne($blog_id, $conn); //вытаскиваем все данные по этому номеру блога //проверка на неверное значение референса блога if (!$data) { require_once 'view/NotFound.php'; die; } //вьюха для отображения развернутой информации по блогу require_once 'view/record.php'; //пост запрос, который сохраняет данные по новому комменту if (@$_POST['send']) { $new_comment = new \blog\Comment($_POST, $conn, $blog_id); $new_comment->addOne(); } //получим все комментарии по этому блогу $data = blog\Comment::getAll($conn, $blog_id); //генерим вьюху на вывод всех комментов require_once 'view/layouts/outAll.php'; //генерим вьюху на добавление нового коммента require_once 'view/layouts/addText.php'; } else { require_once 'view/NotFound.php'; die; }<file_sep><hr> <!-- Вьюха для вывода информации по всем записям (комментам или блогам) --> <div class="container"> <h3 
class="text-center">Список </h3> <br> <?php foreach ($data as $k => $v): ?> <div class="panel panel-primary"> <div class="panel-heading"> <h3 class="panel-title"><?php echo $v['author']; ?></h3> </div> <div class="panel-body"> <?php echo $v['text']; ?>... </div> <small><span class="glyphicon glyphicon-calendar" aria-hidden="true"></span>:<?php echo $v['date']; ?></small> <?php echo (isset($v['kol']))? '<small><span class="glyphicon glyphicon-bullhorn" aria-hidden="true"></span>:'.$v['kol'].' </small> ' . '<a href=record.php?id='.$v['ref'].'>Перейти</a>':'' ;?> </div> <?php endforeach; ?> </div> <file_sep><?php namespace blog; //класс для работы с блоговыми записями class Blog extends abstractText { public function __construct($array, $conn) { $author = $this->isEmpty($array['author']) ? 'Guest' : $array['author']; //Проверка на налаичие автора, если нету, пишем что гость $text = $array['blog']; //очистим и экранируем входящие данные $author = $this->allClear($author); $text = $this->allClear($text); $this->ref = date('Ymd') . 'BL' . date('His') . 
rand(10, 99); $this->author = $author; $this->date = date('Y:m:d H:i:s'); $this->text = $text; $this->conn = $conn; } //Выбор всех блоговых записей public static function getAll($conn) { $data = $conn->query("select t1.ref,t1.author,substr(t1.text,1,100)as 'text',t1.date,IFNULL(t2.kol,0)as 'kol' from blog as t1 left join (SELECT ref_blog,count(ref_com) as 'kol' FROM blog.comments group by ref_blog) as t2 on t1.ref=t2.ref_blog order by date desc")->fetchAll(); return $data; } //выбор одной записи по id public static function getOne($id, $conn) { $stmt = $conn->prepare("select t1.ref,t1.author,t1.text,t1.date,IFNULL(t2.kol,0)as 'kol' from blog as t1 left join (SELECT ref_blog,count(ref_com) as 'kol' FROM blog.comments group by ref_blog) as t2 on t1.ref=t2.ref_blog where t1.ref=?"); $stmt->execute(array($id)); $ret=$stmt->fetch(); return $ret; } //Сохранение нового блога public function addOne() { try { $sql = "INSERT INTO blog (ref, author,date,text) VALUES (?, ?,?,?);"; $stmt = $this->conn->prepare($sql); $stmt->execute(array($ref = $this->ref, $author = $this->author, $date = $this->date, $text = $this->text)); } catch (PDOException $e) { echo $e->getMessage(); } } //выбор топ-5 записей для слайдера public static function getTop($conn) { $data = $conn->query("select t1.ref,t1.author,substr(t1.text,1,100) as 'text' from blog as t1 left join (SELECT ref_blog,count(ref_com) as 'kol' FROM blog.comments group by ref_blog) as t2 on t1.ref=t2.ref_blog order by kol desc limit 5")->fetchAll(); return $data; } } <file_sep><?php namespace blog; //Класс для работы с коментариями class Comment extends abstractText { private $blog_id;//референс блога, к которму относится коментарий public function __construct($array, $conn,$id) { $author = $this->isEmpty($array['author']) ? 'Guest' : $array['author'];//проверка на автора. 
Если автор пустой, ставим гостя $text = $array['blog']; $blog_id = $id; //очистим и экранируем входящие данные $author = $this->allClear($author); $text = $this->allClear($text); $blog_id=$this->allClear($id); $this->ref = date('Ymd') . 'CM' . date('His') . rand(10, 99); $this->author = $author; $this->date = date('Y:m:d H:i:s'); $this->text = $text; $this->conn = $conn; $this->blog_id=$blog_id; ; } //Выбираем все коментарии для определенного блога public static function getAll($conn,$id) { $stmt = $conn->prepare("select * from comments where ref_blog=? order by date asc"); $stmt->execute(array($id)); $data=$stmt->fetchAll(); return $data; } //сохраняем коментарий public function addOne() { try { $sql = "INSERT INTO comments (ref_com,ref_blog, date,author,text) VALUES (?,?,?,?,?);"; $stmt = $this->conn->prepare($sql); $stmt->execute(array($ref_com = $this->ref,$ref_blog=$this->blog_id, $date = $this->date, $author = $this->author,$text = $this->text)); } catch (PDOException $e) { echo $e->getMessage(); } } } <file_sep>CREATE DATABASE `blog` /*!40100 DEFAULT CHARACTER SET utf8 */; CREATE TABLE blog.`blog` ( `ref` varchar(18) NOT NULL, `author` varchar(45) DEFAULT NULL, `date` datetime DEFAULT NULL, `text` longtext, PRIMARY KEY (`ref`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; CREATE TABLE blog.`comments` ( `ref_com` varchar(18) NOT NULL, `ref_blog` varchar(18) DEFAULT NULL, `date` datetime DEFAULT NULL, `author` varchar(45) DEFAULT NULL, `text` tinytext, PRIMARY KEY (`ref_com`), KEY `ref_idx` (`ref_blog`), CONSTRAINT `ref` FOREIGN KEY (`ref_blog`) REFERENCES blog.`blog` (`ref`) ON DELETE CASCADE ON UPDATE CASCADE ) ENGINE=InnoDB DEFAULT CHARSET=utf8;<file_sep><?php //основной файл для работы с основной страницей //Подключаем хедер require_once 'view/layouts/main.php'; require_once 'view/slide/slide.php'; //подключаем слайдер use blog\Blog; //Подключаем классы для работы require_once 'controller/absractText.php'; require_once 'controller/Blog.php'; //Обработка пост 
запроса на сохранения блоговой записи if (@$_POST['send']) { $new_blog = new Blog($_POST, $conn); $new_blog->addOne(); } //получаем данные по всем блогам $data=Blog::getAll($conn); //генерим вьюху для отображения всех записей require_once 'view/layouts/outAll.php'; //генерим вьюху для добавления новой записи require_once 'view/layouts/addText.php'; <file_sep><?php /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * Description of db_conf * * @author DN220491TIA */ class db_conf { public $con; //Сюда запишем дескриптор соединения private final function Con() { //функция подключения к бд, закрыта, и без возможности изменения // $dsn = 'mysql:dbname=test;host=localhost'; $user = "test"; //Имя пользователя $password = '<PASSWORD>'; $this->con = new PDO($dsn, $user, $password) or die("Не удалось подключится"); //Подключаемся } public function __construct() { //Конструктор $this->Con(); //Подключаемся } public function __destruct() { //Деструтор $this->con = NULL; } } <file_sep><?php //Основная страница для подключения к бд, хедера и навбара include_once 'db_conf.php'; $db = new db_conf(); $conn = $db->con; ?> <!DOCTYPE html> <html lang="en"> <?php require_once 'view/head/head.php' ?> <body> <?php require_once 'view/navbar/navbar.php'; //Подключаем навбар ?> </body> </html><file_sep>Install: Clone project to new directory; Adjust: blog.sql - DB structure; db_conf.php - DB connection config. Start: Run index.php <file_sep><!-- вьюха для отображения развернутой информации по блогу --> <div class="container"> <h3 class="text-center">данные о статье</h3> <br> <div class="well"> <strong>Author:</strong><?php echo $data['author'] ?> <strong>Дата публикации:</strong><?php echo $data['date'] ?> <strong>Кол-во коментариев:</strong><?php echo $data['kol'] ?> <p><?php echo $data['text'] ?></p> </div> </div>
1f316a807fd0f8cddac5f8158379f6cbee55d653
[ "Markdown", "SQL", "PHP" ]
12
PHP
HomerJS/blog
a2a99747f975f9a088a6015fcbc91c13a08a3a1f
8a05c990f2ce9cdcb8549886343c880b267ac7cb
refs/heads/main
<repo_name>Epidiah/nimmt-a-tronic<file_sep>/nimmt.py # A 6 Nimmt Simulator for testing out weird beef-strategies from random import shuffle def construct_deck(): """ Assembles the cards because I'm not paid enough for data entry. """ deck = [] for number in range(1, 105): card = {"number": number} if number % 11: if number % 5 == 0: if number % 10 == 0: card["heads"] = 3 else: card["heads"] = 2 else: card["heads"] = 1 else: if number % 5 == 0: card["heads"] = 7 else: card["heads"] = 5 card["name"] = english_name(card) card["nickname"] = nickname(card) deck.append(card) return deck def english_name(card): """ Given a card, returns the number of the card written out in English """ ones = [ "", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", # Extended to 19 so that I don't have to write logic around those names "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen", "seventeen", "eighteen", "nineteen", ] tens = [ "", "", "twenty", "thirty", "fourty", "fifty", "sixty", "seventy", "eighty", "ninety", "one hundred", ] if card["number"] <= 19: name = ones[card["number"]] else: if card["number"] % 10 and card["number"] < 100: divider = "-" else: divider = " " name = tens[card["number"] // 10] + divider + ones[card["number"] % 10] return name.rstrip() def nickname(card): """ Given a card, returns our weird nickname for it. 
""" if card["number"] % 11: return card["name"] else: return [ "unsy-uns", "deuxsy-deux", "toisy-tois", "quatrosy-quatre", "cinqy-cinq", "seezy-seeze", "septy-sept", "huity-huit", "neufy-neuf", ][card["number"] % 11] class game: def __init__(self, *players): if not (2 <= len(players) <= 10): raise ValueError("Only works with 2 to 10 players.") self.players = players self.reset() def reset(self): for player in self.players: player.score = 66 self.table = [[], [], [], []] def deal(self): self.deck = construct_deck() shuffle(self.deck) for player in self.players: player.hand = [] for n in range(10): player.hand.append(self.deck.pop()) for n in range(4): self.table[n].clear() self.table[n].append(self.deck.pop()) self.high_cards = [row[-1]["number"] for row in self.table] def score_row(self, row): return sum([card["heads"] for card in self.table[row]]) def place_card(self, card, player): if card["number"] < min(self.high_cards): row = player.row_strategy(player, self) player.score -= self.score_row(row) self.table[row].clear() self.table[row].append(card) else: distances = [card["number"] - hc for hc in self.high_cards] closest = distances.index(min(distances)) if len(self.table[closest]) >= 5: player.score -= self.score_row(closest) self.table[closest].clear() self.table[closest].append(card) self.high_cards = [row[-1]["number"] for row in self.table] def play_turn(self): self.cards_played = sorted([ (player.card_strategy(player, self), player) for player in self.players ], key=lambda x: x[0]['number'], reverse=True) while self.cards_played: card, player = self.cards_played.pop() self.place_card(card, player) player.hand.pop(player.hand.index(card)) def play(self): hands = 0 while min([player.score for player in self.players]) > 0: hands += 1 self.deal() for n in range(10): self.play_turn() print( f"{hands}: {[(player.name, player.score) for player in self.players]}" ) return hands, sorted([(player.name, player.score) for player in self.players], key=lambda x: x[1], 
reverse=True ) class player: def __init__(self, name, card_strategy, row_strategy): self.name = name self.card_strategy = card_strategy self.row_strategy = row_strategy def choose_card(self, game): return self.card_trategy(game) def choose_row(self, game): return self.card_trategy(game) def __str__(self): return self.name def basic_card_strategy(player, game): hand = sorted(player.hand, key=lambda c: c["number"], reverse=True) # First check for a safe play and play the highest safe play. slots = [5 - len(row) for row in game.table] for card in hand: for row in range(4): if ( game.high_cards[row] < card["number"] <= game.high_cards[row] + slots[row] ): return card # Then check for least risky brisket and play highest of those play = None risk = 1 for card in hand: for row in range(4): if (card["number"] - (game.high_cards[row] + 2 * slots[row])) < risk: risk = card["number"] - (game.high_cards[row] + 2 * slots[row]) play = card if play: return play # Finally, plays lowest card return hand[-1] def basic_row_strategy(player, game): chosen_row = 5 damage = 28 for row in range(len(game.table)): row_total = sum([card["heads"] for card in game.table[row]]) if row_total < damage: damage = row_total chosen_row = row return chosen_row # Testing a = player("a", basic_card_strategy, basic_row_strategy) b = player("b", basic_card_strategy, basic_row_strategy) c = player("c", basic_card_strategy, basic_row_strategy) d = player("d", basic_card_strategy, basic_row_strategy) g = game(a, b, c, d) <file_sep>/README.md # nimmt-a-tronic
04c4e8eab578196eebbddb829026b2cbfc3e1ac3
[ "Markdown", "Python" ]
2
Python
Epidiah/nimmt-a-tronic
97bdb9608fb6f0dbe60d63c58cdf642f31d8a245
733affe574ff2ac204cab0f31fca485a8e4b5f1d
refs/heads/master
<repo_name>FULLXOACINC/TableStudentsEditorClientServer<file_sep>/src/Server/Model.java package Server; import library.Student; import java.util.ArrayList; import java.util.List; /** * Created by alex on 15.3.17. */ public class Model { public final int SEMESTER_NUMBER = 10; private List<Student> students; private int currentPage; private int studentOnPage ; public Model() { students = new ArrayList<Student>(); currentPage = 1; studentOnPage = 5; } public List<Student> getStudents() { return students; } public int getStudentOnPage() { return studentOnPage; } public int getCurrentPage() { return currentPage; } public void nextPage(){ boolean hasNextPage=students.size() > studentOnPage * (currentPage - 1) + studentOnPage; if (hasNextPage) currentPage++; } public void prevPage(){ if (currentPage > 1) currentPage--; } public void firstPage(){ if (currentPage > 1) currentPage = 1; } public void lastPage(){ if (currentPage != getNumberMaxPage()) currentPage = getNumberMaxPage(); } public int getNumberMaxPage() { return ((students.size() - 1)/ studentOnPage) + 1; } public void setStudentOnPage(int studentOnPage) { this.studentOnPage = studentOnPage; } public void addStudent(Student student) { students.add(student); } public void setStudents(List<Student> students) { this.students = students; } } <file_sep>/src/Server/SearchStrategyPackage/SearchContext.java package Server.SearchStrategyPackage; import library.Student; import java.util.List; /** * Created by alex on 29.3.17. */ public class SearchContext { private SearchStrategy searchStrategy; public SearchContext(SearchStrategy searchStrategy) { this.searchStrategy=searchStrategy; } public List<Student> executeSearchStrategy(List<Student> students){ return searchStrategy.execute(students); } } <file_sep>/src/library/SocialWork.java package library; import java.io.Serializable; /** * Created by alex on 29.3.17. 
*/ public class SocialWork implements Serializable { private String work; public SocialWork(String work) { this.work = work; } public String getWork() { return work; } } <file_sep>/src/Server/SearchStrategyPackage/Find.java package Server.SearchStrategyPackage; import library.SocialWork; import java.util.List; /** * Created by alex on 31.3.17. */ public class Find { public static boolean correctGroup(String group, String searchGroup) { return group.equals(searchGroup); } public static boolean correctName(String name,String searchName){ return name.equals(searchName); } public static boolean findSocialWorkBitweenMinAndMax(String searchSocialWork, List<SocialWork> student, String minCount, String maxCount) { if (minCount.equals("-") && maxCount.equals("-")) return findSocialWork(searchSocialWork,student); int min = 0; if (!minCount.equals("-")) min = Integer.parseInt(minCount); int max = 0; if (!maxCount.equals("-")) max = Integer.parseInt(maxCount); int count = 0; for (SocialWork elOfSocialWork : student) { if (elOfSocialWork.getWork().equals(searchSocialWork)) count++; } return count >= min && count <= max; } private static boolean findSocialWork(String searchSocialWork, List<SocialWork> student) { for (SocialWork elOfSocialWork : student) { if (elOfSocialWork.getWork().equals(searchSocialWork)) return true; } return false; } } <file_sep>/src/client/Dialogs/Dialog.java package client.Dialogs; import library.AddComponent; import javax.swing.*; import java.awt.*; import java.awt.event.ActionListener; /** * Created by alex on 30.3.17. 
*/ public class Dialog { private final String LAST_NAME = "Фамилия:"; private final String GROUP = "Группа:"; private final String SOCIAL_WORK = "Общественная работа:"; private final String COUNT_OF_SOCIAL_WORK = "Каличество общественной работы:"; private JTextField lastName; private JTextField group; private JComboBox minCount; private JComboBox maxCount; private JTextField socialWork; private JFrame frame; public Dialog(String dialogType,ActionListener actionListener) { frame = new JFrame(dialogType); JLabel labelText = new JLabel(); JPanel jPanelID = new JPanel(); jPanelID.setLayout(new GridBagLayout()); labelText.setHorizontalAlignment(JLabel.CENTER); AddComponent.add(jPanelID,labelText, 0, 0, 3, 1); String[] labelString = {LAST_NAME, GROUP,SOCIAL_WORK, COUNT_OF_SOCIAL_WORK}; labelText = new JLabel(labelString[0]); AddComponent.add(jPanelID,labelText, 0, 1, 1, 1); lastName = new JTextField(30); AddComponent.add(jPanelID, lastName, 1, 1, 3, 1); labelText = new JLabel(labelString[1]); AddComponent.add(jPanelID, labelText, 0, 2, 1, 1); group = new JTextField(30); AddComponent.add(jPanelID, group, 1, 2, 3, 1); labelText = new JLabel(labelString[2]); AddComponent.add(jPanelID, labelText, 0, 3, 1, 1); socialWork = new JTextField(30); AddComponent.add(jPanelID, socialWork, 1, 3, 3, 1); String[] markString = {"-","1","2","3", "4", "5", "6", "7", "8", "9", "10"}; labelText = new JLabel(labelString[3]); labelText.setHorizontalAlignment(JLabel.CENTER); AddComponent.add(jPanelID,labelText, 0, 4, 1, 1); minCount = new JComboBox(markString); AddComponent.add(jPanelID, minCount, 1, 4, 1, 1); maxCount = new JComboBox(markString); AddComponent.add(jPanelID, maxCount, 2, 4, 1, 1); frame.add(jPanelID, BorderLayout.NORTH); JButton deleteButton = new JButton(dialogType); deleteButton.addActionListener(actionListener); frame.add(deleteButton, BorderLayout.SOUTH); } public JFrame getFrame() { return frame; } public String getLastName() { return lastName.getText(); } public String 
getGroup() { return group.getText(); } public String getMinCount() { return String.valueOf(minCount.getSelectedItem()); } public String getMaxCount() { return String.valueOf(maxCount.getSelectedItem()); } public String getSocialWork() { return socialWork.getText(); } } <file_sep>/src/Server/SearchStrategyPackage/NameAndSocialWorkAndGroupSearch.java package Server.SearchStrategyPackage; import library.Student; import java.util.ArrayList; import java.util.List; /** * Created by alex on 31.3.17. */ public class NameAndSocialWorkAndGroupSearch implements SearchStrategy { private String lastName; private String group; private String minCount; private String maxCount; private String socialWork; public NameAndSocialWorkAndGroupSearch(String lastName, String group, String minCount, String maxCount, String socialWork){ this.lastName = lastName; this.group = group; this.minCount = minCount; this.maxCount = maxCount; this.socialWork = socialWork; } @Override public List<Student> execute(List<Student> students) { List<Student> searchStudent= new ArrayList<Student>(); for(Student student:students){ boolean correctNameAndGroupAndSocialWork =Find.correctName(lastName,student.getLastName())|| Find.correctGroup(group,student.getGroupNumber())|| Find.findSocialWorkBitweenMinAndMax(socialWork,student.getSocialWork(), minCount, maxCount); if(correctNameAndGroupAndSocialWork) searchStudent.add(student); } return searchStudent; } } <file_sep>/src/client/MainWindow.java package client; import client.Dialogs.AddDialog; import client.Dialogs.DeleteDialog; import client.Dialogs.SearchDialog; import library.AddComponent; import library.Constants; import org.apache.log4j.PropertyConfigurator; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.util.regex.Pattern; /** * Created by alex on 15.3.17. 
*/ public class MainWindow { private StudentTable studentTable; private StudentTable searchPanel; private JTextField port; private JTextField host; private Client client; private boolean connect = false; public MainWindow() { JFrame frame = new JFrame("Таблица общественных работ студентов"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.setJMenuBar(createFileMenu()); frame.add(createToolBar(), BorderLayout.PAGE_START); studentTable = new StudentTable(); studentTable.setNamePanel(Constants.MAIN_PANEL); searchPanel = new StudentTable(); searchPanel.setNamePanel(Constants.SEARCH_PANEL); frame.add(studentTable, BorderLayout.CENTER); frame.setExtendedState(JFrame.MAXIMIZED_BOTH); frame.setVisible(true); } private JMenuBar createFileMenu() { JMenuBar menuBar = new JMenuBar(); JMenu fileMenu = new JMenu("Файл"); Font font = new Font("Verdana", Font.ITALIC, 12); fileMenu.setFont(font); JMenuItem openItem = new JMenuItem("Открыть"); openItem.setFont(font); fileMenu.add(openItem); openItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { openFile(); } }); JMenuItem saveItem = new JMenuItem("Сохранить"); saveItem.setFont(font); fileMenu.add(saveItem); saveItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { saveFile(); } }); fileMenu.addSeparator(); JMenuItem exitItem = new JMenuItem("Выйти"); exitItem.setFont(font); fileMenu.add(exitItem); exitItem.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { System.exit(0); } }); menuBar.add(fileMenu); JMenu table = new JMenu("Таблица"); table.setFont(font); JMenuItem add = new JMenuItem("Добавление"); add.setFont(font); table.add(add); add.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { addDialog(); } }); JMenuItem delete = new JMenuItem("Удаление"); delete.setFont(font); table.add(delete); delete.addActionListener(new ActionListener() { public void 
actionPerformed(ActionEvent e) { deleteDialog(); } }); JMenuItem search = new JMenuItem("Поиск"); search.setFont(font); table.add(search); search.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { searchDialog(); } }); menuBar.add(table); return menuBar; } private JToolBar createToolBar() { JToolBar toolBar = new JToolBar(); toolBar.add(AddComponent.makeButton(new JButton(), "connect.png", new ActionListener() { public void actionPerformed(ActionEvent e) { Frame clientDialog = new JFrame("Настройка подключения"); JToolBar toolBar1 = new JToolBar(); clientDialog.setVisible(true); clientDialog.setLocationRelativeTo(null); clientDialog.setSize(400, 20); clientDialog.setResizable(true); JLabel label = new JLabel("Host: "); toolBar1.add(label); host = new JTextField("127.0.0.1", 16); host.setMaximumSize(new Dimension(160, 20)); toolBar1.add(host); label = new JLabel("Port: "); toolBar1.add(label); port = new JTextField("1337", 4); port.setMaximumSize(new Dimension(50, 20)); toolBar1.add(port); JButton addButton = new JButton("Подключиться"); addButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { connect(); } }); toolBar1.add(addButton); clientDialog.add(toolBar1); } })); toolBar.addSeparator(); toolBar.add(AddComponent.makeButton(new JButton(), "save.png", new ActionListener() { public void actionPerformed(ActionEvent e) { saveFile(); } })); toolBar.add(AddComponent.makeButton(new JButton(), "open.png", new ActionListener() { public void actionPerformed(ActionEvent e) { openFile(); } })); toolBar.addSeparator(); toolBar.add(AddComponent.makeButton(new JButton(), "search.png", new ActionListener() { public void actionPerformed(ActionEvent e) { searchDialog(); } })); toolBar.add(AddComponent.makeButton(new JButton(), "add.png", new ActionListener() { public void actionPerformed(ActionEvent e) { addDialog(); } })); toolBar.add(AddComponent.makeButton(new JButton(), "delete.png", new 
ActionListener() { public void actionPerformed(ActionEvent e) { deleteDialog(); } })); return toolBar; } private void addDialog() { if (connect) { new AddDialog(studentTable); } else { JOptionPane.showMessageDialog (null, "Вы не подключины к серверу!", "WARNING", JOptionPane.WARNING_MESSAGE); } } private void searchDialog() { if (connect) { new SearchDialog(searchPanel); } else { JOptionPane.showMessageDialog (null, "Вы не подключины к серверу!", "WARNING", JOptionPane.WARNING_MESSAGE); } } private void deleteDialog() { if (connect) { new DeleteDialog(studentTable); } else { JOptionPane.showMessageDialog (null, "Вы не подключины к серверу!", "WARNING", JOptionPane.WARNING_MESSAGE); } } private void openFile() { if (connect) { String name = (String) JOptionPane.showInputDialog(null, "Открыть файл", "Открыть файл", JOptionPane.QUESTION_MESSAGE, null, null, ""); if ((name != null) && (name.length() > 0)) { client.sendToServer(Constants.OPEN_FILE); client.sendToServer(name); studentTable.update(); } } else { JOptionPane.showMessageDialog (null, "Вы не подключины к серверу!", "WARNING", JOptionPane.WARNING_MESSAGE); } } private void saveFile() { if (connect) { String name = (String) JOptionPane.showInputDialog(null, "Сохранить файл", "Сохранить файл", JOptionPane.QUESTION_MESSAGE, null, null, ""); if ((name != null) && (name.length() > 0)) { client.sendToServer(Constants.SAVE_FILE); client.sendToServer(name); } } else { JOptionPane.showMessageDialog (null, "Вы не подключины к серверу!", "WARNING", JOptionPane.WARNING_MESSAGE); } } private void connect() { if (isCorrectHostAndPort()) { client = new Client(this, host.getText(), Integer.parseInt(port.getText())); studentTable.setClient(client); searchPanel.setClient(client); } else { client.sendToServer(Constants.CLIENT_EXIT); connect = false; JOptionPane.showMessageDialog (null, "Не корректный Host или Port", "ERROR", JOptionPane.ERROR_MESSAGE); } } private boolean isCorrectHostAndPort() { Pattern pHost = 
Pattern.compile("((25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])"); Pattern pPort = Pattern.compile("[0-9]{1,5}"); if (pPort.matcher(port.getText()).matches()) { int portInt = Integer.parseInt(port.getText()); return (pHost.matcher(host.getText()).matches() && 0 <= portInt && portInt <= 65535); } else { return false; } } StudentTable getStudentTable() { return studentTable; } StudentTable getSearchPanel() { return searchPanel; } Client getClient() { return client; } void setConnect(boolean connect) { this.connect = connect; } public static void main(String[] args) { final MainWindow mainWindow = new MainWindow(); PropertyConfigurator.configure("log4j.property"); Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { public void run() { if (mainWindow.getClient() != null) { mainWindow.getClient().sendToServer(Constants.CLIENT_EXIT); } } })); } } <file_sep>/src/Server/FileWorker.java package Server; import library.SocialWork; import library.Student; import org.w3c.dom.Attr; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; import javax.swing.*; import javax.xml.parsers.*; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerException; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import java.io.File; import java.util.ArrayList; import java.util.List; /** * Created by alex on 16.3.17. 
*/ class FileWorker { private final String LAST_NAME = "last_name"; private final String FIRST_NAME = "first_name"; private final String FATHER_NAME = "father_name"; private final String GROUP = "group"; private final String SOCAIL_WORK = "social_work"; private final String STUDENT = "student"; private final String STUDENTS = "students"; private final String EXTENSION = "xml"; private Model tableModel; private Server server; FileWorker(Session serverSession){ tableModel = serverSession.getTableModel(); server = serverSession.getServer(); } void saveFile(String saveName){ try { DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder docBuilder = docFactory.newDocumentBuilder(); Document doc = docBuilder.newDocument(); Element rootElement = doc.createElement(STUDENTS); doc.appendChild(rootElement); for (Student student : tableModel.getStudents()) { Element studentEl = doc.createElement(STUDENT); rootElement.appendChild(studentEl); Attr attr = doc.createAttribute(FIRST_NAME); attr.setValue(student.getFirstName()); studentEl.setAttributeNode(attr); attr = doc.createAttribute(LAST_NAME); attr.setValue(student.getLastName()); studentEl.setAttributeNode(attr); attr = doc.createAttribute(FATHER_NAME); attr.setValue(student.getFatherName()); studentEl.setAttributeNode(attr); attr = doc.createAttribute(GROUP); attr.setValue(student.getGroupNumber()); studentEl.setAttributeNode(attr); for (SocialWork socialWork : student.getSocialWork()) { Element firstname = doc.createElement(SOCAIL_WORK); firstname.appendChild(doc.createTextNode(socialWork.getWork())); studentEl.appendChild(firstname); } TransformerFactory transformerFactory = TransformerFactory.newInstance(); Transformer transformer = transformerFactory.newTransformer(); DOMSource source = new DOMSource(doc); StreamResult result = new StreamResult(new File(saveName + "." 
+ EXTENSION)); transformer.transform(source, result); } server.log("Save "+saveName+"\n"); } catch (ParserConfigurationException | TransformerException pce) { server.log("Can not save "+saveName+"\n"); JOptionPane.showMessageDialog(null, "Не удалось сохранить файл", "Ошибка", JOptionPane.ERROR_MESSAGE | JOptionPane.OK_OPTION); } } void openXMLFile(String fileName) { try { SAXParserFactory factory = SAXParserFactory.newInstance(); SAXParser saxParser = factory.newSAXParser(); tableModel.getStudents().clear(); DefaultHandler handler = new DefaultHandler() { private String lastName = ""; private String firstName = ""; private String fatherName = ""; private String group = ""; private List<SocialWork> socialWork = new ArrayList<SocialWork>(); boolean isSocialWork = false; @Override public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException { if (qName.equalsIgnoreCase(STUDENT)) { lastName=attributes.getValue(LAST_NAME); firstName=attributes.getValue(FIRST_NAME); fatherName=attributes.getValue(FATHER_NAME); group=attributes.getValue(GROUP); } else if (qName.equalsIgnoreCase(SOCAIL_WORK)) { isSocialWork = true; } } @Override public void endElement(String uri, String localName, String qName) throws SAXException { if(socialWork.size()==tableModel.SEMESTER_NUMBER){ tableModel.getStudents().add(new Student(lastName, firstName, fatherName, group, new ArrayList<SocialWork>(socialWork) )); socialWork.clear(); } } @Override public void characters(char ch[], int start, int length) throws SAXException { if (isSocialWork) { socialWork.add(new SocialWork(new String(ch, start, length))); isSocialWork = false; } } }; saxParser.parse(new File(fileName+"."+EXTENSION), handler); server.log("Open "+fileName+"\n"); } catch (Exception e) { server.log("Can not open "+fileName+"\n"); e.printStackTrace(); } } }
a7c0c977f4c219968588bda416f21cc120bc2080
[ "Java" ]
8
Java
FULLXOACINC/TableStudentsEditorClientServer
52dcdb87bf2a43468e0a364684847b4a800f599a
879389d3bdff5231407ea97dae8e9c6041ee01f4
refs/heads/master
<file_sep>SELENIUM_BROWSER=firefox node $1 if [ $? -eq 0 ] then echo sim else echo nao fi
72a76143792c26e06ea5e5a9babb2eac3272b806
[ "Shell" ]
1
Shell
rodrigovilar/experiment
60519dfd13d4d752106fe40e9d4b55222a5dff8a
e1dee9fc03f37c2009f28a66e3eed7d43d80b68c
refs/heads/master
<file_sep>package org.ks.kotlin.samples.base.bytecode data class DataClass(val normalProperty: String, val propertyWithDefault: Int = 42)<file_sep># kotlin-samples # Overview This project was created for presentation purposes. For more examples see http://kotlinlang.org/docs/reference/coding-conventions.html ## Base syntax * Bytecode * Kotlin class - declarations etc. * Delegates * StringBuilder * Lambda ## M10 * Declaration - site variance * Generic * Inline package function - reified type ## M11 * Multiple constructors - super(), this() * Init - initializer block * Companion - class object has been deprecated * Labeled Returns in Lambdas * Reflection - properties of class and references ## Dependencies * kotlin-fluent-assertions - needs to be compiled into local repository <file_sep>package org.ks.kotlin.samples.m10 import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Test public class GenericBoxTest { Test fun genericTypeShouldBeSpecifiedExplicitly() { GenericBox<Int>(1) .value.should().be(1) } Test fun genericTypeShouldBeInferred() { GenericBox(1) .value.should().be(1) } }<file_sep>package org.ks.kotlin.samples import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Test public class LabeledReturnsTest { Test fun labelForMapExpressionIsImplicit() { val listOfInts = listOf(1, -10, 100, -1000) val listOfStrings = listOfInts.map { i -> if (i < 0) return@map -1 //... 
i * 2 } listOfStrings[0].should().be(2) listOfStrings[1].should().be(-1) listOfStrings[2].should().be(200) listOfStrings[3].should().be(-1) } Test fun canReturnOnOuterFunction() { class Vertex(var neighbors: Iterable<Vertex> = listOf()) { } val f = fun reachable(from: Vertex, to: Vertex): Boolean { val visited = hashSetOf<Vertex>() fun dfs(current: Vertex) { // here we return from the outer function: //if (current == to) return@reachable true // And here -- from local function: if (!visited.add(current)) return for (v in current.neighbors) dfs(v) } dfs(from) return false // if dfs() did not return true already } f(Vertex(), Vertex()) .should().beFalsy() } }<file_sep>description = "Module which contains Kotlin basic samples."<file_sep>package org.ks.kotlin.samples.m10 public class DeclarationSiteVariance<in TIn, out TOut> { fun cast(value: TIn) : TOut { return value as TOut } }<file_sep>description = "Module which contains Kotlin samples in M11." dependencies { compile "org.jetbrains.kotlin:kotlin-reflect:$kotlin_version" }<file_sep>package org.ks.kotlin.samples.base.types public class BasicTypes( var double: Double = 64.0, var float: Float = 32F, var long: Long = 64L, var int: Int = 23, var short: Short = 16, var byte: Byte = 8, var char: Char = 'a', var string: String = "AString", var boolean: Boolean = true, var nullableDouble: Double? 
= 64.0 ) { }<file_sep>package org.ks.kotlin.samples.m10 import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Test public class DeclarationSiteVarianceTest { Test fun castIntToLong() { DeclarationSiteVariance<Float, Long>() .cast(1F) .should().be(1L) } Test fun castLongToInt() { DeclarationSiteVariance<Long, Float>() .cast(1L) .should().be(1F) } }<file_sep>package org.ks.kotlin.samples.base.types import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Before import org.junit.Test import kotlin.properties.Delegates public class BasicTypesTest { private var h: BasicTypes by Delegates.notNull() Before fun before() { h = BasicTypes() } Test fun stringHasTemplates() { val anotherString = "AnAnotherString" h.string.concat(" + $anotherString") .should().be("AString + AnAnotherString") } Test fun stringHasLiterals() { val anotherString = "AnAnotherString" val s = h.string.concat( """ $anotherString """) println(s) //s.should().be("AString\nAnAnotherString") } Test fun numbersHasNotImplicitConversion() { h.float = 1F h.float = 1f // h.Long = h.float // h.Long = 1l } Test fun explicitConversion() { h.long = h.float.toLong() h.byte = 1 h.long = h.byte.toLong() } }<file_sep>package org.ks.kotlin.samples.base.syntax import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Before import org.junit.Test import kotlin.properties.Delegates public class KotlinClassTest { var w: WorkflowDto by Delegates.notNull() Before fun before() { w = WorkflowDto(1L, listOf<ModuleDto>(), listOf<ConnectionDto>()) } Test fun hasWorkflowDto() { val command = CreateWorkflowCommand(w) command.dto.should().be(w) } Test fun dataClassCanBeCopied() { println(w) val w2 = w.copy(w.id, w.modules, w.connections) println(w2) w.should().be(w2) w.equals(w2).should().be(true) w.equals(w2).should().beTruthy() } Test fun dataClassCanBeUsedToMultiDeclarations() { val (id, modules, connections) = w id.should().be(1L) modules.count().should().be(0) 
connections.count().should().be(0) val (id2) = w id2.should().be(1L) } Test fun extensionPrintsToConsole() { CreateWorkflowCommand(w) .printToConsole() } } <file_sep>package org.ks.kotlin.samples.base.syntax trait Routable { val routingKey: String } trait DtoHolder<out T> { val dto: T // var mutableDto: T } abstract class Command<T>(override var dto: T) : Routable, DtoHolder<T> { } open class CreateWorkflowCommand(var workflow: WorkflowDto? = null): Command<WorkflowDto>(workflow!!) { override val routingKey: String get() = "workflow.command" } class CreateDefaultWorkflowCommand(): CreateWorkflowCommand(WorkflowDto(-1L, listOf(), listOf())) { } internal fun CreateWorkflowCommand.printToConsole(): Unit { println(this) } data class WorkflowDto(var id: Long, var modules: Iterable<ModuleDto>, var connections: Iterable<ConnectionDto>) data class ModuleDto {} data class ConnectionDto {}<file_sep>package org.ks.kotlin.samples.base.syntax import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Test import org.junit.experimental.runners.Enclosed import org.junit.runner.RunWith import kotlin.properties.Delegates import kotlin.test.failsWith RunWith(javaClass<Enclosed>()) public class DelegatesSampleTest { public class LazyDelegatesSampleTest { val lazyProperty: String by Delegates.lazy { "DefaultLazyValue" } Test fun delegatesCanBeSpecifiedAsLocalVariables() { // val lazyProperty: String by Delegates.lazy { "DefaultLazyValue" } } Test fun lazyAddsValueFromLambda() { lazyProperty.should().be("DefaultLazyValue") } } public class NotNullDelegatesSampleTest { // val notNullProperty: String by Delegates.notNull() var notNullProperty: String by Delegates.notNull() Test fun propertyShouldBeInitializedBeforeGetIsNull() { val e = failsWith(javaClass<IllegalStateException>(), { println(notNullProperty) }) println(e) } Test fun propertyInitialization() { notNullProperty = "Value" println(notNullProperty) notNullProperty.should().be("Value") } } }<file_sep>package 
org.ks.kotlin.samples.m10 import org.junit.Test public class ReifiedTypeParametersTest { Test fun printAllMethodsViaReifiedType() { val arrayOfMethods = methodsOf<String>() println(arrayOfMethods.joinToString("\n")) } Test fun printAllMethodsViaParameter() { val arrayOfMethods = methodsOfOld(javaClass<String>()) println(arrayOfMethods.joinToString("\n")) } }<file_sep>package org.ks.kotlin.samples public class ObjectWithCompanion { // class object { // var value: Int = 0 // // fun plus(increment: Int) { // value += increment // } // } object Singleton { var value: Int = 0 fun plus(increment: Int) { value += increment } } companion object CompanionSingleton { var value: Long = 0L fun plus(increment: Long) { value += increment } } } fun ObjectWithCompanion.CompanionSingleton.minus(decrement: Long) { value -= decrement }<file_sep>package org.ks.kotlin.samples.m10 class GenericBox<T>(t: T) { var value = t }<file_sep>package org.ks.kotlin.samples.base.bytecode trait Trait1 { fun abstractMethod() } trait Trait2 { fun implementedMethod() { println("implementation in trait") } } class MultiClass : Trait1, Trait2 { override fun abstractMethod() { println("implemented abstract method") } }<file_sep>package org.ks.kotlin.samples import kotlin.properties.Delegates public class InitKeyword { var property: String by Delegates.notNull() init { property = "Value" } }<file_sep>description = "Module which contains Kotlin samples in M10."<file_sep>allprojects { apply plugin: 'java' apply plugin: 'kotlin' group = 'org.ks.kotlin.samples' version = '0.0.1.0-SNAPSHOT' } repositories { mavenCentral() } buildscript { ext.kotlin_version = '0.11.91' repositories { mavenCentral() } dependencies { classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version" } } subprojects { repositories { mavenLocal() mavenCentral() } sourceSets { main.java.srcDirs += 'src/main/kotlin' } buildscript { repositories { mavenCentral() } } dependencies { compile 
"org.jetbrains.kotlin:kotlin-stdlib:$kotlin_version" testCompile 'junit:junit:4.11' testCompile 'cz.krlst.kotlin.fluentassertions:kotlin-fluent-assertions-core:0.0.1.0-SNAPSHOT' } } <file_sep>rootProject.name = 'kotlin-samples' include 'kotlin-samples-m11' include 'kotlin-samples-m10' include 'kotlin-samples-base-syntax' <file_sep>package org.ks.kotlin.samples.base.bytecode fun String.dummize(): String { return "dummy ${this}" } val result = "Kotlin".dummize() <file_sep>package org.ks.kotlin.samples.base.bytecode public class Lambdas { val constructorLambda = Array(5, { i -> 42 * i }) fun inFunction(funParamLambda: (Int) -> String) { funParamLambda.invoke(42) } fun inFunction22(funParamLambda22: (p1: Int, p2: Int, p3: Int, p4: Int, p5: Int, p6: Int, p7: Int, p8: Int, p9: Int, p10: Int, p11: Int, p12: Int, p13: Int, p14: Int, p15: Int, p16: Int, p17: Int, p18: Int, p19: Int, p20: Int, p21: Int, p22: Int) -> String) { // 22 is maximum, when you raise to 23, Kotlin compiler will fail on KotlinBuiltIns.getBuiltInClassByName funParamLambda22.invoke(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22) } }<file_sep>package org.ks.kotlin.samples import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Before import org.junit.Test public class ObjectWithCompanionTest { Before fun before() { ObjectWithCompanion.Singleton.value = 0 ObjectWithCompanion.value = 0L } Test fun addsIncrementToSingletonValue() { ObjectWithCompanion.Singleton.value .should().be(0) ObjectWithCompanion.Singleton.plus(666) ObjectWithCompanion.Singleton.value .should().be(666) ObjectWithCompanion.Singleton.plus(10) ObjectWithCompanion.Singleton.value .should().be(676) } Test fun addIncrementToCompanionSingletonValue() { ObjectWithCompanion.value .should().be(0L) ObjectWithCompanion.plus(10L) ObjectWithCompanion.value .should().be(10L) } Test fun operatesWithCompanionSingletonValue() { ObjectWithCompanion.value .should().be(0L) ObjectWithCompanion.minus(10L) 
ObjectWithCompanion.plus(666L) ObjectWithCompanion.value .should().be(656L) } }<file_sep>package org.ks.kotlin.samples.reflection public class DummyClass ( var double: Double = 64.0, var float: Float = 32F, var long: Long = 64L, var int: Int = 23, var short: Short = 16, var byte: Byte = 8, var char: Char = 'a', var string: String = "AString", var boolean: Boolean = true, var nullableDouble: Double? = 64.0 ) { var _value: Int? = null fun print(text: String) { println(text) } } var DummyClass.nullableInt: Int? get() { return _value } set(value) { _value = value } val DummyClass.ImutablleNullableInt: Int? get() { return _value }<file_sep>package org.ks.kotlin.samples public open class Base(var property: String) { } public class SingleConstruction(property: String) : Base(property) { } public class MultipleConstruction : Base { constructor() : this("DefaultValue") { } constructor(property: String) : super(property) { } }<file_sep>package org.ks.kotlin.samples.reflection import org.junit.Before import org.junit.Test import kotlin.properties.Delegates public class DummyClassTest { private var dummy: DummyClass by Delegates.notNull() Before fun before() { dummy = DummyClass() } Test fun printProperties() { val r = DummyClass::class println("Properties:") r.properties.forEach { println(StringBuilder { appendln("name: ${it.name}") appendln("value: ${it.get(dummy)}") appendln("type: ${it.javaClass.getSimpleName()}") }) } println("Extension properties:") r.extensionProperties.forEach { println(StringBuilder { appendln("name: ${it.name}") // appendln("value: ${it.get(dummy)}") appendln("type: ${it.javaClass.getSimpleName()}") }) } } Test fun functionHasReference() { fun isOdd(x: Int) = x % 2 != 0 val numbers = listOf(1, 2, 3) println(numbers.filter(::isOdd)) } var x = 1 Test fun propertyHasReference() { // var x = 1 // Unsupported yet println(::x.get(this)) // prints "1" ::x.set(this, 2) println(x) } }<file_sep>package org.ks.kotlin.samples.base.lambda import 
cz.krlst.kotlin.fluentassertions.core.should import org.junit.Test public class DummyClassTest { Test fun buildsDummyClass() { val c = buildDummyClass({ property = "Value" }) c.property.should().be("Value") } }<file_sep>package org.ks.kotlin.samples.base.syntax import org.junit.Test public class StringBuilderSampleTest { Test fun youCanBuildStringByBuilder() { val builder = StringBuilder { } .append("Some String") .append(" - Second String") println(builder) println(StringBuilder({ appendln("NewBuilder started") }) .appendln(builder) .appendln("NewBuilder started")) } }<file_sep>package org.ks.kotlin.samples import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Before import org.junit.Test import kotlin.properties.Delegates public class InitKeywordTest { private var initKeyword: InitKeyword by Delegates.notNull() Before fun before() { initKeyword = InitKeyword() } Test fun hasInitializedProperty() { initKeyword.property.should().be("Value") } }<file_sep>package org.ks.kotlin.samples.base.lambda public class DummyClass { var property: String = "AValue" } fun buildDummyClass(init: DummyClass.() -> Unit): DummyClass { val l = DummyClass() l.init() return l }<file_sep>package org.ks.kotlin.samples.m10 inline fun methodsOf<reified T>() = javaClass<T>().getMethods() fun methodsOfOld(clazz: Class<*>) = clazz.getMethods() <file_sep>package org.ks.kotlin.samples import cz.krlst.kotlin.fluentassertions.core.should import org.junit.Test import org.junit.experimental.runners.Enclosed import org.junit.runner.RunWith RunWith(javaClass<Enclosed>()) public class ConstructionTest { public class SingleConstructionTest { Test fun propertyHasAssignedValue() { SingleConstruction("Value").property .should().be("Value") } } public class MultipleConstructionTest { Test fun propertyHasDefaultValue() { MultipleConstruction().property .should().be("DefaultValue") } Test fun propertyHasAssignedValue() { MultipleConstruction("Value").property .should().be("Value") } } }
91c20d5b899db43a93acde06351d297f4f467730
[ "Markdown", "Kotlin", "Gradle" ]
33
Kotlin
karelsteinmetz/kotlin-samples
e3c6a2808fcf5f7e4031415aac1863c665a5d13f
b33f49e9c31d6ba9dcdae33ac56dd505bece6d85
refs/heads/main
<file_sep>import React, { Component } from "react"; import Navbar from "./components/Navbar/Navbar"; import Users from "./components/users/Users"; import Search from "./components/users/Search"; import axios from "axios"; import "./App.css"; // import Api from "./music.json"; class App extends Component { state = { users: [], loading: false, }; searchUser = async (e) => { this.setState({ loading: true }); //const res = await axios.get(`./music.json`); const res = await axios.get(`https://api.github.com/search/users?q=${e}`); //console.log(res.data[0]); this.setState({ users: res.data.items, loading: false }); }; clearUsers = () => { this.setState({ users: [], loading: false }); }; render() { return ( <> <Navbar /> <div className="container"> <Search searchUser={this.searchUser} clearUsers={this.clearUsers} showClear={this.state.users.length > 0 ? true : false} /> <Users users={this.state.users} loading={this.state.loading} /> </div> </> ); } } export default App; <file_sep>import React from "react"; import { css } from "@emotion/core"; import MoonLoader from "react-spinners/MoonLoader"; const override = css` display: block; margin: auto; `; const Spiner = () => ( <> <MoonLoader css={override} size={100} color={"#D42C43"} /> </> ); export default Spiner; <file_sep>import React from "react"; import MusicNoteIcon from "@material-ui/icons/MusicNote"; const Navbar = () => { return ( <nav className="navbar bg-primary"> <MusicNoteIcon /> <h1 className="navtitle">Music Finder</h1> </nav> ); }; export default Navbar;
192cc157931dd424a28b4130256694e8b6ee6b8f
[ "JavaScript" ]
3
JavaScript
gautrohit/MusicFinder
26503f6e9184b6efb7920c01eda8dd0ff46e0019
fc798f59746cff8b3f3507355b99883ab6a443bc
refs/heads/master
<repo_name>phucpnt/min<file_sep>/js/statistics.js const settings = require('util/settings/settings.js') const statistics = { upload: function () { if (settings.get('collectUsageStats') === false) { return } fetch('https://services.minbrowser.org/stats/v1/collect', { method: 'post', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ clientID: settings.get('clientID'), installTime: settings.get('installTime'), os: process.platform, lang: navigator.language, appVersion: window.globalArgs['app-version'] }) }) .catch(e => console.warn('failed to send usage statistics', e)) }, initialize: function () { setTimeout(statistics.upload, 10000) setInterval(statistics.upload, 24 * 60 * 60 * 1000) settings.listen('collectUsageStats', function (value) { if (value === false) { // disabling stats collection should reset client ID settings.set('clientID', undefined) } else if (!settings.get('clientID')) { settings.set('clientID', Math.random().toString().slice(2)) } }) if (!settings.get('installTime')) { // round install time to nearest hour to reduce uniqueness const roundingFactor = 60 * 60 * 1000 settings.set('installTime', Math.floor(Date.now() / roundingFactor) * roundingFactor) } } } module.exports = statistics
986526a33aa0f9118298a6f511d0ee125b9139b6
[ "JavaScript" ]
1
JavaScript
phucpnt/min
091d3e6714e14a2a6995ce86a4e17f2a2d0ce0e6
ce69ec3debdcafd2157adac6f4a6b14ea69ff71f
refs/heads/main
<repo_name>ramya160/training<file_sep>/readtext.c #include<stdio.h> #include<sys/types.h> #include<unistd.h> struct employee { int empid; char empname[20]; }obj1[10]; //struct employee obj2[10]; int main() { int i; FILE *fp; fp=fopen("employeedb","rb"); for(i=1;i<3;i++) { fread(&obj1[i],sizeof(obj1),1,fp); } for(i=1;i<3;i++) { printf("%d %s",obj1[i].empid,obj1[i].empname); } fclose(fp); } <file_sep>/writebinary3.c #include<stdio.h> #include<sys/types.h> #include<unistd.h> struct employee { int empid; char empname[20]; }obj1[10]; int main() { int i; FILE *fp; fp=fopen("employeedb","wb"); for(i=1;i<3;i++) { scanf("%d %s",&obj1[i].empid,&obj1[i].empname); } for(i=1;i<3;i++) { fwrite(&obj1[i],sizeof(obj1),1,fp); } printf("written successfully"); } <file_sep>/pipefork.c #include <stdio.h> #include <sys/types.h> #include <unistd.h> int main() { int pid=0; int fds[2]; int fds1[2]; char data[10]; char data1[10]; if(pipe(fds)<0) { printf("pipe error"); } if(pipe(fds1)<0) { printf("pipe error"); } if((pid = fork())<0) { printf("error"); } if(pid == 0) { sleep(5); read(fds[0],data,5); printf("%s",data); write(fds1[1],"hai",5); } if(pid>0) { write(fds[1],"hello",5); sleep(5); read(fds1[0],data1,5); printf("%s",data1); } }
7733242746155c1ce1beaab01718856f190dae61
[ "C" ]
3
C
ramya160/training
89f9773d57a44c94e722b7e1ee85f457cf453e40
310fed0943cef1cb1afd6e13c34f67c43934ec6f
refs/heads/master
<repo_name>andrescuello7/python-utn<file_sep>/index.py #llamado a lel framework Flask from flask import Flask, render_template #importar Flask app = Flask(__name__) #rutas de la pagina @app.route('/') def home(): return render_template('index.html') #servidor en funcionamiento if __name__ == '__main__': app.run(port = 5000, debug = True)<file_sep>/requirements.txt airdrop-ng==1.1 airgraph-ng==1.1 appdirs==1.4.4 arandr==0.1.10 backports.entry-points-selectable==1.1.0 bson==0.5.10 btrfsutil==5.13.1 CacheControl==0.12.6 cairocffi==1.2.0 cffi==1.14.6 chardet==4.0.0 click==8.0.1 colorama==0.4.4 contextlib2==0.6.0.post1 cryptography==3.4.8 distlib==0.3.2 distro==1.5.0 filelock==3.0.12 Flask==2.0.1 Flask-Cors==3.0.10 Flask-MySQLdb==0.2.0 Flask-PyMongo==2.3.0 gufw==21.4.0 gunicorn==20.1.0 html5lib==1.1 idna==3.2 itsdangerous==2.0.1 Jinja2==3.0.1 kazam==1.4.5 lightdm-gtk-greeter-settings==1.2.2 MarkupSafe==2.0.1 more-itertools==8.8.0 msgpack==1.0.2 mysql-connector-python==8.0.26 mysqlclient==2.0.3 openshot-qt==2.5.1 ordered-set==4.0.2 packaging==20.9 pep517==0.11.0 platformdirs==2.2.0 ply==3.11 progress==1.5 protobuf==3.17.3 pycairo==1.20.0 pycparser==2.20 pycritty==0.3.5 PyGObject==3.40.1 pymongo==3.12.0 pyOpenSSL==20.0.1 pyparsing==2.4.7 PyQt5==5.15.4 PyQt5-sip==12.9.0 python-dateutil==2.8.2 python-distutils-extra==2.39 pytube==11.0.0 pyxdg==0.27 PyYAML==5.4.1 pyzmq==22.2.1 qtile==0.18.1.dev0+g8e7ecc0a.d20210719 Reflector==2021.7.8.19.52.52 requests==2.26.0 resolvelib==0.5.5 retrying==1.3.3 six==1.16.0 team==1.0 toml==0.10.2 tomli==1.2.1 ufw==0.36 urllib3==1.26.6 virtualenv==20.7.2 webencodings==0.5.1 Werkzeug==2.0.1 xcffib==0.11.1
c0e2fdc100c2cf4075f427348ba27b81d1918fd3
[ "Python", "Text" ]
2
Python
andrescuello7/python-utn
192e167152b3d933b8c23a6a413366579e59e400
3a9e14167785cd8cffedca4315739a1c862deb9a
refs/heads/master
<repo_name>VictorPimentel06/Zendesk<file_sep>/main.py # Modulos Basicos import env import datetime import requests import os import json import pandas as pd import time from pandas.io.json import json_normalize # Dev Modules from utils import RedShift, clean, Initialize, New_columns, Upload_Redshift class Zendesk_support(RedShift): def __init__(self, fecha = None, tipo = "complete", table = None, ids = None): super().__init__() self.tipo = tipo self.fecha = fecha self.search_url = "https://runahr.zendesk.com/api/v2/search.json?query=" self.incremental_users = "https://runahr.zendesk.com/api/v2/incremental/users.json" self.ids = ids if table == "tickets": try: self.__tickets_extract() except: print("Errores en la extraccion de tickets") exit() if table == "users": if self.tipo == "complete": try: self.__users_extract_complete() except: print("Errores en la extraccion de usuarios") exit() if self.tipo == "partial": self.__users_extract_partial() if table == "orgs": if self.tipo == "complete": try: self.__orgs_extract() except: print("Errores en la extraccion de organizaciones") exit() else: self.__orgs_extract_partial() if table == "field_history": if self.tipo == "complete": try: self.__extract_tickets_audits() except: print("Errores en la extraccion de field_history") exit() else: self.__extract_tickets_audits_partial(ids= self.ids ) if table == "field_options": try: self.__extract_field_options() except: print("Errores en la extraccion de Field Options") exit() if table == "tag_history": if self.tipo == "complete": try: self.__extract_tag_history() except: print("Errores en la Extraccion de Tag History") exit() else: self.__extract_tag_history_partial(fecha = None, ids = self.ids) if table == "groups": try: self.extract_groups() except: print("Errores en la extraccion de grupos") exit() if table == "group_members": try: self.extract_group_members() except: print("Errores en la extraccion de miembros de grupos") exit() if table == "comments": if self.tipo == 
"complete": try: self.__extract_comments() except: print("Errores en la extraccion de Comentarios") else: self.__extract_comments_partial(ids = self.ids) def __tickets_extract(self): """ Extraccion completa de Tickets a traves del endpoint incremental. Retoma los valores pasados como argumentos en la instancia. Fecha: Valor en timestamp desde la cual se hara la extraccion. tipo : - complete: Extraccion de la totalidad de los tickets desde el primero de enero de 2018. - partial: se tomara el valor de la fecha de entrada para hacer la extraccion. """ self.incremental = "https://runahr.zendesk.com/api/v2/incremental/tickets.json" def extract_custom_fields(): self.custom_fields_url = "https://runahr.zendesk.com/api/v2/ticket_fields.json" respuesta = requests.get(self.custom_fields_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = respuesta.json() fields = data["ticket_fields"] dic = {} for i in fields: dic.update({i["id"]: { "Name":i["raw_title_in_portal"], "Description":i["description"], "Raw Description": i["raw_description"], "Created_at": i["created_at"], "removable": i["removable"] # if removable == False entonces es uncampo de sistema. 
} }) return dic def add_field(tickets_table): dic = {} for ticket, fields in zip(tickets_table.id, tickets_table.custom_fields): for field in fields: if ticket not in dic: dic.update( {ticket: { field["id"]: { "value": field["value"], "name":self.dic_fields[field["id"]]["Name"] } } }) else: dic[ticket].update( { field["id"]: { "value": field["value"], "name":self.dic_fields[field["id"]]["Name"] } } ) tabla = pd.DataFrame.from_dict(dic).T for column in tabla.columns: nombre = "Custom_" + str(tabla[column].iloc[0]["name"]) tabla = tabla.rename(columns = {column: nombre}) aux = [] for record in tabla[nombre]: aux.append(record["value"]) tabla[nombre] = aux tabla.reset_index(inplace= True) tabla = tabla.rename(columns = {"index": "ticket_id"}) tabla = tabla.merge(tickets_table, left_on = "ticket_id", right_on= "id") return tabla # Creacion de tabla catalogo de los custom fields siempre se actualizara por completo tickets = [] if self.tipo == "complete": fecha = int(datetime.datetime.strptime("2018-01-01","%Y-%m-%d").timestamp()) response = requests.get(self.incremental + str("?start_time=")+ str(fecha), auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code != 200: print("Error en la extraccion. CodeError: "+ str(response.status_code)) data = response.json() tickets.extend(data['tickets']) url = data['next_page'] if self.tipo == "partial": fecha = int(datetime.datetime.strptime(self.fecha, "%Y-%m-%d").timestamp()) url = self.incremental + str("?start_time=")+ str(fecha) response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code != 200: print("Error en la extraccion. CodeError: "+ str(response.status_code)) data = response.json() tickets.extend(data['tickets']) url = data['next_page'] while url: response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) time.sleep(4) if response.status_code != 200: print("Error en la extraccion. 
CodeError: "+ str(response.status_code)) data = response.json() tickets.extend(data['tickets']) if url == data['next_page']: break print("Numero de tickets extraidos: {}".format(len(tickets))) url = data["next_page"] tabla = pd.io.json.json_normalize(tickets) self.dic_fields = extract_custom_fields() tabla = add_field(tabla) tabla = clean.fix_columns(tabla) self.tickets_table = tabla print("Comienza Extraccion de Custom Fields") tabla = [] for i in self.dic_fields.keys(): aux = self.dic_fields[i] tabla.append(aux) tabla = pd.DataFrame(tabla) tabla["id"] = [i for i in self.dic_fields.keys()] tabla = clean.fix_columns(tabla) self.fields_table = tabla if self.tipo == "complete": Initialize("custom_fields", self.engine) New_columns(tabla, "custom_fields", self.engine) Upload_Redshift(tabla,"custom_fields", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(tabla, "custom_fields", self.engine) Upload_Redshift(tabla,"custom_fields", "zendesk_support","zendesk-runahr",self.engine) print("Termino extraccion de custom fields") def Tickets(self): tabla = self.tickets_table column_list = clean.column_list(self.tickets_table) tabla = tabla.drop([str(i) for i in column_list], axis = 1) if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. 
Initialize("tickets", self.engine) New_columns(tabla, "tickets", self.engine) Upload_Redshift(tabla,"tickets", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(tabla, "tickets", self.engine) Upload_Redshift(tabla,"tickets", "zendesk_support","zendesk-runahr",self.engine) def Tickets_tags(self): final_table = pd.DataFrame() column_list = clean.column_list(self.tickets_table) tabla = self.tickets_table[column_list + ["id"]] tabla = pd.concat([pd.DataFrame({"tags":tabla['tags']}),pd.DataFrame({"id":tabla['id']})], axis = 1) for index, row in tabla.iterrows(): id = row['id'] tags = row['tags'] inter_table = pd.DataFrame(tags, columns= ['tags']) inter_table['id'] = [id for i in range(len(inter_table)) ] final_table = final_table.append(inter_table) if int(index) % 1000 == 0: print("tags del ticket: " + str(index)) final_table.reset_index(inplace= True,drop= True ) if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. Initialize("tickets_tags", self.engine) New_columns(final_table, "tickets_tags", self.engine) Upload_Redshift(final_table,"tickets_tags", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(final_table, "tickets_tags", self.engine) Upload_Redshift(final_table,"tickets_tags", "zendesk_support","zendesk-runahr",self.engine) def __users_extract_complete(self): users = [] self.users_url = "https://runahr.zendesk.com/api/v2/users.json" response = requests.get(self.users_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() users.extend(data['users']) url = data['next_page'] while url: response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code != 200: print("Error en la extraccion. 
CodeError: "+ str(response.status_code)) data = response.json() users.extend(data['users']) if url == data['next_page']: break print("Numero de usuarios extraidos: {}".format(len(users))) url = data["next_page"] tabla = pd.io.json.json_normalize(users) tabla = clean.fix_columns(tabla) self.users_table = tabla def __users_extract_partial(self): """ No se puede usar el incremental export porque todos los usuarios estan siendo updated conforme a sus valores en Salesforce. Para utilizar el enpoint incremental sera necesario modificar el flujo de trabajo de la actualizacion del user type para que solo actualice aquellos usuarios que tengan algun cambio en sus valores. """ users =[] # fecha = int(datetime.datetime.strptime(self.fecha, "%Y-%m-%d").timestamp()) # url = self.incremental_users + str("?start_time=")+ str(fecha) # response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) query = "type:user created>={}".format(self.fecha) response = requests.get(self.search_url + query, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() users.extend(data["results"]) url = data["next_page"] print(url) while url: response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code != 200: print("Error en la extraccion. 
CodeError: "+ str(response.status_code)) data = response.json() users.extend(data['results']) if url == data['next_page']: break print("Numero de usuarios extraidos: {}".format(len(users))) url = data["next_page"] print(url) tabla = pd.io.json.json_normalize(users) tabla = clean.fix_columns(tabla) self.users_table = tabla return self def Users(self): tabla = self.users_table column_list = clean.column_list(self.users_table) tabla = tabla.drop([str(i) for i in column_list], axis = 1) if len(tabla) != 0: if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. Initialize("zendesk_users", self.engine) New_columns(tabla, "zendesk_users", self.engine) Upload_Redshift(tabla,"zendesk_users", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(tabla, "zendesk_users", self.engine) Upload_Redshift(tabla,"zendesk_users", "zendesk_support","zendesk-runahr",self.engine) else: print("No se econtraron registros") def Users_tags(self): tabla = self.users_table print(tabla["tags"]) column_list = clean.column_list(self.users_table) tabla = tabla[column_list + ["id"]] tabla = pd.concat([pd.DataFrame({"tags":tabla['tags']}),pd.DataFrame({"id":tabla['id']})], axis = 1) final_table = pd.DataFrame() for index, row in tabla.iterrows(): if len(row['tags']) == 0: pass else: id = row['id'] tags = row['tags'] inter_table = pd.DataFrame(tags, columns= ['tags']) inter_table['id'] = [id for i in range(len(inter_table)) ] final_table = final_table.append(inter_table) try: final_table.reset_index(inplace= True,drop= True ) except: pass if len(final_table) == 0: self.users_tags = final_table else: if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. 
Initialize("user_tags", self.engine) New_columns(final_table, "user_tags", self.engine) Upload_Redshift(final_table,"user_tags", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(final_table, "user_tags", self.engine) Upload_Redshift(final_table,"user_tags", "zendesk_support","zendesk-runahr",self.engine) self.user_tags =final_table def __orgs_extract(self): """ Se incluye la opcion de count """ self.orgs_url = "https://runahr.zendesk.com/api/v2/organizations.json" response = requests.get(self.orgs_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) count = response.json()['count'] data = response.json() orgs = [] orgs.extend(data['organizations']) url = data['next_page'] while url: response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code != 200: print("Error en la extraccion. CodeError: "+ str(response.status_code)) data = response.json() orgs.extend(data['organizations']) if url == data['next_page']: break print("Numero de organizaciones extraidas: {}".format(len(orgs))) url = data["next_page"] tabla = pd.io.json.json_normalize(orgs) tabla = clean.fix_columns(tabla) self.orgs_table = tabla def __orgs_extract_partial(self): query = "type:organization created>={}".format(self.fecha) orgs = [] response = requests.get(self.search_url + query, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() orgs.extend(data["results"]) url = data["next_page"] while url: response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code != 200: print("Error en la extraccion. 
CodeError: "+ str(response.status_code)) data = response.json() orgs.extend(data['results']) if url == data['next_page']: break print("Numero de organizaciones extraidas: {}".format(len(orgs))) url = data["next_page"] tabla = pd.io.json.json_normalize(orgs) tabla = clean.fix_columns(tabla) self.orgs_table = tabla return self def Orgs(self): if len(self.orgs_table) == 0: print("No se encontraron registros nuevos de organizaciones") else: tabla = self.orgs_table column_list = clean.column_list(self.orgs_table) tabla = tabla.drop([str(i) for i in column_list], axis = 1) if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. Initialize("orgs", self.engine) New_columns(tabla, "orgs", self.engine) Upload_Redshift(tabla,"orgs", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(tabla, "orgs", self.engine) Upload_Redshift(tabla,"orgs", "zendesk_support","zendesk-runahr",self.engine) def Orgs_tags(self): if len(self.orgs_table) == 0: print("No se encontraron registros nuevos de organizaciones") else: tabla = self.orgs_table column_list = clean.column_list(self.orgs_table) tabla = tabla[column_list + ["id"]] tabla = pd.concat([pd.DataFrame({"tags":tabla['tags']}),pd.DataFrame({"id":tabla['id']})], axis = 1) final_table = pd.DataFrame() for index, row in tabla.iterrows(): if len(row['tags']) == 0: pass else: id = row['id'] tags = row['tags'] inter_table = pd.DataFrame(tags, columns= ['tags']) inter_table['id'] = [id for i in range(len(inter_table)) ] final_table = final_table.append(inter_table) try: final_table.reset_index(inplace= True,drop= True ) except: pass if len(final_table) == 0: self.orgs_tags = final_table else: if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. 
Initialize("orgs_tags", self.engine) New_columns(final_table, "orgs_tags", self.engine) Upload_Redshift(final_table,"orgs_tags", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(final_table, "orgs_tags", self.engine) Upload_Redshift(final_table,"orgs_tags", "zendesk_support","zendesk-runahr",self.engine) self.orgs_tags =final_table def Orgs_domains(self): if len(self.orgs_table) == 0: print("No se encontraron registros nuevos de organizaciones") else: tabla = self.orgs_table tabla = tabla[["domain_names","id"]] final_table = pd.DataFrame() for index, row in tabla.iterrows(): if len(row['domain_names']) == 0: pass else: id = row['id'] tags = row['domain_names'] inter_table = pd.DataFrame(tags, columns= ['domain_names']) inter_table['id'] = [id for i in range(len(inter_table)) ] final_table = final_table.append(inter_table) try: final_table.reset_index(inplace= True,drop= True ) except: pass if len(final_table) == 0: self.orgs_table = final_table else: if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. 
Initialize("orgs_domains", self.engine) New_columns(final_table, "orgs_domains", self.engine) Upload_Redshift(final_table,"orgs_domains", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(final_table, "orgs_domains", self.engine) Upload_Redshift(final_table,"orgs_domains", "zendesk_support","zendesk-runahr",self.engine) self.orgs_domains =final_table return self def __extract_tickets_audits(self): self.audits_url = "https://runahr.zendesk.com/api/v2/ticket_audits.json" response = requests.get(self.audits_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() next_url = data["before_url"] audits = [] audits.append(data["audits"]) while next_url != None : response = requests.get(next_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() audits.append(data["audits"]) next_url = data["before_url"] print("Total de Audits extraidos: " + str(len(audits))) dic = [] for request in audits: for audit in request: id = audit["ticket_id"] created_at = audit["created_at"] for evento in audit["events"]: if evento["type"] == "Change" and evento["field_name"] != "tags": dic.append([evento["field_name"], evento["value"], evento["previous_value"], id, created_at]) tabla = pd.DataFrame(dic, columns = ["field_name", "value", "previous_value", "id", "updated_at"]) self.tabla_field_history = tabla def __extract_tickets_audits_partial(self, ids): """ Esta funcion tiene que trabajarse de nuevo, no existe forma de filtrar los audits que han sido creados el dia de hoy, ni por query. En su lugar se hara la extraccion completa de los registros. Se ha optado por utilizar los tickets creados por dia. Sin embargo, esto no es escalabre porque tiene que hacer tantas llamadas como tickets hayan sido creados. 
""" dic = [] for ticket in ids: url = "https://runahr.zendesk.com/api/v2/tickets/{}/audits.json".format(str(ticket)) response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() for audit in data["audits"]: id = audit["ticket_id"] created_at = audit["created_at"] for evento in audit["events"]: if evento["type"] == "Change" and evento["field_name"] != "tags": dic.append([evento["field_name"], evento["value"], evento["previous_value"], id, created_at]) tabla = pd.DataFrame(dic, columns = ["field_name", "value", "previous_value", "id", "updated_at"]) self.tabla_field_history = tabla def field_history(self): """ La tabla de field_history no incluye a los tickets que esten archivados. """ if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. Initialize("field_history", self.engine) New_columns(self.tabla_field_history, "field_history", self.engine) Upload_Redshift(self.tabla_field_history,"field_history", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(self.tabla_field_history, "field_history", self.engine) Upload_Redshift(self.tabla_field_history,"field_history", "zendesk_support","zendesk-runahr",self.engine) return self def __extract_field_options(self): self.custom_fields_url = "https://runahr.zendesk.com/api/v2/ticket_fields.json" respuesta = requests.get(self.custom_fields_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = respuesta.json() fields = data["ticket_fields"] tabla = [] for field in fields: if "custom_field_options" in field.keys(): data = field["custom_field_options"] for entry in data: tabla.append(entry) tabla = pd.DataFrame(tabla) tabla = clean.fix_columns(tabla) self.table_fields_options = tabla def field_options(self): if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, 
posteriormente comprueba las nuevas columnas # para insertarlas. Initialize("field_option", self.engine) New_columns(self.table_fields_options, "field_option", self.engine) Upload_Redshift(self.table_fields_options,"field_option", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(self.table_fields_options, "field_option", self.engine) Upload_Redshift(self.table_fields_options,"field_option", "zendesk_support","zendesk-runahr",self.engine) def __extract_tag_history(self): self.tag_history_url = "https://runahr.zendesk.com/api/v2/ticket_audits.json" response = requests.get(self.tag_history_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() next_url = data["before_url"] tags = [] tags.append(data["audits"]) counter = 0 while next_url != None and counter < 1: response = requests.get(next_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() tags.append(data["audits"]) next_url = data["before_url"] print(len(tags)) #deprecate counter += 1 tabla = pd.DataFrame() for request in tags: for audit in request: id = audit["ticket_id"] created_at = audit["created_at"] for evento in audit["events"]: if evento["type"] == "Change" and evento["field_name"] == "tags": aux = pd.DataFrame(evento["value"]) aux["id"] = id aux["updated"] = created_at tabla = tabla.append(aux) tabla.rename(columns = {0: "tag"}, inplace = True) tabla = tabla.reset_index(drop = True) tabla = clean.fix_columns(tabla) self.tabla_tag_history = tabla def __extract_tag_history_partial(self, fecha, ids): tabla = pd.DataFrame() for ticket in ids: url = "https://runahr.zendesk.com/api/v2/tickets/{}/audits.json".format(str(ticket)) response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() for audit in data["audits"]: id = audit["ticket_id"] created_at = audit["created_at"] for evento in audit["events"]: if evento["type"] == 
"Change" and evento["field_name"] == "tags": aux = pd.DataFrame(evento["value"]) aux["id"] = id aux["updated"] = created_at tabla = tabla.append(aux) print("Audists del ticket: ", str(ticket)) tabla.rename(columns = {0: "tag"}, inplace = True) tabla = tabla.reset_index(drop = True) tabla = clean.fix_columns(tabla) self.tabla_tag_history = tabla def tag_history(self): if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. Initialize("tag_history", self.engine) New_columns(self.tabla_tag_history, "tag_history", self.engine) Upload_Redshift(self.tabla_tag_history,"tag_history", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(self.tabla_tag_history, "tag_history", self.engine) Upload_Redshift(self.tabla_tag_history,"tag_history", "zendesk_support","zendesk-runahr",self.engine) def extract_groups(self): self.groups_url = "https://runahr.zendesk.com/api/v2/groups.json" response = requests.get(self.groups_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) tabla = [] for i in response.json()["groups"]: tabla.append(i) tabla = clean.fix_columns(pd.DataFrame(tabla)) self.table_groups = tabla Initialize("groups", self.engine) New_columns(self.table_groups, "groups", self.engine) Upload_Redshift(self.table_groups,"groups", "zendesk_support","zendesk-runahr",self.engine) return self def extract_group_members(self): self.groups_members_url = "https://runahr.zendesk.com/api/v2/group_memberships.json" response = requests.get(self.groups_members_url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) data = response.json() tabla= [] for user in data["group_memberships"]: tabla.append(user) tabla = clean.fix_columns(pd.DataFrame(tabla)) self.table_groups_members = tabla Initialize("groups_members", self.engine) New_columns(self.table_groups_members, "groups_members", self.engine) 
Upload_Redshift(self.table_groups_members,"groups_members", "zendesk_support","zendesk-runahr",self.engine) return self def __extract_comments(self): """ Se hace a traves de los ids de los tickets que ya estan creados en Zendesk. Solo se insertaran los datos de body, created_at, id, y si el comentario es publico o no. """ response = self.engine.execute("SELECT id from zendesk.tickets") array = [i[0] for i in response] final_table = pd.DataFrame() for ticket in array: respuesta = requests.get("https://runahr.zendesk.com/api/v2/tickets/{}/comments.json".format(ticket), auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if respuesta.status_code != 200: print(respuesta.text) continue data = respuesta.json() comments = data["comments"] ticket_comments = pd.DataFrame(comments) ticket_comments = ticket_comments[["body", "created_at", "id", "public"]] ticket_comments["ticket_id"] = ticket final_table = final_table.append(ticket_comments) if len(final_table) % 1000 == 0: print("Tickets extraidos hasta ahora: " + str(len(final_table))) final_table.reset_index(inplace= True,drop= True ) self.Comments_table = final_table def __extract_comments_partial(self, ids): array = [i for i in ids] final_table = pd.DataFrame() for ticket in array: respuesta = requests.get("https://runahr.zendesk.com/api/v2/tickets/{}/comments.json".format(str(ticket)), auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if respuesta.status_code != 200: print(respuesta.text) continue data = respuesta.json() comments = data["comments"] ticket_comments = pd.DataFrame(comments) ticket_comments = ticket_comments[["body", "created_at", "id", "public"]] ticket_comments["ticket_id"] = ticket final_table = final_table.append(ticket_comments) print("Comentarios del ticket:", str(ticket)) if len(final_table) % 1000 == 0: print("Tickets extraidos hasta ahora: " + str(len(final_table))) final_table.reset_index(inplace= True,drop= True ) self.Comments_table = final_table def 
Comments(self): if self.tipo == "complete": # Borra la tabla anterior e inicializa una nueva con solo un ID, posteriormente comprueba las nuevas columnas # para insertarlas. Initialize("ticket_comments", self.engine) New_columns(self.Comments_table, "ticket_comments", self.engine) Upload_Redshift(self.Comments_table,"ticket_comments", "zendesk_support","zendesk-runahr",self.engine) if self.tipo == "partial": New_columns(self.Comments_table, "ticket_comments", self.engine) Upload_Redshift(self.Comments_table,"ticket_comments", "zendesk_support","zendesk-runahr",self.engine) return self if __name__ == "__main__": instancia = Zendesk_support(table = "tickets", tipo = "complete", fecha = "2020-06-01") instancia.Tickets() instancia.Tickets_tags()<file_sep>/utils.py from sqlalchemy import create_engine from io import StringIO import os import boto3 class RedShift(): def __init__(self, schema = "zendesk"): self.engine = create_engine("postgresql+psycopg2://{user}:{contr}@{host}:{port}/{base}".format(user = os.environ['REDSHIFT_USER'], contr = os.environ['REDSHIFT_PASSWORD'], base = os.environ['REDSHIFT_DATABASE'], host = os.environ['REDSHIFT_HOST'], port = os.environ['REDSHIFT_PORT']), connect_args = {'sslmode': 'prefer','options': '-csearch_path={}'.format(schema)}, echo = False, encoding = 'utf8', pool_pre_ping=True) class Upload_Redshift() : def __init__(self, Dataframe, name, carpeta, bucket, engine) : """ Clase que recibe como argumento un dataframe object de pandas - Dataframe: Dataframe de pandas - Folder: carpeta dentro de bucket - Name: nombre del documento. Este debe tener extension .csv ejemplo: "prueba.csv" - bucket: bucket de AWS - engine: objeto de sqlalchmey desde create_engine. 
""" s3 = boto3.client("s3", aws_access_key_id = os.environ["AWS_KEY"], aws_secret_access_key = os.environ["AWS_SECRET_KEY"]) csv_buffer = StringIO() Dataframe.to_csv(csv_buffer, index = False) folder = str(carpeta) + "/" +str(name)+'.csv' s3.put_object(Bucket = bucket, Key = folder, Body = csv_buffer.getvalue()) cols = Dataframe.columns engine.execute("COPY {schema}.{table} ({cols}) FROM '{s3}' WITH CREDENTIALS 'aws_access_key_id={keyid};aws_secret_access_key={secretid}' CSV IGNOREHEADER 1 EMPTYASNULL;commit;".format(schema = os.environ['REDSHIFT_SCHEMA'], table = name, cols = ', '.join(cols[j] for j in range( len(cols) ) ), s3='s3://{}/{}/{}'.format(os.environ['AWS_BUCKET'], carpeta, name+".csv"), keyid = os.environ['AWS_KEY'], secretid= os.environ['AWS_SECRET_KEY'])) class clean(): @staticmethod def fix_columns(datos) : NOT_SUPPORT_COLUMN_NAMES = ["from", "to", "user", "group", "default", "name", "tag"] columns = datos.columns for col in columns : if col in NOT_SUPPORT_COLUMN_NAMES: col_new = str(col) + "_" col_new = col_new.replace(".","_") else: col_new = col.replace(".", "_") col_new = col_new.replace(" ", "_") datos = datos.rename(columns = {col:col_new}) return datos @staticmethod def column_list(datos): """ Recibe como argumento solo dataframes completos y regresa una lista con las columnas que tienen valores de diccionario. 
""" lista = [] for column in datos.columns: if type(datos[column].iloc[0]) == list: lista.append(str(column)) else: pass return lista class Initialize() : def __init__(self, name, engine) : engine.execute("DROP TABLE IF EXISTS {} CASCADE".format(name)) engine.execute("CREATE TABLE {}(id character varying (1024) PRIMARY KEY)".format(name)) class New_columns(): def __init__(self, tabla, name, engine): """ Tabla: Dataframe a insertar name : nombre de la tabla en Redshift """ types = { "int" : "bigint", "int64": "bigint", "bool": "boolean", "float64": "double precision", "object": "character varying (65535)" } cols = [column.lower() for column in tabla.columns] try: q = engine.execute("SELECT column_name FROM information_schema.columns WHERE table_name = '{}'".format(name)) self.cols_db = [i[0] for i in q] if len(self.cols_db) == 0: self.cols = self.cols_db else: self.cols = list(set(cols)-set(self.cols_db)) except: self.cols = [] if len(self.cols) != 0: print("Columnas Nuevas: " + str(self.cols)) for j in self.cols: engine.execute("ALTER TABLE {} ADD COLUMN {} {}".format(name, str(j),types[str(tabla[j].dtypes)]))<file_sep>/prueba.py import env import datetime import requests import os import json import pandas as pd import time # Dev Modules from utils import RedShift, clean, Initialize, New_columns, Upload_Redshift class Zendesk_support(RedShift): def __init__(self, fecha = None, tipo = "complete", table = None): """ Extraccion de tickets desde Zendesk Support. """ super().__init__() self.incremental = "https://runahr.zendesk.com/api/v2/incremental/tickets.json" self.tipo = tipo self.fecha = fecha if table == "tickets": try: self.__tickets_extract() except: print("Errores en la extraccion de tickets") exit() def __tickets_extract(self): """ Extraccion completa de Tickets a traves del endpoint incremental. Retoma los valores pasados como argumentos en la instancia. Fecha: Valor en timestamp desde la cual se hara la extraccion. 
tipo : - complete: Extraccion de la totalidad de los tickets desde el primero de enero de 2018. - partial: se tomara el valor de la fecha de entrada para hacer la extraccion. """ tickets = [] if self.tipo == "complete": fecha = int(datetime.datetime.strptime("2018-01-01","%Y-%m-%d").timestamp()) response = requests.get(self.incremental + str("?start_time=")+ str(fecha), auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code != 200: print("Error en la extraccion. CodeError: "+ str(response.status_code)) data = response.json() tickets.extend(data['tickets']) url = data['next_page'] if self.tipo == "partial": fecha = int(datetime.datetime.strptime(self.fecha, "%Y-%m-%d").timestamp()) url = self.incremental + str("?start_time=")+ str(fecha)+ str("&include=comment_events") response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code != 200: print("Error en la extraccion. CodeError: "+ str(response.status_code)) data = response.json() tickets.extend(data['tickets']) url = data['next_page'] while url: response = requests.get(url, auth = (os.environ["ZENDESK_USER"], os.environ["ZENDESK_PASSWORD"])) if response.status_code == 429: print(url) print(response.json()) if response.status_code != 200: print("Error en la extraccion. 
CodeError: "+ str(response.status_code)) data = response.json() tickets.extend(data['tickets']) if url == data['next_page']: break print("Numero de tickets extraidos: {}".format(len(tickets))) url = data["next_page"] tabla = pd.io.json.json_normalize(tickets) tabla = clean.fix_columns(tabla) self.tickets_table = tabla if __name__ == "__main__": orgs = Zendesk_support(table = "comments") orgs.Comments() <file_sep>/requirements.txt boto3==1.12.46 botocore==1.15.46 certifi==2020.4.5.1 chardet==3.0.4 DateTime==4.3 docutils==0.15.2 idna==2.9 jmespath==0.9.5 python-dateutil==2.8.1 pytz==2019.3 requests==2.23.0 s3transfer==0.3.3 six==1.14.0 SQLAlchemy==1.3.16 urllib3==1.25.8 zope.interface==5.1.0 <file_sep>/exe_diaria.py # La extraccion diaria tendra que ser sobre los ids extraidos de los tickets # debido a que la mayoria de los endpoints no permiten filtrado por fecha # Hay que hacer modificacionesl main from main import Zendesk_support import threading from datetime import date, datetime import time fecha = str(date.today()) # fecha = "2020-06-01" """ El valor del search tiene que ser mayor o igual en le query de extraccion """ def Users(fecha): print("Comenzo Extraccion de Usuarios") instancia = Zendesk_support(table = "users", tipo= "partial", fecha = fecha) instancia.Users() print("Termino Extraccion de Usuarios") print() print("Comienza Extraccion de User Tags") instancia.Users_tags() print("Termina Extraccion de User Tags") print() def Tickets(fecha): print("Comenzo Extraccion de Tickets") instancia = Zendesk_support(table = "tickets", tipo = "partial", fecha = fecha) instancia.Tickets() instancia.Tickets_tags() print("Termino Extraccion de Tickets") print() def Organizations(fecha): print("Comenzo Extraccion de Organizaciones") instancia = Zendesk_support(table = "orgs", tipo = "partial", fecha = fecha) instancia.Orgs() instancia.Orgs_tags() instancia.Orgs_domains() print("Termino Extraccion de Organizaciones, tags y domains") def Field_history(fecha, ids): """ Se 
hara la extraccion completa de los audits por que no tiene forma de filtrarse a traves del query. Debe buscarse una forma adicional de poder extraer estos datos. """ print("Inicia Extraccion de Field History") instancia = Zendesk_support(table = "field_history", ids = ids, tipo = "partial") instancia.field_history() print("Termina Extraccion de Field history") print() def Field_options(): """ La extraccion de field Options se hara completa. """ print("Inicia Extraccion de Field Options") instancia = Zendesk_support(table = "field_options") instancia.field_options() print("Termina Extraccion de Field Options") print() def Tag_history(fecha, ids): """ Mismo problema que Fields History, se tiene que hacer la extraccion completa de audits Es posible que se pueda hacer el merge de esta funcion con la de field_history para poder extraer todo de una sola peticion """ print("Inicia Extraccion de Tag history") instancia = Zendesk_support(table = "tag_history", tipo = "partial", ids = ids) instancia.tag_history() print("Termina Extraccion de Tag History") print() def Groups(): print("Inicia Extraccion de Grupos") Zendesk_support(table = "groups") print("Termina Extraccion de Grupos") print() def Group_Members(): print("Comenzo Extraccion de miembros de grupo") Zendesk_support(table = "group_members") print("Termino extraccion de miembros de grupo") print() def Comments(ids): print("Comienza Extraccion de Comentarios") instancia = Zendesk_support(table= "comments", tipo = "partial", ids = ids) instancia.Comments() print("Termino Extraccion de Commentarios. 
Al Fin") print() def hilos(): print("Comenzo Extraccion de Tickets") instancia = Zendesk_support(table = "tickets", tipo = "partial", fecha = fecha) instancia.Tickets() instancia.Tickets_tags() print("Termino Extraccion de Tickets") ids = list(instancia.tickets_table["ticket_id"]) users = threading.Thread(target = Users, args= (fecha,)) tickets = threading.Thread(target = Tickets, args= (fecha,)) orgs = threading.Thread(target = Organizations, args = (fecha,)) field_history = threading.Thread(target = Field_history, args=(fecha,ids,)) tag_history = threading.Thread(target = Tag_history, args=(fecha, ids, )) field_options = threading.Thread(target = Field_options) groups = threading.Thread(target = Groups) group_members = threading.Thread(target = Group_Members) comments = threading.Thread(target = Comments, args = (ids, )) users.start() orgs.start() field_history.start() tag_history.start() field_options.start() groups.start() group_members.start() comments.start() if __name__ == "__main__": t1 = datetime.now() print("Hora de Comienzo: ", str(t1)) hilos() t2 = datetime.now() print("Hora de Termino", str(t2)) print("Tiempo Total: ", str(t2 -t1)) <file_sep>/exe_complete.py from main import Zendesk_support import threading def Tickets(): print("Comienza Extraccion de Tickets") instancia = Zendesk_support(table = "tickets", tipo = 'complete') instancia.Tickets() print("Termino Extraccion de Tickets") print() print("Comienza extraccion de Ticket Tags") instancia.Tickets_tags() print("Termina Extraccion de Ticket Tags") print() def Users(): print() print("Comienza Extraccion de Usuarios") instancia = Zendesk_support(table = "users") instancia.Users() print("Termina Extraccion de Usuarios") print() print("Comienza Extraccion de User Tags") instancia.Users_tags() print("Termina Extraccion de User Tags") print() def Orgs(): print() print("Comienza Extraccion de Organizaciones") instancia = Zendesk_support(table = "orgs") instancia.Orgs() print("Termina Extraccion de 
Organizaciones") print() print("Inicia extraccion de Org tags") instancia.Orgs_tags() print("Termina Extraccion de Org Tags") print() print("Inicia Extracciond e Orgs Domains") instancia.Orgs_domains() print("Termina Extraccion de Org Domains") print() def Field_options(): print("Inicia Extraccion de Field Options") instancia = Zendesk_support(table = "field_options") instancia.field_options() print("Termina Extraccion de Field Options") print() def Field_history(): print("Inicia Extraccion de Field History") instancia = Zendesk_support(table = "field_history") instancia.field_history() print("Termina Extraccion de Field history") print() def Tag_history(): print("Inicia Extraccion de Tag history") instancia = Zendesk_support(table = "tag_history") instancia.tag_history() print("Termina Extraccion de Tag History") print() def Groups(): print("Inicia Extraccion de Grupos") Zendesk_support(table = "groups") print("Termina Extraccion de Grupos") print() def Group_Members(): print("Comenzo Extraccion de miembros de grupo") Zendesk_support(table = "group_members") print("Termino extraccion de miembros de grupo") print() def Comments(): print("Comienza Extraccion de Comentarios") instancia = Zendesk_support(table= "comments") instancia.Comments() print("Termino Extraccion de Commentarios. Al Fin") print() def hilos (): tickets = threading.Thread(target = Tickets) users = threading.Thread(target = Users) orgs = threading.Thread(target = Orgs) field_options = threading.Thread(target= Field_options) field_history = threading.Thread(target = Field_history) tag_history = threading.Thread(target = Tag_history) groups = threading.Thread(target = Groups) group_members = threading.Thread(target = Group_Members) comments = threading.Thread(target = Comments) tickets.start() users.start() orgs.start() field_options.start() field_history.start() tag_history.start() groups.start() group_members.start() comments.start() if __name__ == "__main__": hilos()
e8aae1a7741514ffeb0b527b5a2477f0091982ab
[ "Python", "Text" ]
6
Python
VictorPimentel06/Zendesk
c1fa44394e5f81623b255564783050f28a3e37dc
0912cec5465746b7341f2091b98b42af84b68650
refs/heads/master
<file_sep>import React, { useState, useEffect } from "react"; export const Gallary = () => { const [gallaryData, handleGallaryData] = useState({}) const getGallaryData = async () => { const data = await fetch(`https://api.giphy.com/v1/gifs/trending?api_key=<KEY>&limit=25&rating=g`) const getData = await data.json() return handleGallaryData(getData) } useEffect(() => { return getGallaryData() }, []) return <div className="wrapper"> {gallaryData.data && gallaryData.data.map(gifImage => { const src = gifImage.images.original_mp4.mp4 return <video src={src}> </video> })} </div> }
2f06515def3d0cfd9e3c3478205ac4f7e4a17eef
[ "JavaScript" ]
1
JavaScript
shreyaa1/flickr
ffeaeb44638fc88f5b3464d4928f465231d64a51
440ca7eb9229c6c2027ce40ea776de9103dcc29d
refs/heads/Django
<repo_name>blueweaver/Gruvest<file_sep>/yfinance_parser/parse_yahoo_finance.py ''' Tequila Trader 2020 ''' import yfinance as yf import argparse # Allows us to use command line arguments import threading # multithreading import csv # write to csv files #import sys #import uno #import datetime #pip3 install -e git+git://github.com/vonHacht/yfinance.git@master#egg=yfinance # Function used in multithreading to collect information from yfinance def parse_stock_pages(entities, symbol): proceed = 5 while(proceed > 0): try: stock = yf.Ticker(symbol) # call to yfinance info = stock.info data = [] items.append([ info["longName"], info["symbol"], info["bid"], info["ask"], info["open"], info["dayLow"], info["dayHigh"], info["previousClose"], info["volume"], info["fiftyDayAverage"], info["twoHundredDayAverage"], info["dividendRate"], info["forwardPE"], info["beta"], info["trailingAnnualDividendYield"], info["payoutRatio"], info["fiftyTwoWeekHigh"], info["fiftyTwoWeekLow"], info["marketCap"], info["quoteType"], info["fiveYearAvgDividendYield"], info["enterpriseToRevenue"], info["forwardEps"], info["bookValue"], info["enterpriseToEbitda"], info["enterpriseValue"], info["lastSplitFactor"], info["earningsQuarterlyGrowth"] ]) print( info["longName"], info["symbol"], info["bid"], info["ask"], info["open"], info["dayLow"], info["dayHigh"], info["previousClose"], info["volume"], sep=",") # info["fullTimeEmployees"], ----- Not all stocks have this listed #print(hist) proceed = 0 except exception as e: proceed -= 1 print("ERROR FOR %s: %s" %(symbol, e)) #sys.exc_info() def main(doc): #if doc[-4:] != ".csv": #print("Error: Document is not a CSV File.") #quit() print("Fetching data for %s"%(doc)) #start by loading up symbols from txt file stocksToLoad = open(doc + ".txt", "r") fromStocksToLoad = stocksToLoad.readlines() items = [] fields = [] threads = [] #parse_stock_pages(items, fromStocksToLoad[0][:-1], fields, True) #amtStocks = len(fromStocksToLoad) #for i in range(1, amtStocks): for 
stock in fromStocksToLoad: #print(stock[:-1]) #stock = fromStocksToLoad[i][:-1] th = threading.Thread(target=parse_stock_pages,args=[items, stock[:-1]]) th.start() threads.append(th) for th in threads: th.join() # Write to CSV File fields = [ 'Name', 'Symbol', 'Bid', 'Ask', 'Open', 'Day Low', 'Day High', 'Previous Close', 'Volume', 'Fifty Day Average', 'Two Hundred Day Average', 'Dividend Rate', 'Forward PE', 'Beta', 'Trailing Annual Dividend Yield', 'Payout Ratio', 'Fifty Two Week High', 'Fifty Two Week Low', 'Market Cap', 'Quote Type', 'Five Year Avg Dividend Yield', 'Enterprise To Revenue', 'Forward Eps', 'Book Value', 'Enterprise To EBITDA', 'Enterprise Value', 'Last Split Factor', 'Earnings Quarterly Growth' ] with open(doc + ".csv", 'w') as csvfile: # creating a csv writer object csvwriter = csv.writer(csvfile) # writing the fields csvwriter.writerow(fields) # writing to file csvwriter.writerows(items) # close documents csvfile.close() stocksToLoad.close() if __name__ == "__main__": argparser = argparse.ArgumentParser() argparser.add_argument('document', help = 'CSV File') args = argparser.parse_args() doc = args.document #if doc[-4:] != ".csv": #print("Error: Document is not a CSV File.") #quit() main() <file_sep>/Gruvest-Django/mysite/myapp/templates/home.html {% extends "base.html" %} {% block content %} <div> {% if user.is_authenticated %} Current Funds: ${{ user.catchermodel.funds }} Subscriptions: {% for itor in subscription %} {{ itor.pitcher }} {% endfor %} {% else %} Current Funds: $0 {% endif %} </div> <a href="/" class="button">Popularity</a> <a href="{% url 'sortedCost' %}" class="button">Cost</a> <a href="{% url 'sortedDate' %}" class="button">Date</a> <p> sorted by: {{sort}} </p> <ul class="pitch-list-column"> {% for pItor in post %} <p class="net-votes">{{pItor.getTotalVotes}}</p> <div class="pitch-list"> <div class="voting-buttons"> <form action="{% url 'upVotePost' pItor.pk %}" method="POST"> {% csrf_token %} <button type="submit" 
name="post_id" value="{{ pItor.id }}" id="upvote-button" style='font-size:25px;'>&#8593</button> </form> <form action="{% url 'downVotePost' pItor.pk %}" method="POST"> {% csrf_token %} <button type="submit" name="post_id" value="{{ pItor.id }}" id="downvote-button" style='font-size:25px;'>&#8595</button> </form> </div> <div class="abstracted-pitch-info"> <a href="{% url 'pitchDetail' pItor.pk %}" id="pitch-title">{{pItor.header}}</a> <p id="abstracted-pitch-info-cost">Pitcher: {{pItor.author}}</p> <p id="abstracted-pitch-info-cost">Made On: {{pItor.published_on}}</p> <p id="abstracted-pitch-info-cost">Cost: {{pItor.cost}}</p> <form action="{% url 'sub' pItor.id %}" method="POST"> {% csrf_token %} <button type="submit" name="post_id" value="{{ pItor.id }}" id="subscribe-button"> Subscribe </button> </form> </div> <br> </div> {% endfor %} </ul> </div> {% endblock %}<file_sep>/alpha_vantage/demo.py import pandas as pd from alpha_vantage.timeseries import TimeSeries import time apiKey = '<KEY>' ts = TimeSeries(key="apiKey", output_format='pandas') data, meta_data = ts.get_daily(symbol='AAPL', outputsize='full') closeData = data['4. 
close'] print(closeData) percentChange = closeData.pct_change() print(percentChange) <file_sep>/Gruvest-Django/requirements.txt aiohttp==3.7.3 alpha-vantage==2.2.0 asgiref==3.2.10 astroid==2.4.2 async-timeout==3.0.1 attrs==20.3.0 certifi==2020.12.5 chardet==3.0.4 coverage==5.3 Django==3.1.1 django-chartjs==2.2.1 djangorestframework==3.12.2 idna==2.10 isort==5.6.4 lazy-object-proxy==1.4.3 mccabe==0.6.1 multidict==5.1.0 numpy==1.19.4 pandas==1.1.4 pylint==2.6.0 pylint-django==2.3.0 pylint-plugin-utils==0.6 python-dateutil==2.8.1 pytz==2020.1 requests==2.25.0 six==1.15.0 sqlparse==0.3.1 toml==0.10.1 typing-extensions==3.7.4.3 urllib3==1.26.2 wrapt==1.12.1 yarl==1.6.3 <file_sep>/Gruvest-Django/mysite/myapp/migrations/0001_initial.py # Generated by Django 3.1.1 on 2020-12-07 06:50 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='StocksModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=100)), ('ticker', models.CharField(max_length=10)), ('date', models.DateTimeField(auto_now_add=True)), ('closingPrice', models.DecimalField(decimal_places=2, default=0.0, max_digits=4)), ('percentageChange', models.DecimalField(decimal_places=2, default=0.0, max_digits=3)), ], ), migrations.CreateModel( name='UserModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('header', models.CharField(max_length=100)), ('post', models.CharField(max_length=5000)), ('comparisonSP500', models.DecimalField(decimal_places=2, default=0.0, max_digits=3)), ('comparisonGruvest', models.DecimalField(decimal_places=2, default=0.0, max_digits=3)), ('userFeedback', models.DecimalField(decimal_places=1, default=0.0, 
max_digits=1)), ('upVotes', models.IntegerField(default=0)), ('downVotes', models.IntegerField(default=0)), ('cost', models.PositiveIntegerField(default=1)), ('published_on', models.DateTimeField(auto_now_add=True)), ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='UpvoteModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('upvotedPost', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='upvoted_post', to='myapp.usermodel')), ('upvoter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='upvote_user', to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='TrackedStocksModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('description', models.CharField(default='', max_length=100)), ('date', models.DateTimeField(auto_now_add=True)), ('category', models.CharField(choices=[(0, 'BUY'), (1, 'HOLD'), (2, 'SELL')], default=1, max_length=4)), ('data', models.ManyToManyField(to='myapp.StocksModel')), ('pitcher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='myapp.usermodel')), ], ), migrations.CreateModel( name='SubscribeModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('pitcher', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pitcher_user', to=settings.AUTH_USER_MODEL)), ('subscriber', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subcribe_user', to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='PurchaseModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('purchasedPost', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='purchased_post', 
to='myapp.usermodel')), ('purchaser', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='purchase_user', to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='DownvoteModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('downvotedPost', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='downvoted_post', to='myapp.usermodel')), ('downvoter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='downvote_user', to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='CommentModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('comment', models.CharField(max_length=240)), ('published_on', models.DateTimeField(auto_now_add=True)), ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='myapp.usermodel')), ], ), migrations.CreateModel( name='CatcherModel', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('funds', models.PositiveIntegerField(default=0, verbose_name='Add amount')), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ] <file_sep>/Gruvest-Django/mysite/myapp/forms.py from django import forms from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.models import User from . 
import models def must_be_unique(value): user = User.objects.filter(email=value) if len(user) > 0: raise forms.ValidationError("Email Already in Use") return value # Class used by PitchCreator class PostPitchForm(forms.ModelForm): # meta class class Meta: # model to be used model = models.UserModel # fields to be used fields = [ "header", "post", "cost", ] def save(self, request): post_instance = models.UserModel() post_instance.post = self.cleaned_data["post"] post_instance.author = request.user post_instance.header = self.cleaned_data["header"] post_instance.cost = self.cleaned_data["cost"] post_instance.save() return post_instance # Class used by CommentCreator class PostCommentForm(forms.ModelForm): # meta class class Meta: # model to be used model = models.CommentModel # fields to be used fields = [ "comment", ] def save(self, request, pk): post_instance = models.UserModel.objects.get(id=pk) comment_instance = models.CommentModel() comment_instance.post = post_instance comment_instance.comment = self.cleaned_data["comment"] comment_instance.author = request.user comment_instance.save() return comment_instance class AddFundsForm(forms.ModelForm): # meta class class Meta: # model to be used model = models.CatcherModel # fields to be used fields = [ "funds", ] class RegistrationForm(UserCreationForm): email = forms.EmailField( label="Email", required=True, validators=[must_be_unique] ) class Meta: model = User fields = ("username", "email", "password1", "<PASSWORD>") def save(self, commit=True): user = super().save(commit=False) user.email = self.cleaned_data["email"] if commit: user.save() return user ''' class UpdateStocksForm(forms.ModelForm): class Meta: model = models.TrackedStocksModel fields = [ "percentage" ] '''
f8443969e9cad40b2e4bce9ef9c9fdad5e8d1135
[ "Python", "Text", "HTML" ]
6
Python
blueweaver/Gruvest
41bd7f7cc40df428c8dd5f4c46f67dfaed8b247f
63f6702bf31ad31330f888665f021bea8822d840
refs/heads/master
<file_sep>from tkinter import * import tkinter as tk import shutil_main import shutil_func def load_gui(self): self.btn_one = tk.Button(self.master,width=25,height=1,text='Folder with files', command=lambda:shutil_func.checkdaily(self)) self.btn_one.grid(row=1,column=0,padx=(15,0),pady=(35,10),sticky=W) self.btn_two = tk.Button(self.master,width=25,height=1,text='Folder that will receive files', command=lambda:shutil_func.receivedaily(self)) self.btn_two.grid(row=2,column=0,padx=(15,0),pady=(5,10),sticky=W) self.btn_three = tk.Button(self.master,width=25, height=2,text='Press to File Check', command=lambda:shutil_func.last_mod_time) self.btn_three.grid(row=3,column=0,padx=(15,0),pady=(5,10),sticky=W) self.txt_one = tk.Entry(self.master, width=50, text='') self.txt_one.grid(row=1,column=5,rowspan=1,columnspan=6,padx=(15,0),pady=(30,10),sticky=E) self.txt_two = tk.Entry(self.master, width=50, text='') self.txt_two.grid(row=2,column=5,rowspan=1,columnspan=6,padx=(15,0),pady=(0,10),sticky=E) if __name__ == "__main__": pass <file_sep>import shutil import os import datetime import time import shutil_gui import shutil_main from tkinter import filedialog #set where the source of the files are source = '/Users/AL/Desktop/FolderA/' #set the destination path to folderB dest = '/Users/AL/Desktop/FolderB/' #files = os.listdir(source) SECONDS_IN_DAY = 24 * 60 * 60 now = time.time() before = now - SECONDS_IN_DAY def last_mod_time(fname): return os.path.getmtime(fname) for fname in os.listdir(source): source_fname = os.path.join(source, fname) if last_mod_time(source_fname) > before: dest_fname = os.path.join(dest, fname) shutil.move(source_fname, dest_fname) def checkdaily(self): folder = filedialog.askdirectory() #self.txt_one.delete(0, END) self.txt_one.insert(0,folder) def receivedaily(self): folder = filedialog.askdirectory() #self.txt_two.delete(0, END) self.txt_two.insert(0,folder) <file_sep>from tkinter import * import tkinter as tk from tkinter import messagebox import 
shutil_gui import shutil_func class ParentWindow(Frame): def __init__(self, master, *args, **kwargs): Frame.__init__(self, master, *args, **kwargs) self.master = master self.master.title("Move Daily Files") self.master.minsize(525,175) #(Height, Width) self.master.maxsize(525,175) shutil_gui.load_gui(self) if __name__ == "__main__": root = tk.Tk() App = ParentWindow(root) root.mainloop()
856baa76a27e0202143daa827e4d21099edacfb5
[ "Python" ]
3
Python
ASKL88/File-Transfer-Assignment
663730dc65b3ff550ef5da3c8c571b6f44cbfc75
1fa5edbea3b8422ce1bdc3b059ae6dbdd74d326d
refs/heads/master
<file_sep>import csv import math from operator import itemgetter from collections import Counter import os #CSV file openning def CsvReader(filename): csvData = [] current_folder = os.path.dirname(os.path.abspath(__file__)) #parent folder path exact_file_path = os.path.join(current_folder, filename) #add filename in path with open(exact_file_path, 'r') as f: #open file reader = csv.reader(f, delimiter=',') for row in reader: csvData.append(row) #adding value in our array return csvData #calcul for the euclidienne's distance betweeen two plant def Distance(dataItem,searchItem): distance = 0 for index in range(min(len(searchItem), len(dataItem))): distance += math.pow(float(dataItem[index])-float(searchItem[index]),2) return math.sqrt(distance) #Search nearest neighbors def SearchNeighbors(dataSet,searchItem,k): distanceArray = [] for itemIndex in range(len(dataSet)): #For each plant distance = Distance(dataSet[itemIndex],searchItem) if(distance > 0.0): #Check for positive value distanceArray.append([distance,dataSet[itemIndex]]) distanceArray = sorted(distanceArray, key=itemgetter(0)) #sort the array for finding the topest one nearestNeighbors = [distanceArray[i][1][-1] for i in range(k)] typeSearchItem = Counter(nearestNeighbors) #count the number or nearest neighbors return typeSearchItem.most_common(1)[0][0] #Search and write result for prediction def FindValue(trainingSet,predictSet,k): dir_path = os.path.dirname(os.path.realpath(__file__)) text_file = open(dir_path+"\\Prediction.txt", "w") for i in range(len(predictSet)): searchItem = predictSet[i] text_file.write(SearchNeighbors(trainingSet,searchItem,k)+"\n") #write the value found in our result file text_file.close() print("The prediction is complete, the result is in the Prediction.txt file") #Evaluation for choosing K def EvaluateK(dataSet,pourcentageTraining,k): trainingSet = dataSet[:int(len(dataSet)*pourcentageTraining)] predictSet = dataSet[int(len(dataSet)*pourcentageTraining):] goodGuess = 0 for 
itemIndex in range(len(predictSet)): predictLine = predictSet[itemIndex] #add predict value in the array searchItem = predictLine[:len(predictLine)-1] realValue = predictLine[-1] #get the real value preditedValue = SearchNeighbors(trainingSet,searchItem,k) goodGuess += 1 if realValue == preditedValue else 0 #if prediction is correct print("K : "+str(k)+", Percentage of TrainingSet : "+str(pourcentageTraining*100)+"% Percentage of accuracy: " + str(goodGuess/len(predictSet)*100)+"%") if __name__ == '__main__': trainingSet = CsvReader('training.csv') for k in range(1,21): EvaluateK(trainingSet,0.8,k) predictSet = CsvReader('predict.csv') FindValue(trainingSet,predictSet,7) <file_sep># KNN : K-Nearest Neighbors Algorithm KNN can be used for <b>classification</b> : the output is a class membership. An object is classified by a majority vote of its neighbors, with the object being assigned to the class most common among its k nearest neighbors. It can also be used for <b>regression</b> : output is the value for the object (predicts continuous values). This value is the average (or median) of the values of its k nearest neighbors. ## Data set An exemple of data we can process: ![](./Readme_Content/iris.jpg?raw=true "Iris data set") The project has two steps: - We have a first dataset named "training.csv". It has exactly the same structure as the Iris dataset with 4 input variables and a qualitative output variable. the goal is to cut it into two sets: <b>training</b> and <b>testing</b> set. For this, parameters such as the number of lines in each data set and the K must varied. - The second dataset is named "predict.csv". We must use the model to predict the class for each sample of the file. An output file will contain each prediction associated with its line. ## Run the code Now we can run the code ! 
```cmd python knn.py ``` It will show us the different k for our parameters: - 80% training set - 20% testing set ![](./Readme_Content/prediction.jpg?raw=true "Iris data set") We found that K=7 has the best percentage of accuracy after running some test. At the end we have an output file with our prediction called "Prediction.txt"
57e3d69465281053547d75a2225ed75829e55b87
[ "Markdown", "Python" ]
2
Python
MarcEtienneDartus/KNN
63b633c7f815103e3e1ec2b802726e98868c08b1
69c21a8df9d1da92dd7bdccf9094e24fcf857fe4
refs/heads/master
<file_sep>#include<stdio.h> int main() { int a=0; printf("输入"); scanf("%d",&a); printf("a=%d",a); printf("\n\n++a=%d",++a); printf("\na=%d",a); printf("\n\na++=%d",a++); printf("\na=%d",a); return 0; } <file_sep>#include<stdio.h> main() {int n,a,q,S; printf("aqn\n"); scanf("%d%d%d",&a,&q,&n); while(n>0) {q*q; n--; } S=a*(1-q); S=S/(1-q); printf("%d\n",S); return 0; } <file_sep>#include<stdio.h> int fat(int n) {int z; if(n==1) z=1; else z=n*fat(n-1); return(z); } main() {int x=0 ,A=0; int fat(int n); printf("请输入数据:"); scanf("%d",&x); if(x<0) printf("输入数据错误"); else if (x==0) printf("1"); else A=fat(x); printf("%d",A); } <file_sep>#include<stdio.h> main() {int a,b,c,x; scanf("%d",&x); a=x%10; b=x/10%10; c=x/100; if (a>0) printf("%d%d%d",a,b,c); else if(a=0,b>0) printf("%d%d",b,c); else printf("%d",c); return (0); }
ab7cddb137b3645f85c8525f893b35f3fc6a75e6
[ "C++" ]
4
C++
Yycxj/c-
2b53e04c285314d1e5c737cfcae2e82d90583c90
089d591be439e7a3b5aa3044f443d7d5da05a47d
refs/heads/master
<repo_name>camrohlof/display-board<file_sep>/events/filter.py import django_filters import datetime now = datetime.datetime.now() class TheFilter(django_filters.FilterSet): time = django_filters.TimeFilter(field_name='time_of_event', lookup_expr='range=(now.hour, (now.hour + 2))') strict = False <file_sep>/events/models.py from django.db import models from datetime import datetime class Event(models.Model): name_of_event = models.CharField(max_length=20) time_of_event = models.TimeField('time of event') location_of_event = models.CharField(max_length=20) time_posted = datetime.now() class Meta: ordering = ["time_of_event"] def __str__(self): return self.name_of_event <file_sep>/events/migrations/0003_auto_20181205_2050.py # Generated by Django 2.1.4 on 2018-12-06 01:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('events', '0002_auto_20181205_0128'), ] operations = [ migrations.AlterField( model_name='event', name='location_of_event', field=models.CharField(max_length=20), ), migrations.AlterField( model_name='event', name='name_of_event', field=models.CharField(max_length=20), ), ] <file_sep>/requirements.txt gunicorn Django==2.1.4 psycopg2-binary dj-database-url django-filter==2.0.0 pytz==2018.7 whitenoise==4.1.2 <file_sep>/README.md # display-board <file_sep>/events/views.py from django.shortcuts import render from .models import Event from datetime import * def index(request): event_list = Event.objects.filter(time_of_event__range=(add_time(-1,2)))[:5] context = {'event_list': event_list} return render(request, 'events/index.html', context) def add_time(timeShift1, timeShift2): timeStr = str(datetime.now(tz=None)) now = datetime.strptime(timeStr, "%Y-%m-%d %H:%M:%S.%f").time() firstTime = (datetime.combine(date(1,1,1), now) + timedelta(hours=timeShift1)) secTime = (datetime.combine(date(1,1,1), now) + timedelta(hours=timeShift2)) if firstTime < secTime: return (firstTime, secTime) else: secTime = 
(datetime.combine(date(1,1,1), now) + timedelta(days=1,hours=timeShift2)) return (firstTime, secTime) <file_sep>/events/migrations/0002_auto_20181205_0128.py # Generated by Django 2.1.4 on 2018-12-05 01:28 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('events', '0001_initial'), ] operations = [ migrations.RenameField( model_name='event', old_name='location_text', new_name='location_of_event', ), migrations.RenameField( model_name='event', old_name='name_text', new_name='name_of_event', ), ]
0bf71da54d8c080c2a9021f9065707d43c276d3b
[ "Markdown", "Python", "Text" ]
7
Python
camrohlof/display-board
335db4f224876f0a47414d3e5aa4a83d7a47304d
0c2ea2aacfa6b1ff3dead2b253e6fb4ad839d3bc
refs/heads/master
<file_sep>INSERT INTO workouts (sunday, monday, tuesday, wednesday, thursday, friday, saturday) VALUES ( 'Leg Extension', 'Dumbbell Bench Press', 'Cardio', 'Pullup', 'Shoulder Press', 'Barbell Bench Press', 'Push ups' ), ( 'Leg Curl', 'Incline BarbellBench Press', 'Reverse Grip Pulldown', 'Dumbbell Lateral Raise', 'Squat', 'Incline Push ups', 'Cable Crossover' ), ( 'Leg press', 'Dumbbell Fly', 'Barbell Upright Row', 'Lat Pulldown', 'Barbell Curl', 'Sit Ups', 'Dumbbell Shrug' ), ( 'Dumbbell Lunge', 'Deadlift', 'Reverse Crunch', 'Cardio', 'Bodyweight Dip', 'Seated Cable Row', 'Calf Raise' ); <file_sep>module.exports = { show404(err, req, res, next) { res.sendStatus(404); }, show406(err, req, res, next) { res.sendStatus(406); }, showWorkouts(req, res) { res.render('workouts/workouts-index', { data:res.locals.workouts, }); }, showOne(req, res) { res.render('workouts/workouts-single', { data:res.locals.workouts, }); }, showAddForm(req, res) { res.render('workouts/workouts-add'); }, showEditform(req, res) { res.render('workouts/workouts-add', { data: res.locals.workout, }); }, handleCreate(req, res) { res.redirect('/workouts'); }, handleUpdate(req, res) { res.redirect(`/workouts/${req.params.id}`); }, handleDelete(req, res) { res.redirect('/workouts'); }, }; <file_sep>// Express library const express = require('express'); const logger = require('morgan'); const bodyParser = require('body-parser'); const path = require('path'); //const methodOverride = require('method-override'); const workoutRouter = require('./routes/workouts'); // Create the port const PORT = process.env.PORT || 3000; // Start up express const app = express(); app.set('views', path.join(__dirname, 'views')); app.set('view engine', 'ejs'); //set up logging app.use(logger('dev')); app.use(bodyParser.urlencoded({ extended: false })); app.use(bodyParser.json()); // Routes app.use('/workouts', workoutRouter); app.use('/workouts-edit', workoutRouter); //app.use(methodOverride('_method')); //app.get('/', 
(req, res) => res.render('index', { // message: 'Hello you!', // subTitle: 'Welcome to the workout', // workoutAuthors: [] //})); app.get('/', (req, res) => { res.render('workouts'); }); // Set up a listener on PORT // Log the message "Server up and listening on port XXXX" // Log which environment we're in app.listen(PORT, () => { console.log(`Server up and listening on port ${PORT}, in ${app.get('env')} mode.`); }); <file_sep># Project 2 ## Daily workout plan with nutrition facts and gym locator. # App Description This app will allow users to create, update and delete routines for a daily workout plan. There will be different workouts for the seven days in the week. They also will be able to track how many calories they burn per day. I would like to include a gym and food store locator API so the user can search for nearby places. # Tables Create table for differnt days of the week. Add a workout table. Joined table to display on the page. Separate into two tables. # Project 2 wire frames ![project2wb1](https://git.generalassemb.ly/storage/user/7151/files/e53f1264-8d66-11e7-93e3-4a73e5fab169) ![project2wb2](https://git.generalassemb.ly/storage/user/7151/files/f5bcda18-8d66-11e7-9f0b-41e230061284) ![project2wb3](https://git.generalassemb.ly/storage/user/7151/files/02872550-8d67-11e7-821c-6c1ab0e9cc8b) ![project2wb4](https://git.generalassemb.ly/storage/user/7151/files/0c1782a4-8d67-11e7-91ac-0e347d533f9a) # Technologies needed HTML, CSS, Bootstrap, EJS, Express.js, Node.js, Postgres # Issues Using an API for location of gyms and stores to buy food. # Suggestions Add a pricing sheet that displays gym prices and price of foods. 
# Workout-app <file_sep> const workoutDB = require('../models/workoutDB'); module.exports = { //Create a blank Quote and set it in res.locals makeBlankQuote(req, res) { res.json({ id: null, content: null, author: null, genre_type: null, }); }, // Middleware function: index(req, res, next) { workoutDB.findAll() .then((workouts) => { res.locals.workouts = workouts; next(); }) .catch(err => next(err)); }, // Get a quote from the DB and set it in res.locals getOne(req, res, next) { workoutDB.findById(req.params.id) .then((workouts) => { res.locals.workouts = workouts; next(); }) .catch(err => next(err)); }, // Create Middleware: // Get quote data from the front-end and set it in the DB // Sets the results of the insertion into res.locals.quote create(req, res, next) { workoutDB.save(req.body) .then((workouts) => { res.locals.workouts = workouts; next(); }) .catch(err => next(err)); }, // Update Middleware: // Get quote data from the DB; // Merge the data from the front-end; // Set it in the DB; update(req, res, next) { workoutDB.update(req.body) .then((workouts) => { res.locals.workouts = workouts; next(); }) .catch(err => next(err)); }, // Destroy the quote at this id destroy(req, res, next) { workoutDB.destroy(req.params.id) .then(() => next()) .catch(err => next(err)); }, // Show a blank HTML form showQuoteForm: (req, res) => { res.json({message: 'I’m the HTML form for new quotes. I post to /workouts'}); }, }; <file_sep>\c workouts_db; DROP TABLE IF EXISTS workouts; CREATE TABLE workouts ( id SERIAL PRIMARY KEY, Sunday varchar, Monday varchar(255), Tuesday varchar(255), Wednesday varchar(255), Thursday varchar(255), Friday varchar(255), Saturday varchar(255) ); <file_sep>const pgp = require('pg-promise')(); const dbConfig = require('../db/config'); // execute pgp with our db config, so a connection is made. 
const db = pgp(dbConfig); // export our collection of functions module.exports = { findAll() { return db.many(` SELECT * FROM workouts ORDER BY id `); }, findById(id) { return db.one(` SELECT * FROM workouts WHERE id = $1 `, id); }, save(workouts) { console.log(workouts); return db.one(` INSERT INTO workouts RETURNING * `, workouts); }, update(workouts) { return db.one(` UPDATE workouts SET WHERE id = $/id/ RETURNING * `, workouts); }, destroy(id) { return db.none(` DELETE FROM workouts WHERE id = $1 `, id); }, };
fd92d34c6d6fa91e03289f0d1a9b53d9c7ec7328
[ "JavaScript", "SQL", "Markdown" ]
7
SQL
richardboles/Workout-app
eb2cbdccf38590eba503ecdd63da266155a30d99
d535a2bb32af85f994467702e612cefbebe8b9b2
refs/heads/master
<file_sep>var numberArray = [1,2,3,4,5,6,7, 8, 9, 10]; console.log("Number Array: ", numberArray); // var filteredNumberArray = numberArray.filter( function (value) { // return value > 5; // }); function filter5above(value) { return value > 5; } numberArray.filter(filter5above); console.log(filteredNumberArray); var parent = { value: "ParentValue", obj: { objValue: "parentObjValue" }, walk: function() { console.log("walking!"); } } var child = Object.create(parent); console.log(child.value); console.log(child.obj.objValue); console.log(parent.value); console.log(parent.obj.objValue); console.log(parent); console.log("child", child);<file_sep>(function(){ 'use strict'; angular.module('ShoppingListCheckOff',[]) .controller('ToBuyController', ToBuyController) .controller('AlreadyBoughtController', AlreadyBoughtController) .service('ShoppingListCheckOffService', ShoppingListCheckOffService) ToBuyController.$inject = ['ShoppingListCheckOffService']; function ToBuyController(ShoppingListCheckOffService){ var toBuyList = this; toBuyList.items = ShoppingListCheckOffService.getToBuyList(); toBuyList.buyItem = function (itemIndex) { var bought_item = ShoppingListCheckOffService.removeSelectedItem(itemIndex); ShoppingListCheckOffService.addItemToBoughtList(bought_item); }; }; AlreadyBoughtController.$inject = ['ShoppingListCheckOffService']; function AlreadyBoughtController(ShoppingListCheckOffService){ var alreadyBuyList = this; alreadyBuyList.boughtList = ShoppingListCheckOffService.getBoughtList(); } function ShoppingListCheckOffService(){ var service = this; var listItems = [ { name: "Milk", quantity: "2" }, { name: "Donuts", quantity: "200" }, { name: "Cookies", quantity: "300" }, { name: "Chocolate", quantity: "10" }, { name: "Ginger", quantity: "2" } ]; var boughtList = []; service.getToBuyList = function() { return listItems; }; service.removeSelectedItem = function(itemIndex) { var bought_item = listItems.splice(itemIndex, 1); return bought_item; } 
service.addItemToBoughtList = function(selectedItem) { var item = { name: selectedItem[0].name, quantity: selectedItem[0].quantity }; boughtList.push(item); return boughtList; } service.getBoughtList = function () { return boughtList; } } })();
b65808a8704f8f4e2c6aa2a3e799403970e13587
[ "JavaScript" ]
2
JavaScript
ducthanh/ducthanh.github.io
3c5e2647cd18561ea6b0c686cac2b5b035d7319f
fef2af09333d072357c74042bf80668261dad0d5
refs/heads/master
<file_sep>#!/usr/bin/env bash set -e # halt script on error #sudo add-apt-repository ppa:jonathonf/texlive-2016 -y sudo apt-get update sudo apt-get install -y --no-install-recommends \ texlive-latex-recommended \ texlive-latex-extra \ texlive-fonts-recommended \ latex-xcolor \ texlive-lang-portuguese \ texlive-pictures \ pgf openssl aes-256-cbc -K $encrypted_6609dda7fdde_key -iv $encrypted_6609dda7fdde_iv -in publish-key.enc -out publish-key -d -out ~/.ssh/publish-key -d chmod u=rw,og= ~/.ssh/publish-key echo "Host github.com" >> ~/.ssh/config echo " IdentityFile ~/.ssh/publish-key" >> ~/.ssh/config git remote set-url origin <EMAIL>:fhcflx/cpc-neuro.git git fetch origin -f master:master #cp cpc-neuro2014/bib.bib ./ <file_sep>#!/usr/bin/env bash set -e # halt script on error #mv CPC-NEURO2014.pdf c.pdf #rm *2014.* #mv c.pdf CPC-NEURO2014.pdf #rm bib.bib git add . git commit -m "new update [skip ci]" git push origin HEAD:master exit <file_sep># Definições: ``` pro·to·co·lo |ó| substantivo masculino 1. Formulário. 2. Ata de conferências celebradas entre ministros plenipotenciários de diferentes nações, ou entre os membros de um congresso internacional. 3. Registro em que o escrivão do juízo relata o que se passou na audiência. 4. Regulamento que se observa em alguns atos públicos. "protocolo", in Dicionário Priberam da Língua Portuguesa [em linha], 2008-2013, http://www.priberam.pt/dlpo/protocolo [consultado em 23-03-2016]. ``` ``` Diretrizes clínicas: recomendações desenvolvidas de forma sistemática, com o objetivo de auxiliar profissionais e pacientes, na tomada de decisão em relação a alternativa mais adequada para o cuidado de sua saúde em circunstâncias clínicas específicas. 
São desenvolvidas com o objetivo de sintetizar as evidências científicas em relação à prevenção, diagnóstico, tratamento e reabilitação, sistematizando o conhecimentocientífico em relação a determinado agravo e propondo recomendações para o atendimento efetivo e seguro dos pacientes nas condições clínicas explicitadas. As diretrizes clínicas, deste modo, são mais completas e detalhadas e devem ser atualizadas a cada três anos. Os protocolos clínicos são adaptações das recomendações e orientações das diretrizes para os serviços em particular e definem fluxos de atendimento e algoritmos voltados para garantir o melhor cuidado em determinado serviço de saúde. Fixam-se, os protocolos clínicos, numa parte do processo da condição ou doenca e, em geral, num único ponto de atenção à saúde. Protocolos clínicos (definição 1): rotinas dos cuidados e das ações de gestão de um determinado serviço, equipe ou departamento, elaborados a partir do conhecimento científico atual, respaldados em evidências científicas, por profissionais experientes e especialistas em uma dada área, e que servem para orientar fluxos, condutas e procedimentos clínicos dos trabalhadores dos serviços de saúde. Protocolos clínicos (definição 2): conjunto de diretrizes, de estratégias, de critérios e de pautas, provenientes da revisão sistemática da evidência científica disponível e de uma avaliação profissional, apresentado de maneira estruturada e elaborado com o objetivo de ajudar os profissionais de saúde e os pacientes em suas decisões. Nota: nos protocolos clínicos são estabelecidos claramente os critérios diagnósticos de cada doença, o tratamento preconizado, com os medicamentos disponíveis nas respectivas doses corretas, os mecanismos de controle, o acompanhamento e a verificação de resultados e a racionalização da prescrição e do fornecimento dos medicamentos. 
Fonte: Implantação de diretrizes e protocolos clínicos, E-EFT-01, Agência Nacional de Saúde Suplementar - ANS, Ministerio da Saúde do Brasil, novembro de 2012. ``` ``` Protocolos Clínicos e Diretrizes Terapêuticas - PCDT Os PCDT têm o objetivo de estabelecer claramente os critérios de diagnóstico de cada doença, o algoritmo de tratamento das doenças com as respectivas doses adequadas e os mecanismos para o monitoramento clínico em relação à efetividade do tratamento e a supervisão de possíveis efeitos adversos. Observando ética e tecnicamente a prescrição médica, os PCDT, também, objetivam criar mecanismos para a garantia da prescrição segura e eficaz. Portanto, no âmbito do Componente Especializado da Assistência Farmacêutica (CEAF), os medicamentos devem ser dispensados para os pacientes que se enquadrarem nos critérios estabelecidos no respectivo Protocolo Clínico e Diretriz Terapêutica (Ministério da Saúde do Brasil). ``` > Protocolo (definição de trabalho deste projeto): rotinas de tratamento de doenças neoplásicas > cerebrais baseadas em ensaios clínicos de grupos cooperativos nacionais ou internacionais. > O esquema terapêutico é apresentado **_as per_** o descrito no protocolo de pesquisa dos ensaios > clínicos nos quais ele se baseia. Mais de um protocolo de pesquisa de ensaios clínicos pode > ser usado para embasar cada protocolo de tratamento de nosso serviço. Adaptações são feitas > baseadas na melhor evidência científica disponível e nas particularidades locais de nosso serviço. > Os pacientes não são incluídos em nenhum dos ensaios clínicos, nem recrutados para nenhuma outra > pesquisa clínica. O recrutamento para pesquisa clínica pressupõe a não inclusão dos pacientes nos > tratamentos descritos nestas rotinas. 
<file_sep>[![GitHub release](https://img.shields.io/github/release/fhcflx/cpc-neuro.svg)](https://github.com/fhcflx/cpc-neuro/releases) [![GitHub issues](https://img.shields.io/github/issues/fhcflx/cpc-neuro.svg)](https://github.com/fhcflx/cpc-neuro/issues) [![GitHub downloads](https://img.shields.io/github/downloads/fhcflx/cpc-neuro/total.svg)](https://github.com/fhcflx/cpc-neuro/releases) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.845724.svg)](https://doi.org/10.5281/zenodo.845724) [![ORCID](https://img.shields.io/badge/ORCID-0000--0002--8398--0993-blue.svg)](http://orcid.org/0000-0002-8398-0993) [![License: CC BY 4.0](https://img.shields.io/badge/License-CC%20BY%204.0-lightgrey.svg)](https://creativecommons.org/licenses/by/4.0) [![Build](https://img.shields.io/travis/fhcflx/cpc-neuro.svg)](https://travis-ci.org/fhcflx/cpc-neuro) # Manual de tratamento clínico de pacientes pediátricos com tumores cerebrais ## O que é: Manuscrito de texto de revisão dos protocolos de tratamento utilizados pela neuro-oncologia do Centro Pediátrico do Câncer do Hospital Infantil Albert Sabin. Protocolo aqui significa um esquema de tratamento baseado em um ensaio clínico patrocinado por grandes grupos cooperativos de tratamento do câncer infantil. Nenhum destes protocolos inclui o texto completo ou trechos dos protocolos originais de pesquisa. Os pacientes tratados em nosso centro seguindo estes protocolos não estão sendo recrutados para pesquisa clínica. Estes protocolos também não constituem diretrizes terapêuticas, nem protocolos clínicos no sentido estrito, pois não foram elaborados por instituições ou grupos organizados, usando metodologia explícita. Nossos protocolos podem ser encarados como rotinas de manuseio dos pacientes e suas patologias, utilizados em nosso serviço hospitalar e baseados em ensaios clínicos dos grande grupos cooperativos. 
Este manuscrito não necessariamente representa os pontos de vista ou é endossado pelo Hospital Infantil Albert Sabin ou pela Secretaria de Saúde do Estado do Ceará, sendo de iniciativa do responsável acima identificado. ## Como usar: Este manuscrito tem fins educativos e é voltado para falantes da língua portuguesa. Para baixar o arquivo em formato **pdf**, por favor, clique [aqui][manuscrito]. Embora este documento seja usado pelo responsável deste projeto como rotina de tratamento dos seus pacientes, o abaixo identificado responsável não pode responsabilizar-se pelo seu uso em outros locais e para o tratamento de outros pacientes, que não aqueles sob sua estrita supervisão. Os procedimentos e doses de medicamentos descritos no documento são no máximo possível fiéis ao empregado na literatura científica utilizada. No entanto, o responsável abaixo identificado não pode se responsabilizar por estas doses e seu uso, incluindo o manuseio não criterioso por profissional não habilitado para prescrever e administrar tais medicamentos. Apenas médicos registrados de acordo com a legislação vigente em seu país e devidamente habilitados por sociedades de cancerologia (hemato-oncologia) pediátrica devem usar este documento, em parte, ou no todo, e segundo seu juízo, para o tratamento de pacientes. Neste caso, o responsável abaixo identificado isenta-se de responsabilidade legal sobre quaisquer resultados, incluindo complicações, eventos adversos, prejuízos ou custos, advindos do uso deste documento, ou parte dele, por qualquer outro que não ele mesmo. Ao obter este documento a partir deste projeto, o usuário dele (o documento) está tacitamente concordando com estes e outros termos explicitados aqui. ## Divulgação: O projeto tem uma página na internet gerada pelo [GitHub Pages][pages], a qual pode ser vista [aqui][projeto]. ## Como contribuir: O manual foi escrito em [LaTeX][latex], usando [ShareLatex][share] e programas para _desktop_. 
O arquivo ```*.tex``` contém o código correspondente. Contribuições são bem-vindas. Se você ainda não tem uma conta no [GitHub][gh] (gratuita), [inscreva-se][gh-i], abra uma pendência ([_issue_][issue]) ou faça uma cópia ([_fork_][fork]), modifique o que achar necessário e peça para integrar ([_pull request_][pull]) suas mudanças ao projeto. ## Responsável: > <NAME>, > Médico cancerologista pediátrico > atuando desde 2008 (ingresso por concurso público) no [Hospital Infantil Albert Sabin][hias], Fortaleza - CE, Brasil <!-- > Membro da Sociedade Brasileira de Pediatria - [SBP][sbp] desde 2002, > Membro da Sociedade Brasileira de Oncología Pediátrica - [SOBOPE][sobope] desde 2013, > Membro da Society for Neuroncology - [SNO][sno] desde 2015 --> > Título de especialista em pediatria pela Sociedade Brasileira de Pediatria - [SBP][sbp] em 2012, > Título de especialista em cancerologia pediátrica pela Sociedade Brasileira de Cancerologia - [SBC][sbc] em 2014 > Contato: <EMAIL> - twitter: @ciencia4medica ## Licença: <a rel="license" href="http://creativecommons.org/licenses/by/4.0/"><img alt="Licença Creative Commons" style="border-width:0" src="https://i.creativecommons.org/l/by/4.0/88x31.png" /></a><br /><span xmlns:dct="http://purl.org/dc/terms/" href="http://purl.org/dc/dcmitype/Text" property="dct:title" rel="dct:type">CPC-NEURO</span> de <span xmlns:cc="http://creativecommons.org/ns#" property="cc:attributionName"><NAME></span> está licenciado com uma Licença <a rel="license" href="http://creativecommons.org/licenses/by/4.0/">Creative Commons - Atribuição 4.0 Internacional</a> [pages]:https://pages.github.com [manuscrito]:v0.1/CPC-NEURO2014.pdf?raw=true [hias]:http://www.hias.ce.gov.br [sbp]:http://www.sbp.com.br [sobope]:http://www.sobope.org.br [sno]:http://soc-neuro-onc.org [projeto]:http://fhcflx.github.io/cpc-neuro [gh]:https://github.com [gh-i]:https://github.com/join?source=header-home [issue]:https://github.com/fhcflx/cpc-neuro/issues/new 
[fork]:https://help.github.com/articles/fork-a-repo/ [pull]:https://github.com/fhcflx/cpc-neuro/compare [latex]:https://www.latex-project.org [share]:https://pt.sharelatex.com/project [sbc]:https://www.sbcancer.org.br/
79c91d01bc002ff02f4134b68d1bf709c2c31ec9
[ "Markdown", "Shell" ]
4
Shell
fhcflx/cpc-neuro
c7fedcd5425208234a7afc0cae82d3ace2a6b60a
1d59c6e4da894f6fd9b1419405c7bd4f582455b6
refs/heads/master
<file_sep>var React = require('react'); var ReactDOM = require('react-dom'); var List = require('./components/List.jsx'); //import List from "./components/List"; var Main = React.createClass({ render: function() { return ( <div> <List/> <h3>Hello World!!</h3> </div> ); } }); const app = document.getElementById('container'); ReactDOM.render(<Main/>, app); <file_sep>var React = require('react'); var ReactDOM = require('react-dom'); var List = React.createClass({ render: function() { return ( <h4>This is my sample code for React.</h4> ); } }); module.exports = List;
37e2a70af3d00ec1e6dbd3853078e7f78a6d0a90
[ "JavaScript" ]
2
JavaScript
ektarathi/react-skeleton
153cedd1f37ee240cff314800f23d80efd02da4c
d6b32edc41d0d56bb03350567f6ca94351f36b30
refs/heads/master
<file_sep>Simple(? not anymore!) shell script to automate some of the ways one might create a new git repo. The initial intent was to automate creating remote repos to push to for backup/sharing purposes. Has been expanding to slightly more varied uses. ## Supports: * creating a new project from scratch with no code yet (" -n <newprojectname> ") * already being somewhere and initializing from all current code (" -i ") * creating a remote repo on your own server * creating a remote repo on GitHub <file_sep>#!/bin/bash # -x # newgit.sh [ -i ] [ -n <projectname> ] [ -g ] [ -t <githubtoken> ] [ -u <username> ] [ -p <password> ] [ -s <remote-git-system> ] [ -d <remote-git-dir> ] [ -T <project-type> ] # this assumes you've created a project locally (or it will create a brand new one itself if given "-i"), and are managing it via git, again, just with a local repo. # now you want to use some remote thing as the "origin" # run this script in the top directory of the project (where ".git/" lives) # it will set up a bare git repo of the same folder name (with ".git" appended) at whatever "$remotebase" is # the hostname on which to put the repo # honors "REMOTE_GIT_SYSTEM" environment variable #remoteSystem='example.com' # the account to which you have SSH access # honors "REMOTE_GIT_USER" environment variable #remoteAccount='example' # EXAMPLES: # # newgit.sh -n mynewproject -g -T Node # Create a brand new project here called "mynewproject", and keep it at GitHub. Initialize .gitignore for NodeJS # # newgit.sh -i # I'm in a directory with existing code that I want to put under GIT version control # It will host the git repo on my private server (using REMOTE_GIT_SYSTEM and REMOTE_GIT_USER environment variables) # The project is auto-named the name of the current directory. # If it looks like a known project type, a .gitignore file will be created for it before committing anything. 
# TODO: support multiple project types for .gitignore initialization, both command-line and guessed # TODO: support naming the project as something other than the directory name (-n option coupled with -i) # TODO: massive code cleanup, sheesh # TODO: better documentation and example use-cases # TODO: use "git init <directory>" for plain "-n newprojectname" instead of manually creating it... I didn't realize that was available # The directory in which all the repos live, if on a private server. remoteDir='git' # in this case, just "git/" in the login account's home dir #### #### Don't change anything past here #### # a repo we can pull stuff from that is useful for initializing .gitignore files gitignoreBase="https://raw.github.com/github/gitignore/master" # $1 is the project type, $2 is a message. function pullGitignore() { projectGitignoreURL="$gitignoreBase/$1.gitignore" tmpGitignore="tmpGitignore$$" curl --location --write-out "%{http_code}" --url "$projectGitignoreURL" --output $tmpGitignore.tmp --fail > $tmpGitignore.out 2> $tmpGitignore.err result=$? if [ $result -ne 0 ]; then echo "Was unable to retrieve .gitignore information from: $projectGitignoreURL" echo "Perhaps '$1' is not a valid/known type in the list?" echo -n "Response code was: " cat $tmpGitignore.out echo echo -n "And curl said: " cat $tmpGitignore.err echo else echo "$2 Appending to .gitignore from:" echo " $projectGitignoreURL" # Append, in case there was already a ".gitignore" file here. # Yes, we might end up duplicating rules. So what? 
echo >> .gitignore echo "#" >> .gitignore echo "# START FROM: $projectGitignoreURL" >> .gitignore echo "# RETRIEVED: `date --iso-8601=ns --reference=$tmpGitignore.tmp`" >> .gitignore echo "#" >> .gitignore cat $tmpGitignore.tmp >> .gitignore echo >> .gitignore echo "#" >> .gitignore echo "# END FROM: $projectGitignoreURL" >> .gitignore echo "# RETRIEVED: `date --iso-8601=ns --reference=$tmpGitignore.tmp`" >> .gitignore echo "#" >> .gitignore fi # cleanup all the temp files we createde while CURLing /bin/rm -f $tmpGitignore.tmp $tmpGitignore.out $tmpGitignore.err } while getopts "?hxiGHn:s:d:u:p:T:t:" flag do # echo "$flag" $OPTIND $OPTARG case "$flag" in i) initialize="true";; G) useGithub="true";; H) useHeroku="true";; n) newProjectName="$OPTARG";; s) remoteSystem="$OPTARG";; d) remoteDir="$OPTARG";; u) remoteAccount="$OPTARG";; p) password="$OPTARG";; T) projectType="$OPTARG";; t) githubToken="$OPTARG";; x) set -x;; [?h]) echo >&2 "Usage: $0 [ -?hx ] [ -i ] [ -n <projectname> ] [ -H ] [ -G ] [ -t <githubtoken> ] [ -u <username> ] [ -p <password> ] [ -s <remote-git-system> ] [ -d <remote-git-dir> ] [ -T <project-type> ]" exit 1;; esac done shift $((OPTIND-1)) if [ "$remoteSystem" == "" -a "$useGithub" != "true" ]; then if [ "$REMOTE_GIT_SYSTEM" != "" ]; then remoteSystem="$REMOTE_GIT_SYSTEM" else echo "Need either a remote system ( -s ) or to be told to use GitHub ( -G ). Quitting." 
exit 99 fi fi if [ "$remoteAccount" == "" -a "$useGithub" == "true" ]; then fromConfig=`git config --get github.user` if [ "$GITHUB_USER" != "" ]; then remoteAccount="$GITHUB_USER" elif [ "$GITHUB_USERNAME" != "" ]; then remoteAccount="$GITHUB_USERNAME" elif [ "$GITHUB_LOGIN" != "" ]; then remoteAccount="$GITHUB_LOGIN" elif [ "$fromConfig" != "" ]; then remoteAccount="$fromConfig" fi fi # okay, I've tried everything that makes sense to me right now if [ "$remoteAccount" == "" ]; then if [ "$REMOTE_GIT_USER" != "" ]; then remoteAccount="$REMOTE_GIT_USER" else echo "Can't do anything without a user name ( -u ). Quitting." exit 99 fi fi if [ "$useGithub" == "true" -a "$githubToken" == "" ]; then fromConfig=`git config --get github.token` # really, not sure it's healthy if we find this if [ "$GITHUB_TOKEN" != "" ]; then githubToken="$GITHUB_TOKEN" elif [ "$fromConfig" != "" ]; then githubToken="$fromConfig" fi fi if [ "$useGithub" == "true" -a "$password" == "" ]; then fromConfig=`git config --get github.password` # really, not sure it's healthy if we find this if [ "$GITHUB_PASSWORD" != "" ]; then password="$<PASSWORD>" elif [ "$fromConfig" != "" ]; then password="$<PASSWORD>" fi fi if [ "$useGithub" == "true" ]; then # using v3 of the API gitHubAuthFlag="-u" # default to assuming username/password if [ "$githubToken" != "" ]; then gitHubAuthFlag="-H" # this is the only case where it's not "-u", so override gitHubAuthString="Authorization: token $githubToken" elif [ "$password" != "" ]; then gitHubAuthString="$remoteAccount:$password" else # this will just prompt interactively for the password gitHubAuthString="$remoteAccount" fi fi # the "-n <projectname>" option will create a new git repo here, with just a simple README, and then push it out to the remote site if [ "$newProjectName" != "" ]; then if [ -d "$newProjectName" ]; then echo "There was already a directory with the name '$newProjectName'." else echo "Making brand new '$newProjectName' project." 
mkdir "$newProjectName" if [ $? -ne 0 ]; then echo "Failed to create a directory with the name '$newProjectName' (and not because it already existed, at least not as a directory). This may go badly." fi fi cd "$newProjectName" if [ $? -ne 0 ]; then # if we can't go there, then we're almost certainly going to do something bad, like create a ridiculously # large git repo one directory up from where it was supposed to go. let's not do that. echo "Failed to cd into '$newProjectName'. I'm going to give up here, before I do more damage." exit 66 fi if [ "$projectType" != "" ]; then pullGitignore "$projectType" "You are starting a(n) $projectType project." fi echo "This is $newProjectName" > README git init git add . git commit -m "Initial commit." fi cwd=`pwd` projectName=`basename "$cwd"` # we checked for ".git/" above, so assume we're in the right place # I think this is the most generic way of testing for spaces (i.e. doesn't require 'bash' but # should even work in plan vanilla/vintage 'sh' # TODO: look for other possibly-problematic characters? # Truth is, coded defensively enough, even spaces aren't a big deal, but I do consider them to # be a form of just asking for trouble in the future, depending on the rest of your toolchain case "$projectName" in *[[:space:]]*) echo "Please don't give me spaces in the project name! (I see: '$projectName') Quitting." exit 99 ;; esac if [ ! -d ".git" ]; then # no .git here? This gets a little dicey, but let's allow it if [ "$initialize" != "true" ]; then # okay, no, not if they didn't say to initialize it echo "I don't see a '.git/' directory here and you didn't pass me the '-i' option. Quitting." exit 99 fi # Not all that sure how to identify different kinds of projects. 
But it seems worthwhile # to at least try to have an appropriate .gitignore file if [ -f "AndroidManifest.xml" ]; then projectType="Android"; elif [ -f "Gemfile" ]; then # "config.ru" or "Rakefile" are probably just as reasonable projectType="Rails"; fi if [ "$projectType" != "" ]; then pullGitignore "$projectType" "Looks like a(n) $projectType project." else echo "Not sure what kind of project this is, so not pulling any .gitignore file" fi if [ `ls | wc -l` = 0 ]; then # handle case where they've asked to initialize a totally empty directory... # this is a dumb thing to do, but it's more confusing if we just ignore it. echo "You asked me to initialize an entirely empty directory. Creating a README file to unconfuse GIT." echo "This is $projectName" > README fi git init git add . git commit -m "Initial commit." fi # GitHub API v3 if [ "$useGithub" == "true" ]; then # we need to hold on to what GitHub tells us, in case there's some kind of error. # we're logging the info up one directory, so it's out of the source location gitHubResultFile="../tmp$$.json" # we've set up either "-u" and "username:password", or "-H" and the token header, for the auth flag and auth string, earlier on curl -o "$gitHubResultFile" -d '{"name":"'"$projectName"'"}' "$gitHubAuthFlag" "$gitHubAuthString" "https://api.github.com/user/repos" resultMessages=`fgrep '"message": ' "$gitHubResultFile"` # let's take a look if everything went smoothly if [ -n "$resultMessages" ]; then # if it's nonzero, then we had some kind of message in the results. we assume this is bad news. 
# "Bad Credentials" means that's a bogus token, or the username/password does not compute badCredentials=`echo "$resultMessages" | fgrep "Bad Credentials"` notFound=`echo "$resultMessages" | fgrep "Not found"` validationFailed=`echo "$resultMessages" | fgrep "Validation Failed"` if [ -n "$badCredentials" ]; then if [ "$githubToken" = "" ]; then # we didn't have a token, so it must be username/password if [ "$password" != "" ]; then # the password is already stored somewhere, so I don't feel too bad emitting it. # Though if the output gets logged somewhere, this might not be such a good idea echo "This username/password combination is invalid, according to GitHub: $gitHubAuthString" else # they just gave the username, and typed the password by hand echo "Either this isn't the correct username ('$gitHubAuthString') or you mistyped the password, according to GitHub." fi else # we had a token echo "This isn't a valid token, according to GitHub: $githubToken" fi elif [ -n "$notFound" ]; then # "Not found" means the token (perhaps also username/password combo?) are valid, but doesn't allow access to this action # (in this case, probably the token doesn't have the required scope) echo "While your authentication succeeded, you are probably not authorized to create a new repo." elif [ -n "$validationFailed" ]; then # "Validation Failed" has additional info nameAlreadyExists=`echo "$resultMessages" | fgrep "name already exists on this account"` if [ -n "$nameAlreadyExists" ]; then # I've seen it say this when trying to create a repo with a name that already exists echo "It looks like you probably already have a GitHub repo by that name ('$projectName')." else # TODO: find out if there are other modes of failure that will take us here (i.e. "Validation Failed") echo "Validation failed for some reason I'm not familiar with." 
fi else # TODO: find out if there are other situations that will give us "message:" in the result, # and whether they can be okay, or definitely bad news echo "Something went wrong that I'm not prepared to interpret." fi echo "I'm going to stop here and give you the result GitHub returned. Best of luck!" gitHubResultHumanReadableFileName="$gitHubResultFile" if [ "$newProjectName" != "" ]; then # fix this up (strip the leading "../") since from the user's perspective, it's actually where the command was run from gitHubResultFileName=`echo "$gitHubResultFile" | cut -c4-` fi echo "The result is also stored in '$gitHubResultFileName', but here it is right now:" echo cat "$gitHubResultFile" exit 77 fi /bin/rm -f "$gitHubResultFile" # clean up, nothing bad happened remoteBase="<EMAIL>:$remoteAccount" else # else we're not using GitHub, so we set this up on a private server somewhere # this is fragile! # 1) if "$remoteDir" doesn't already exist, it'll try to create it. # If it fails, bad things will probably happen. It'll put it directly in the user's home dir? # 2) if "$projectName.git" already exists, we shouldn't do this either ssh "$remoteAccount@$remoteSystem" " if [ ! -d '$remoteDir' ]; then echo '$remoteDir not found. Creating...' mkdir -p '$remoteDir' if [ $? -ne 0 ]; then echo 'Unable to mkdir -p $remoteDir. This may not end well.' exit fi fi cd '$remoteDir' if [ -d '$projectName.git' ]; then echo '$projectName.git already existed in $remoteDir. I hope this was what you were expecting...' else mkdir '$projectName.git' if [ ! -d '$projectName.git' ]; then echo 'Apparently unable to mkdir $projectName.git in $remoteDir. This will DEFINITELY not end well.' 
exit fi fi cd '$projectName.git' git --bare init " remoteBase="$remoteAccount@$remoteSystem:$remoteDir" fi git remote add origin "$remoteBase/$projectName.git" git config master.remote origin git config master.merge refs/heads/master # and shovel it all out there git push -u origin master # TODO: learn what "-u" option really means and whether it should be used here # okay, let's also dump it out on Heroku if that was requested if [ "$useHeroku" == "true" ]; then # in case we don't already have heroku # echo "Installing heroku gem, in case it's not already here:" # gem install heroku # echo "DONE Installing heroku gem." echo "Creating a Heroku app named '$projectName':" tmpHeroku="tmpHeroku$$" heroku apps:create $projectName > $tmpHeroku.out 2> $tmpHeroku.err result=$? if [ $result -ne 0 ]; then echo "Failed to create Heroku app with name $projectName (returned: $result)" else echo "Successfully created Heroku app with name $projectName" fi echo "Here's what Heroku had to say:" cat $tmpHeroku.out $tmpHeroku.err echo "----- That's all Heroku had to say" # TODO: stop trying to create directly with the desired name # TODO: instead, create with temporary name, then try to rename. More gets done that way even if there's a name conflict. # create ( heroku apps:create <NAME1> ) returns the following # ON SUCCESS- 0 # OUT: Creating <NAME1>... done, stack is cedar # http://<NAME1>.herokuapp.com/ | [email protected]:<NAME1>.git # ERR: <empty> # ON FAILURE- 1 (when there is already an app with the name NAME1) # OUT: <empty> # ERR: ! Name is already taken # create ( heroku apps:create ) (i.e. without a designated name) returns the following # ON SUCCESS- 0 # OUT: Creating <NAME>... done, stack is cedar # http://<NAME>.herokuapp.com/ | [email protected]:<NAME>.git # ERR: <empty> # ON FAILURE... Haven't yet found a failure mode # OUT: # ERR: # rename ( heroku apps:rename <NAME2> --app <NAME1> ) returns the following # ON SUCCESS- 0 # OUT: Renaming <NAME1> to <NAME2>... 
done # http://<NAME2>.herokuapp.com/ | git<EMAIL>:<NAME2>.git # Don't forget to update your Git remotes on any local checkouts. # ERR: <empty> # ON FAILURE- 1 (when there is already an app with the name NAME2) # OUT: Renaming <NAME1> to <NAME2>... failed # ERR: ! is already taken # ON FAILURE- 1 (when there is no app with the name NAME1) # OUT: Renaming <NAME1> to <NAME2>... failed # ERR: ! App not found. /bin/rm -f $tmpHeroku.out $tmpHeroku.err echo echo "Pushing to Heroku..." git push heroku master echo "DONE Pushing to Heroku." echo echo "Running 'rake db:setup' on Heroku..." heroku run rake db:setup echo "DONE Running 'rake db:setup' on Heroku." echo echo "Running 'heroku ps'..." heroku ps echo "DONE Running 'heroku ps'..." echo echo "Running 'heroku logs'..." heroku logs echo "DONE Running 'heroku logs'..." fi
53c924e2a4b1ba2c682a0470f18df7fabb290965
[ "Markdown", "Shell" ]
2
Markdown
philipp/newgit
10e54549da2aa36ac8d1f090982abb16ed41125d
a96b19a47e3a6726dfe7fc5dd732f17413d334fe
refs/heads/master
<repo_name>chengmuy/euler<file_sep>/6.py import numpy as np seq = np.arange(100) + 1 seq.shape = (100, 1) ans = sum(sum(seq.dot(seq.T))) - seq.T.dot(seq) print(ans) ans = 0 seqSum = sum(seq) for i in range(1, 101): ans += seqSum * i - i**2 print(ans)<file_sep>/4.py maxPalindrome = 0 for i in range(100, 1000): for j in range(i, 1000): num = i * j if num > maxPalindrome and str(num) == str(num)[::-1]: maxPalindrome = num print(i, j, num) print('max: ', maxPalindrome) <file_sep>/1.py import math sum = sum([i*3 for i in range(0, math.ceil(1000/3))]) + \ sum([i*5 for i in range(0, math.ceil(1000/5))]) - \ sum([i*15 for i in range(0, math.ceil(1000/15))]) print(sum) print(3 * (333 + 1) * 333 / 2 + 5 * (199 + 1) * 199 / 2 - 15 * (66 + 1) * 66 / 2)<file_sep>/joelibtest.py import joelib as jl import timeit import itertools # # print(timeit.timeit('sum((itertools.islice(jl.generatePrimesErat(), 1000)))', setup = 'import joelib as jl', number=10)) # print(sum((itertools.islice(jl.generatePrimesErat(), 1000)))) # print(timeit.timeit('sum((itertools.islice(jl.generatePrimesDivTest(), 100000)))', setup = 'import joelib as jl', number=1)) # print(sum((itertools.islice(jl.generatePrimesDivTest(), 1000)))) # print(timeit.timeit('sum((itertools.islice(jl.generatePrimesErat2(), 100000)))', setup = 'import joelib as jl', number=1)) # print(sum((itertools.islice(jl.generatePrimesDivTest(), 1000)))) print(timeit.timeit('sum((itertools.islice(jl.generatePrimesErat3(), 10000000)))', setup = 'import joelib as jl', number=1)) print(sum((itertools.islice(jl.generatePrimesErat3(), 1000)))) # for p in itertools.islice(jl.generatePrimesErat(), 1000): # print(p) # sum([0,1,2,3])<file_sep>/5.py import joelib as jl import numpy as np # least common multiple n = 20 lcm = 1 for p in jl.generatePrimesErat3(): if p > n: break mult = 1 while (mult * p) <= n: mult *= p print(mult) lcm *= mult print(lcm)<file_sep>/2.py import timeit # Sol 1 - even fib generator def even_fib(limit): a = 1 b = 2 while (a < 
limit): if not a % 2: yield a a, b = b, a+b print("Sol 1 - even fib generator") print(sum(even_fib(4 * 10**6))) print(timeit.timeit("sum(even_fib(4 * 10**20))", 'from __main__ import even_fib', number=1000)) # Sol 2 - even fib generator, no even check def even_fib(limit): a = 0 b = 1 c = 2 while (c < limit): yield c a = b + c b = c + a c = a + b print("Sol 2") print(sum(even_fib(4 * 10**6))) print(timeit.timeit("sum(even_fib(4 * 10**20))", 'from __main__ import even_fib', number=1000))<file_sep>/joelib.py from collections import defaultdict import itertools as it def primeFactorize(n): """ Return prime factorization of n :param n: number to prime factorize :return: dictionary with keys as factors and values as order """ div = 2 rem = n factors = defaultdict(int) while rem > 1: if rem % div == 0: rem /= div factors[div] += 1 else: div += 1 return factors def isPrime(n): """ Check if n is prime :param n: number to check :return: True if n is prime, False otherwise """ if n == 0 or n == 1: return False for div in range(2, int(n**0.5)+1): if not n % div: return False return True def generatePrimesDivTest(): for i in it.count(2): if isPrime(i): yield i def generatePrimesErat(): primes = defaultdict(int) yield 2 primes[2] = 4 yield 3 primes[3] = 6 ct = 2 for i in it.count(4): isPrime = True for p, val in primes.items(): if val == i: isPrime = False if val < i: primes[p] += p if isPrime: primes[i] = i yield i def generatePrimesErat2(): primes = defaultdict(int) yield 2 primes[2] = 4 yield 3 primes[3] = 9 for i in it.count(5, 2): isPrime = True for p, val in primes.items(): if val == i: isPrime = False if val < i: primes[p] += 2*p if isPrime: primes[i] = i**2 yield i def generatePrimesErat3(): primes = defaultdict(int) yield 2 yield 3 primes[9] = 3 for i in it.count(5, 2): basePrime = primes.pop(i, None) if basePrime: # i is composite next = i + 2 * basePrime while next in primes: next += 2 * basePrime primes[next] = basePrime else: # i is prime yield i primes[i**2] = i 
<file_sep>/3.py def check_prime(n): for factor in range(2, int(n**.5) + 1): if not n % factor: return False return True # generate composites up to n def generate_composites(n): top = int(n ** 0.5) comps = set() for factor in range(2, top + 1): for mult in range(1, int(n / factor)): comps.add(factor * mult) return comps num = 600851475143 comps = generate_composites(num // 2) sorted_comps = sorted(list(comps)) index = len(sorted_comps) - 1 for div in range(num // 2, 1, -1): if div == sorted_comps[index]: # don't test this, because it's a prime index -= 1 else: # for div in range(num // 2 + 1, 1, -1): # if not div % 10**6: # print(div) # if not (num % div): # if check_prime(div): # print("prime factor: " + div) # break
0e40870843351b69c7a39fb41d3b6e4908115633
[ "Python" ]
8
Python
chengmuy/euler
49b265040714893e6de00875920e41b67e542572
c800b9d2121e75022402ff97fec6e3a196130071
refs/heads/master
<repo_name>lutrarutra/Library-Stalker<file_sep>/Library-Stalker/Assets/Scripts/GameLogic.cs using UnityEngine; using System.Collections; public class GameLogic : MonoBehaviour { static public float Score = 0; static public int Hardness = 2; static public bool Lost = false; static public bool Eating = false; static public State currentState = State.idle; // Use this for initialization void Start () { } // Update is called once per frame void Update () { } public enum State { idle, lost, busted, eating, restarting, waiting, } public static void changeState(State state) { currentState = state; } public static bool checkState(State state) { return currentState == state; } } <file_sep>/README.md # Library-Stalker The beginning of 8Pack. <file_sep>/Library-Stalker/Assets/Scripts/StalkerScript.cs using UnityEngine; using System.Collections; public class StalkerScript : MonoBehaviour { public float freezeTime = 0; public float randomNumber = 0; public bool stalking = false; bool creeping = false; InputScript player; public int rnd; Animator animator; // Use this for initialization void Start () { animator = GetComponent<Animator>(); player = GameObject.Find("Player").GetComponent<InputScript>(); randomNumber = Time.time + Random.Range(5, 10); } // Update is called once per frame void Update() { #region Random Movement float time = Time.time; if (GameLogic.currentState != GameLogic.State.lost) { //generate random number if (randomNumber <= time && !stalking && !creeping) { Debug.Log("Creeping"); animator.SetTrigger("Creep"); creeping = true; randomNumber = time + 1; } else if (randomNumber <= time && (stalking || creeping)) { rnd = Random.Range(1, 20); if (rnd > 1 && !stalking) { Debug.Log("hello"); animator.SetTrigger("Turn"); stalking = true; freezeTime = time + 1.2f; } else { animator.SetTrigger("Idle"); creeping = false; stalking = false; } randomNumber = time + Random.Range(3f, 7f); } } #endregion } } <file_sep>/Library-Stalker/Assets/Scripts/restartScript.cs using 
UnityEngine; using System.Collections; using UnityEngine.UI; public class restartScript : MonoBehaviour { InputScript player; // Use this for initialization void Start () { player = GameObject.Find("Player").GetComponent<InputScript>(); } // Update is called once per frame void Update () { } void OnEnable() { GetComponent<Button>().onClick.AddListener(btnClicked); } void btnClicked() { player.restart = true; } } <file_sep>/Library-Stalker/Assets/Scripts/GUI.cs using UnityEngine; using System.Collections; using UnityEngine.UI; public class GUI : MonoBehaviour { InputScript player; bool rip; float cd; // Use this for initialization void Start () { player = GameObject.Find("Player").GetComponent<InputScript>(); rip = false; cd = 0; } // Update is called once per frame void Update () { switch (GameLogic.currentState) { case GameLogic.State.lost: cd = Time.time + 0.5f; GameLogic.changeState(GameLogic.State.waiting); break; case GameLogic.State.waiting: GetComponent<Text>().text = "BUSTED!"; break; default: return; } } } <file_sep>/Library-Stalker/Assets/Scripts/InputScript.cs using UnityEngine; using System.Collections; using UnityEngine.UI; public class InputScript : MonoBehaviour { StalkerScript stalker; Component image; Animator animator; Button restartButton; public bool restart = false; public float cd; float i = 0; bool waitingInput = true; // Use this for initialization void Start () { #region Find Components etc. 
restartButton = GameObject.Find("Button").GetComponent<Button>(); stalker = GameObject.Find("Stalker").GetComponent<StalkerScript>(); animator = GetComponent<Animator>(); #endregion #region Hide Button restartButton.enabled = false; restartButton.GetComponentInChildren<CanvasRenderer>().SetAlpha(0); restartButton.GetComponentInChildren<Text>().color = Color.clear; #endregion } // Update is called once per frame void Update() { switch (GameLogic.currentState) { case GameLogic.State.idle: #region InputCommands //Get the user input if ((Input.GetMouseButtonDown(0) || Input.GetKeyDown("space")) && !GameLogic.Eating) { i = 1; animator.SetTrigger("ChangeState"); GameLogic.Eating = true; } else if ((Input.GetMouseButtonUp(0) || Input.GetKeyUp("space")) && GameLogic.Eating) { i = 0; animator.SetTrigger("ChangeState"); GameLogic.Eating = false; } GameLogic.Score += i / GameLogic.Hardness; #endregion if (stalker.stalking && GameLogic.Eating && Time.time > stalker.freezeTime) { Debug.Log("You lose!"); cd = Time.time; GameLogic.changeState(GameLogic.State.lost); } break; case GameLogic.State.lost: if(cd + 1.5f < Time.time && !restartButton.enabled) { onLost(); } if (restart) { restart = false; Application.LoadLevel(Application.loadedLevel); } break; default: return; } { } } void onLost() { #region Show Button restartButton.enabled = true; restartButton.GetComponentInChildren<CanvasRenderer>().SetAlpha(1); restartButton.GetComponentInChildren<Text>().color = Color.black; #endregion } } <file_sep>/Library-Stalker/Assets/Scripts/StatusBarScript.cs using UnityEngine; using System.Collections; using UnityEngine.UI; public class StatusBarScript : MonoBehaviour { Image image; InputScript player; Text text; Text score; bool used = false; float points; int level = 1; // Use this for initialization void Start () { image = GetComponent<Image>(); text = GameObject.Find("Text").GetComponent<Text>(); score = GameObject.Find("Text (1)").GetComponent<Text>(); points = 0; } // Update is called 
once per frame void Update () { if (GameLogic.Score >= 100 && GameLogic.Score != 0) { level += 1; GameLogic.Score = 0; GameLogic.Hardness += 1; } score.text = "Score: " + ((int)GameLogic.Score + ((level-1) * 100)); text.text = "" + level; image.fillAmount = ((GameLogic.Score % 100) / 100); } }
3cdcbfc35f886fd78d281103e9a7e7bde55f665a
[ "Markdown", "C#" ]
7
C#
lutrarutra/Library-Stalker
e31169b16105bd2cb957f03c463169b47480901c
8510efbfb7df1cce12b081e82e42d83edab2131e
refs/heads/master
<repo_name>kurosuinc/SettingsGUI<file_sep>/src/Redux/reducer/Main.js // @flow import { handleActions } from 'redux-actions'; import * as types from '../constant'; const initState = { name: 'AppConfig', // schemaPath: './config.json', filePath: '', // load and export config file location defaultFilePath: '', }; const reducer = handleActions({ }, initState); export default reducer; <file_sep>/src/Redux/saga/appInitialization.js // @flow import { put } from 'redux-saga/effects'; import * as types from '../constant'; import schemaNormalizer from '../../Normalizer/schemaNormalizer' import testdata from '../../testdata/schemaSample'; import type {field} from "../../types/field"; export default function* (action) { try { const appData = testdata.config; const schema = schemaNormalizer(testdata.schema); // とりあえずtestdataを読ませています yield put({ type: types.APP.APPLICATION_INITIALIZE_SUCCEEDED, payload: { appData }, }); yield put({ type: types.APP.SCHEMA_LOADED, payload: { schema }, }); const fields = schema.map((): field => ({ text: '', // TODO: initialize default parameter error: null, })); yield put({ type: types.APP.FIELD_CREATED, payload: { fields } }); } catch(e) { yield put({ type: types.APP.APPLICATION_INITIALIZE_FAILED, error: e }); } }; <file_sep>/src/Redux/reducer/Schema.js // @flow import { handleActions } from 'redux-actions'; import * as types from '../constant'; const initState = []; const reducer = handleActions({ [types.APP.SCHEMA_LOADED]: (state: Array<Object>, action: Object) => ( action.payload.schema ) }, initState); export default reducer; <file_sep>/src/Component/Molecule/Row.jsx import React from 'react'; import styled from 'styled-components'; import { pure } from 'recompose'; import ListItem from '@material-ui/core/ListItem'; import TextField from '../Atom/Row/TextField'; import ErrorMessage from '../Atom/Row/ErrorMessage'; import type { spreadField } from "../../types/field"; const StyledListItem = styled(ListItem)` display: flex; flex-direction: 
column; align-items: flex-start !important; `; type Props = { field: spreadField, index: number, checkText: Function, } const Row = (props: Props) => ( <StyledListItem> <TextField field={props.field} index={props.index} checkText={props.checkText}/> {props.field.error ? <ErrorMessage message={props.field.error} /> : <div />} </StyledListItem> ); export default pure(Row); <file_sep>/src/Redux/action/index.js // @flow import { createActions } from 'redux-actions'; import * as types from '../constant'; export const appActions = createActions( types.APP.REQUEST_APP_INITIALIZE, types.FILE.REQUEST_EXPORT_FILE, ); export const fieldActions = createActions( types.RULE_CHECK.REQUEST_RULE_CHECK, ); <file_sep>/src/Component/Atom/Row/ErrorMessage.jsx import React from 'react'; import styled from 'styled-components'; import { pure } from 'recompose'; import Typography from '@material-ui/core/Typography'; const StyledTypography = styled(Typography)` color: red !important; padding-top: 12px; `; type Props = { message: string, }; const ErrorMessage = (props: Props) => ( <StyledTypography noWrap> {props.message} </StyledTypography> ); export default pure(ErrorMessage); <file_sep>/src/Normalizer/schemaNormalizer.js // @flow import { rule } from '../types/rule'; export default (schema: Array<Object>): Array<Object> => ( schema.map((item: Object): Object => ({ field: { key: item.key, label: item.label, placeholder: item.placeholder, description: item.description, required: item.required, }, rules: item.rules.map((rule: Object): rule => ({ required: item.required, type: item.type, regex: rule.regex, errMessage: rule.error, })) })) ); <file_sep>/src/types/rule.js export type rule = { required: boolean, type: string, regex: string, errorMessage: string, } <file_sep>/src/Redux/reducer/Notification.js // @flow import { handleActions } from 'redux-actions'; import * as types from '../constant'; const initState = {}; const reducer = handleActions({}, initState); export default 
reducer;<file_sep>/src/Redux/selector/index.js // @flow import { createSelector } from 'reselect'; import type {field, spreadField} from "../../types/field"; const fieldState = (state: Object): Array<field> => state.Field; const schemaState = (state: Object): Array<Object> => state.Schema; const mainState = (state: Object): Object => state.Main; export const fields = createSelector( [fieldState, schemaState], (_field: Array<field>, _schema: Array<Object>): Array<spreadField> => _field.map((item, index) => ({...item, ..._schema[index].field}) ) ); export const applicationName = createSelector( [mainState], (_main: Object): string => _main.name ); <file_sep>/src/Redux/api/ruleChecker.js // @flow import type {rule} from "../../types/rule"; export const ruleCheck = (rules: Array<rule>, text: string | number): ?rule => ( rules[rules.map((item: rule): boolean => text.toString().replace(RegExp(item.regex.toString()), '') === '') .indexOf(false) ] ); // toStringするのはどうなの… export default (rules: Array<rule>, text: string): Promise<rule> => new Promise(resolve => resolve(ruleCheck(rules, text)) ); <file_sep>/src/Redux/reducer/Field.js // @flow import { handleActions } from 'redux-actions'; import * as types from '../constant'; import type { field } from '../../types/field'; export type State = Array<field>; const initState: State = []; const reducer = handleActions({ [types.RULE_CHECK.UPDATE_FORM_TEXT]: (state: State, action: Object) => ( state.map((item, index) => (index === action.payload.index) ? {...item, text: action.payload.target} : item) ), [types.RULE_CHECK.RULE_CHECK_SUCCEEDED]: (state: State, action: Object) => ( state.map((item, index) => (index === action.payload.index) ? {...item, error: null} : item) ), [types.RULE_CHECK.RULE_CHECK_FAILED]: (state: State, action: Object) => ( state.map((item, index) => (index === action.payload.index) ? 
{...item, error: action.payload.error} : item) ), [types.APP.FIELD_CREATED]: (state: State, action: Object) => ( action.payload.fields ) }, initState); export default reducer; <file_sep>/src/Redux/saga/index.js // @flow import { takeEvery, takeLatest } from 'redux-saga/effects'; import * as types from '../constant'; import appInitialization from './appInitialization'; import exportConfigFile from './exportConfigFile'; import ruleCheck from './ruleCheck'; function* rootSaga(): any { yield takeEvery(types.RULE_CHECK.REQUEST_RULE_CHECK, ruleCheck); yield takeLatest(types.APP.REQUEST_APP_INITIALIZE, appInitialization); yield takeLatest(types.FILE.REQUEST_EXPORT_FILE, exportConfigFile); } export default rootSaga; <file_sep>/src/Component/Atom/Row/TextField.jsx // @flow import React from 'react'; import { onlyUpdateForKeys } from 'recompose'; import _TextField from '@material-ui/core/TextField' import type { spreadField } from '../../../types/field'; type Props = { field: spreadField, index: number, checkText: Function, }; const onChangeEvent = (props: Props) => (e: Object) => { props.checkText({ index: props.index, target: e.target.value, }); }; const TextField = (props: Props) => ( <_TextField fullWidth required={props.field.required} error={!!props.field.error} id={props.field.key} label={props.field.label} placeholder={props.field.placeholder} helperText={props.field.description} value={props.field.text} onChange={onChangeEvent(props)}/> ); export default onlyUpdateForKeys(['field'])(TextField); <file_sep>/src/Redux/saga/ruleCheck.js // @flow import { put, call, select } from 'redux-saga/effects'; import * as types from '../constant'; import rulechecker from '../api/ruleChecker'; export default function*(action: Object): any { try{ const {target, index} = action.payload; yield put({ type: types.RULE_CHECK.UPDATE_FORM_TEXT, payload: action.payload }); const rules = yield select((state: Object) => state.Schema[index].rules); const result = yield call(rulechecker, 
rules, target); if(!result){ yield put({ type: types.RULE_CHECK.RULE_CHECK_SUCCEEDED, payload: { index } }); }else{ yield put({ type: types.RULE_CHECK.RULE_CHECK_FAILED, payload: { index, error: result.errMessage, } }); } }catch(e){ throw e; } }; <file_sep>/src/Component/Atom/Header/AppName.jsx import React from 'react'; import {pure} from 'recompose'; import styled from 'styled-components'; import Typography from '@material-ui/core/Typography'; const Styled = { Text: styled(Typography)` padding-left: 46px; -webkit-app-region: drag; -webkit-user-select: none; cursor: default; `, }; type Props = { appName: string, }; const AppName = (props: Props) => ( <Styled.Text variant="title" color="inherit"> {props.appName} </Styled.Text> ); export default pure(AppName); <file_sep>/src/Component/Page.jsx // @flow import React from 'react'; import {pure} from 'recompose'; import styled from 'styled-components'; import WindowAppBar from './Organism/WindowAppBar'; import ItemList from './Organism/ItemList'; const Styled = { Root: styled.div` height: 100%; display: flex; flex-direction: column; `, Body: { Header: styled(WindowAppBar)` -webkit-app-region: drag; `, Main: styled(ItemList)` flex-grow: 1; `, }, }; type Props = { fields: Array<Object>, appName: string, checkText: Function, }; const Page = (props: Props) => ( <Styled.Root> <Styled.Body.Header appName={props.appName} /> <Styled.Body.Main fields={props.fields} checkText={props.checkText} /> </Styled.Root> ); export default pure(Page); <file_sep>/src/testdata/schemaSample.js export default { config: { path: './config.json', // load/saveする設定ファイルのパス(相対パスの場合、ConfigGUI実行ファイルの位置からの相対位置と解釈) default: './default.json', // 起動時に config.json が存在しない場合、このファイルからデフォルト値をロードする name: 'XYZアプリ設定画面' // タイトルバーなどに表示 }, schema: [ { key: 'username', // jsonオブジェクトにおけるキー名 label: 'ユーザ名', // GUI内での入力項目ラベル名 placeholder: 'ユーザ名をここに入力', // 入力フォーム(inputタグ)のplaceholder属性 description: '項目の説明文です', // 入力フォーム直下に小さいフォントで説明を表示 type: 'string', // 
値の型(jsonにおける有効なデータ型) required: true, // falseの場合は空でも良い rules: [ // Validationルール(複数指定可能) { regex: '[a-zA-Z]*', error: '半角英字のみが使用できます' // regexにマッチしなかった際のエラー文 }, { regex: '.{5,10}', error: '5文字以上、10文字以下で指定してください' } ] }, { key: 'userid', label: 'ユーザID', placeholder: 'ユーザ名をここに入力', description: '入力フォームの下に小さい文字で表示される説明文です', type: 'number', required: false, rules: [ { regex: '[^-]*', error: '負値は指定できません' }, { regex: '[0-9]*', error: '半角数字のみが使用できます' } ] } ] } <file_sep>/src/Redux/reducer/index.js // @flow import {combineReducers} from 'redux'; import Main from './Main'; import Schema from './Schema'; import Field from './Field'; import Notification from './Notification'; export default combineReducers({ Main, Schema, Field, Notification, }); <file_sep>/src/Component/Atom/stories/index.js import React from 'react'; import { storiesOf } from '@storybook/react'; import { withKnobs, boolean } from '@storybook/addon-knobs'; import TextField from '../Row/TextField.jsx'; import MoreInfoIcons from '../Row/MoreInfoIcon.jsx'; storiesOf('Settings Import Field', module) .add('basic', () => <TextField schema={schemaSampleData.schema[0]}/>); storiesOf('Icon Buttons', module) .addDecorator(withKnobs) .add('basic', () => <MoreInfoIcons open={boolean('open', false)}/>);
40805db628719db369d6eef0a02771a294f32088
[ "JavaScript" ]
20
JavaScript
kurosuinc/SettingsGUI
c23ad7d6a7391694ba975fb749c9bba963f969a9
72971ef11f44c283cda0c1aeeaece230f2e9e40c
refs/heads/master
<repo_name>cloudier/cs2911<file_sep>/Lab06/test/graph/TestStringGraph.java package graph; import static org.junit.Assert.*; import org.junit.Test; public class TestStringGraph { @Test public void testAddNode() { Graph<String> g = new AdjacencyListGraph<String>(); // make nodes and check size assertEquals(0, g.size()); g.addNode("A"); g.addNode("B"); assertEquals(2, g.size()); g.addNode("C"); g.addNode("D"); assertEquals(4, g.size()); // check that nodes exist assertTrue(g.contains("A")); assertTrue(g.contains("B")); assertTrue(g.contains("C")); assertTrue(g.contains("D")); } @Test public void testAddRemoveConnection() { Graph<String> g = new AdjacencyListGraph<String>(); // make nodes g.addNode("A"); g.addNode("B"); g.addNode("C"); g.addNode("D"); // make connections g.addConnection("A", "B"); g.addConnection("A", "D"); g.addConnection("C", "D"); // check that connections exist assertTrue(g.isConnected("A", "B")); assertTrue(g.isConnected("A", "D")); assertTrue(g.isConnected("C", "D")); // remove connections g.removeConnection("A", "B"); g.removeConnection("A", "D"); g.removeConnection("C", "D"); // check that connections don't exist assertFalse(g.isConnected("A", "B")); assertFalse(g.isConnected("A", "D")); assertFalse(g.isConnected("C", "D")); } @Test public void testRemoveNode() { Graph<String> g = new AdjacencyListGraph<String>(); // make nodes g.addNode("A"); g.addNode("B"); g.addNode("C"); g.addNode("D"); // remove nodes and check size g.removeNode("A"); g.removeNode("B"); assertEquals(2, g.size()); g.removeNode("C"); g.removeNode("D"); assertEquals(0, g.size()); } } <file_sep>/Lab04/src/enrol/Student.java package enrol; import java.util.*; public class Student { private List<Session> timetable; private HashMap<String, PastCourse> pastCourses; // string is the course code public Student() { this.timetable = new ArrayList<Session>(); this.pastCourses = new HashMap<String, PastCourse>(); } public List<Session> getTimetable() { return timetable; } public void 
addTimetable(Session s) { timetable.add(s); } public HashMap<String, PastCourse> getPastCourses() { return new HashMap<String, PastCourse>(pastCourses); } public void addPastCourse(PastCourse p) { pastCourses.put(p.getCourseCode(), p); } public Grade getGrade(String courseCode) { return pastCourses.get(courseCode).getGrade(); } public boolean enrol(Course c, Session s) { if (!c.doesSessionExist(s)) return false; if (!c.checkPrerequisites(this)) return false; if (s.doesSessionOverlap(this)) return false; c.addStudent(this); this.addTimetable(s); return true; } } <file_sep>/Lab06/src/graph/AdjacencyListGraph.java package graph; import java.util.*; public class AdjacencyListGraph<E> implements Graph<E>{ private int size; private HashMap<E, Node<E>> nodes; public AdjacencyListGraph() { super(); this.size = 0; this.nodes = new HashMap<E, Node<E>>(); } @Override public int size() { return size; } @Override public void addNode(E a) { Node<E> n = new Node<E>(a); nodes.put(n.getNodeObj(), n); size++; } @Override public void removeNode(E a) { List<Node<E>> connections = nodes.get(a).getConnections(); for (Node<E> n: connections) { n.removeConnection(nodes.get(a)); } nodes.remove(nodes.get(a)); size--; } @Override public void addConnection(E from, E to) { nodes.get(from).addConnection(nodes.get(to)); nodes.get(to).addConnection(nodes.get(from)); } @Override public void removeConnection(E from, E to) { nodes.get(from).removeConnection(nodes.get(to)); nodes.get(to).removeConnection(nodes.get(from)); } @Override public boolean contains(E a) { return nodes.get(a) != null; } @Override public boolean isConnected(E a, E b) { return nodes.get(a).isConnected(b); } public Set<Node<E>> getNodes() { return new HashSet<Node<E>>(nodes.values()); } } class Node<E>{ private E nodeObj; private HashMap<E, Node<E>> connections; public Node(E nodeObj) { super(); this.nodeObj = nodeObj; this.connections = new HashMap<E, Node<E>>(); } public List<Node<E>> getConnections() { return new 
ArrayList<Node<E>>(connections.values()); } public boolean isConnected(E a){ return connections.containsKey(a); } public boolean isConnected(Node<E> n) { return connections.containsValue(n); } public void addConnection(Node<E> connectedNode) { connections.put(connectedNode.getNodeObj(), connectedNode); } public void removeConnection(Node<E> connectedNode) { connections.remove(connectedNode); } public E getNodeObj() { return nodeObj; } }<file_sep>/Lab07/src/Graph.java import java.util.*; public interface Graph<E> { // Graph has objects of arbitrary type E public void addNode(E a); public void removeNode(E a); public void addConnection(E from, E to, Integer weight); public void removeConnection(E from, E to); public boolean contains(E a); // handy because standard Java term public boolean isConnected(E a, E b); // a and b are graph Node public void getConnections(E a); // used for debugging public int size(); public List<E> greedy(E source, E destination, Heuristic<E> h); public List<E> aStar(E source, E destination, Heuristic<E> h); } <file_sep>/Ass1/src/HotelBookingSystem.java import java.io.BufferedReader; import java.io.FileReader; import java.io.Reader; public class HotelBookingSystem { /** * Reads in a list of commands and processes them according to the * assignment specification. Uses BufferedReader for the ReadLine method. * Also allows for faster performance when reading large files. * * @param args * [1] Name of input text file. 
*/ public static void main(String[] args) { try { Reader file = new FileReader(args[0]); BufferedReader reader = new BufferedReader(file); HotelManager hotels = new HotelManager(); for (String line = reader.readLine(); line != null; line = reader .readLine()) { String[] inputs = line.split(" "); if (inputs[0].equals("Hotel")) { String hotelName = inputs[1]; int roomNumber = Integer.parseInt(inputs[2]); int capacity = Integer.parseInt(inputs[3]); hotels.makeNewRoom(hotelName, roomNumber, capacity); } else if (inputs[0].equals("Booking")) { int bookingID = Integer.parseInt(inputs[1]); String month = inputs[2]; int startDate = Integer.parseInt(inputs[3]); int duration = Integer.parseInt(inputs[4]); int roomsSingle = 0; int roomsDouble = 0; int roomsTriple = 0; for (int i = 5; i < inputs.length; i = i + 2) { String type = inputs[i]; int numRooms = Integer.parseInt(inputs[i + 1]); if (type.equals("single")) { roomsSingle = numRooms; } else if (type.equals("double")) { roomsDouble = numRooms; } else if (type.equals("triple")) { roomsTriple = numRooms; } } hotels.bookRooms(bookingID, month, startDate, duration, roomsSingle, roomsDouble, roomsTriple); } else if (inputs[0].equals("Change")) { int bookingID = Integer.parseInt(inputs[1]); String month = inputs[2]; int startDate = Integer.parseInt(inputs[3]); int duration = Integer.parseInt(inputs[4]); int roomsSingle = 0; int roomsDouble = 0; int roomsTriple = 0; for (int i = 5; i < inputs.length; i = i + 2) { String type = inputs[i]; int numRooms = Integer.parseInt(inputs[i + 1]); if (type.equals("single")) { roomsSingle = numRooms; } else if (type.equals("double")) { roomsDouble = numRooms; } else if (type.equals("triple")) { roomsTriple = numRooms; } } hotels.changeRooms(bookingID, month, startDate, duration, roomsSingle, roomsDouble, roomsTriple); } else if (inputs[0].equals("Cancel")) { int bookingID = Integer.parseInt(inputs[1]); hotels.cancelBooking(bookingID); } else if (inputs[0].equals("Print")) { String hotelName = 
inputs[1]; hotels.findHotelByName(hotelName).print(); } } reader.close(); } catch (Exception e) { System.err.format("Exception occured while trying to read '%s'.", args[0]); e.printStackTrace(); } } }<file_sep>/Lab05/src/sets/Set.java package sets; import java.util.*; interface Set<E> extends Iterable<E>{ // set membership operations public void add(E s); public void remove(E s); public boolean contains (E s); // accessors public int getSize(); public Class<E> getType(); public List<E> getItems(); // basic operations on sets public boolean subset(Set<E> ms); public Set<E> intersect(Set<E> ms); public Set<E> union(Set<E> ms); // other public boolean equals(Object o); public int hashCode(); public Iterator<E> iterator(); } <file_sep>/Ass1/src/HotelManager.java import java.util.ArrayList; public class HotelManager { private static final int SINGLE = 1; private static final int DOUBLE = 2; private static final int TRIPLE = 3; private ArrayList<Hotel> hotels; /** * Creates a new Hotels object. This Hotels object contains an empty * ArrayList of Hotel objects. */ public HotelManager() { super(); this.hotels = new ArrayList<Hotel>(); } /** * Makes a new Room in Hotel with name hotelName. * * @param hotelName * The name of the Hotel as a string. * @param roomNumber * The number of the Room as an int. * @param capacity * The capacity of the Room as an int. This capacity ranges from * 1 (single) to 3 (triple). */ public void makeNewRoom(String hotelName, int roomNumber, int capacity) { Hotel hotel = findHotelByName(hotelName); if (hotel != null) { hotel.makeNewRoom(roomNumber, capacity); } else { Hotel newHotel = new Hotel(hotelName); newHotel.makeNewRoom(roomNumber, capacity); hotels.add(newHotel); } } /** * Finds a Hotel with a given name. * * @param hotelName * Name of the hotel you want to find as a String. * @return the Hotel if found, and null otherwise. 
*/ public Hotel findHotelByName(String hotelName) { for (Hotel h : hotels) { if (h.getName().equals(hotelName)) { return h; } } return null; } /** * Checks if any hotel has the required number of rooms available with * specified capacities on the given dates. If a hotel is found, that hotel * is returned. Otherwise returns null. * * @param month * The month that the booking occurs in as a 3-letter String. * @param startDate * The date that the booking begins on as an int. * @param duration * The duration or length of the booking in days as an int. * @param roomsSingle * The number of single rooms to book as an int. A single room is * a room with capacity 1. * @param roomsDouble * The number of double rooms to book as an int. A double room is * a room with capacity 2. * @param roomsTriple * The number of triple rooms to book as an int. A triple room is * a room with capacity 3. * @return a hotel that has the required number of rooms in specified * capacities on the given dates, if one is available. Otherwise, * returns null. */ public Hotel checkRoomsAvailable(String month, int startDate, int duration, int roomsSingle, int roomsDouble, int roomsTriple) { for (Hotel h : hotels) { if (h.isAvailable(SINGLE, month, startDate, duration, roomsSingle) && h.isAvailable(DOUBLE, month, startDate, duration, roomsDouble) && h.isAvailable(TRIPLE, month, startDate, duration, roomsTriple)) { return h; } } return null; } /** * Checks if any hotel has the required number of rooms available with * specified capacities on the given dates, ignoring any bookings with the * given booking ID. If a hotel is found, that hotel is returned. Otherwise * returns null. * * @param month * The month that the booking occurs in as a 3-letter String. * @param startDate * The date that the booking begins on as an int. * @param duration * The duration or length of the booking in days as an int. * @param roomsSingle * The number of single rooms to book as an int. 
A single room is * a room with capacity 1. * @param roomsDouble * The number of double rooms to book as an int. A double room is * a room with capacity 2. * @param roomsTriple * The number of triple rooms to book as an int. A triple room is * a room with capacity 3. * @param bookingID * The booking ID to ignore when checking availabilities as an * int. * @return a hotel that has the required number of rooms in specified * capacities on the given dates, if one is available. Otherwise, * returns null. */ public Hotel checkChangeAvailable(String month, int startDate, int duration, int roomsSingle, int roomsDouble, int roomsTriple, int bookingID) { for (Hotel h : hotels) { if (h.isChangeAvailable(SINGLE, month, startDate, duration, roomsSingle, bookingID) && h.isChangeAvailable(DOUBLE, month, startDate, duration, roomsDouble, bookingID) && h.isChangeAvailable(TRIPLE, month, startDate, duration, roomsTriple, bookingID)) { return h; } } return null; } /** * Cancels any bookings in any hotel with the specified booking ID. * * @param bookingID * The ID of the booking that will be cancelled as an int. */ public void cancelBooking(int bookingID) { for (Hotel h : hotels) { h.cancelBooking(bookingID); } System.out.printf("Cancel %d\n", bookingID); } /** * Checks to see if the given number of rooms with specified capacities are * available within any one hotel. If they are available, the rooms are * booked. Otherwise, the function prints out 'Booking rejected'. * * @param bookingID * The ID of the booking as an int. * @param month * The month of the booking as a 3-letter String. * @param startDate * The date the booking begins as an int. * @param duration * The length of the booking in days as an int. * @param roomsSingle * The number of rooms with capacity 1 (single) to book. * @param roomsDouble * The number of rooms with capacity 2 (double) to book. * @param roomsTriple * The number of rooms with capacity 3 (triple) to book. 
*/ public void bookRooms(int bookingID, String month, int startDate, int duration, int roomsSingle, int roomsDouble, int roomsTriple) { Hotel hotelToBook = checkRoomsAvailable(month, startDate, duration, roomsSingle, roomsDouble, roomsTriple); if (hotelToBook != null) { hotelToBook.bookRooms(bookingID, SINGLE, month, startDate, duration, roomsSingle); hotelToBook.bookRooms(bookingID, DOUBLE, month, startDate, duration, roomsDouble); hotelToBook.bookRooms(bookingID, TRIPLE, month, startDate, duration, roomsTriple); System.out.print("Booking"); hotelToBook.printBookedRooms(bookingID, month, startDate, hotelToBook.getName()); } else { System.out.print("Booking rejected"); } System.out.print("\n"); } /** * Checks to see if the given number of rooms with specified capacities are * available within any one hotel. This function treats bookings with the * given booking ID as un-booked. If they are available, the previous * bookings are cancelled and the new rooms are booked. Otherwise, the * function prints out 'Booking rejected'. * * @param bookingID * The ID of the booking to change as an int. * @param month * The month of the booking as a 3-letter String. * @param startDate * The date the booking begins as an int. * @param duration * The length of the booking in days as an int. * @param roomsSingle * The number of rooms with capacity 1 (single) to book. * @param roomsDouble * The number of rooms with capacity 2 (double) to book. * @param roomsTriple * The number of rooms with capacity 3 (triple) to book. 
*/ public void changeRooms(int bookingID, String month, int startDate, int duration, int roomsSingle, int roomsDouble, int roomsTriple) { Hotel hotelToBook = checkChangeAvailable(month, startDate, duration, roomsSingle, roomsDouble, roomsTriple, bookingID); if (hotelToBook != null) { for (Hotel h : hotels) { h.cancelBooking(bookingID); } hotelToBook.bookRooms(bookingID, SINGLE, month, startDate, duration, roomsSingle); hotelToBook.bookRooms(bookingID, DOUBLE, month, startDate, duration, roomsDouble); hotelToBook.bookRooms(bookingID, TRIPLE, month, startDate, duration, roomsTriple); System.out.print("Change"); hotelToBook.printBookedRooms(bookingID, month, startDate, hotelToBook.getName()); } else { System.out.print("Change rejected"); } System.out.print("\n"); } } <file_sep>/Ass2/src/Graph.java import java.util.*; /** * This graph provides a representation of the airports, their delays and the * length of the flights between airports. Airports are represented using nodes * while flights are represented using edges. * * @author <NAME> z3459448 * */ public class Graph { private HashMap<String, Node> nodes; private HashSet<Edge> edges; /** * Creates a new empty graph. */ public Graph() { this.nodes = new HashMap<String, Node>(); this.edges = new HashSet<Edge>(); } /** * Adds a node to the graph. The node represents an airport. The name * parameter represents the name of the airport, for example "Sydney". The * delay parameter is the delay experienced in minutes at the airport before * a plane can leave for another airport. * * @param name * The name of the node as a string. * @param delay * The delay experienced at the node as an int. */ public void addNode(String name, int delay) { Node n = new Node(name, delay); nodes.put(name, n); } /** * Returns the node (or airport) with the given name. * * @param name * The name of the node as a string. * @return The corresponding node. 
*/ public Node getNode(String name) { return nodes.get(name); } /** * Returns all the nodes from this graph as a HashMap. * * @return The nodes from this graph as a HashSet. */ public HashMap<String, Node> getNodes() { return new HashMap<String, Node>(nodes); } /** * Adds an edge to the graph. The edge represents a flight between two given * airports. The weight of the edge is the length of the flight in minutes. * * @param from * The name of the node that the edge starts from as a string. * @param to * The name of the node that the edge ends at as a string. * @param weight * The weight of the edge as an int. */ public void addEdge(String from, String to, Integer weight) { Node fromNode = nodes.get(from); Node toNode = nodes.get(to); fromNode.addConnection(toNode, weight); toNode.addConnection(fromNode, weight); Edge e = new Edge(fromNode, toNode, weight); edges.add(e); e = new Edge(toNode, fromNode, weight); edges.add(e); } /** * Returns the edge between the two nodes which are identified using their * names, or the flight between two airports. * * @param from * The name of the node at which the edge begins as a string. * @param to * The name of the node at which the edge ends as a string. * @return The edge between the nodes corresponding to the given names. */ public Edge getEdge(String from, String to) { for (Edge e : edges) { if (e.from().getName().equals(from) && e.to().getName().equals(to)) { return e; } } return null; } /** * Returns the edge between two given nodes, or the flight between two * airports. * * @param from * The node at which the edge begins. * @param to * The node at which the edge ends. * @return The edge between the given nodes. */ public Edge getEdge(Node from, Node to) { for (Edge e : edges) { if (e.from().equals(from) && e.to().equals(to)) { return e; } } return null; } /** * Returns all the edges from this graph as a HashSet. * * @return All the edges from this graph as a HashSet. 
*/ public HashSet<Edge> getEdges() { return new HashSet<Edge>(edges); } /** * Returns all the edges from a given node as a HashSet. * * @param n * The node to find edges from. * @return All the edges from the given node as a HashSet. */ public HashSet<Edge> getEdgesFrom(Node n) { HashSet<Edge> edgesFrom = new HashSet<Edge>(); for (Edge e : edges) { if (e.from().equals(n)) edgesFrom.add(e); } return edgesFrom; } }<file_sep>/Lab04/test/enrol/TestGrade.java package enrol; import static org.junit.Assert.*; import org.junit.Test; public class TestGrade { @Test public void testEqual() { Grade highd = new Grade("HD"); Grade highd2 = new Grade("HD"); Grade dist = new Grade("DN"); Grade dist2 = new Grade("DN"); Grade cred = new Grade("CR"); Grade cred2 = new Grade("CR"); Grade pass = new Grade("PS"); Grade pass2 = new Grade("PS"); Grade fail = new Grade("FL"); Grade fail2 = new Grade("FL"); assertEquals(0, highd.compareTo(highd2)); assertEquals(0, dist.compareTo(dist2)); assertEquals(0, cred.compareTo(cred2)); assertEquals(0, pass.compareTo(pass2)); assertEquals(0, fail.compareTo(fail2)); } @Test public void testGreaterThan() { Grade highd = new Grade("HD"); Grade dist = new Grade("DN"); Grade cred = new Grade("CR"); Grade pass = new Grade("PS"); Grade fail = new Grade("FL"); assertEquals(1, highd.compareTo(dist)); assertEquals(1, highd.compareTo(cred)); assertEquals(1, highd.compareTo(pass)); assertEquals(1, highd.compareTo(fail)); assertEquals(1, dist.compareTo(cred)); assertEquals(1, dist.compareTo(pass)); assertEquals(1, dist.compareTo(fail)); assertEquals(1, cred.compareTo(pass)); assertEquals(1, cred.compareTo(fail)); assertEquals(1, pass.compareTo(fail)); } @Test public void testLessThan() { Grade highd = new Grade("HD"); Grade dist = new Grade("DN"); Grade cred = new Grade("CR"); Grade pass = new Grade("PS"); Grade fail = new Grade("FL"); assertEquals(-1, dist.compareTo(highd)); assertEquals(-1, cred.compareTo(highd)); assertEquals(-1, pass.compareTo(highd)); 
assertEquals(-1, fail.compareTo(highd)); assertEquals(-1, cred.compareTo(dist)); assertEquals(-1, pass.compareTo(dist)); assertEquals(-1, fail.compareTo(dist)); assertEquals(-1, pass.compareTo(cred)); assertEquals(-1, fail.compareTo(cred)); assertEquals(-1, fail.compareTo(pass)); } } <file_sep>/README.md # cs2911 *Engineering Design in Computing – Semester 1, 2015* [Course Website](http://www.cse.unsw.edu.au/~cs2911/). ### Assignments + **Assignment 1: Hotel Booking System.** Stores information about hotel bookings provided in a particular format. + **Assignment 2: Flight Scheduler.** Implements A* to solve a travelling-salesman-style problem with a consistent heuristic. ### Labs + **Lab 2: Classes and Inheritance.** + **Lab 3: Preconditions and Postconditions.** Includes JUnit tests. + **Lab 4: Object-Oriented Design.** Enrolment system. Includes JUnit tests. + **Lab 5: Generic Types and Polymorphism.** Generic set implemented using ArrayLists. Includes JUnit tests. + **Lab 6: Basic Search Algorithms.** Depth-first search and breadth-first search on the Romania map using a generic graph implementation. Includes JUnit tests. + **Lab 7: Problem Solving Algorithms.** A* search on the Romania map using straight-line distance heuristic. + **Lab 10: Composite and Decorator Design Patterns.** Includes JUnit tests. + **Lab 11: Concurrency.** Thread-safe wrap-around queue. ### Notes Includes questions from and solutions to the labs. 
<file_sep>/Lab04/src/enrol/Grade.java package enrol; public class Grade implements Comparable<Grade>{ private String grade; public Grade(String grade) { this.grade = grade; } public String getGrade() { return grade; } @Override /** * Order: HD > DN > CR > PS > FL */ public int compareTo(Grade g) { int thisGrade = 0; int otherGrade = 0; if (this.grade.equals("HD")) thisGrade = 5; if (this.grade.equals("DN")) thisGrade = 4; if (this.grade.equals("CR")) thisGrade = 3; if (this.grade.equals("PS")) thisGrade = 2; if (this.grade.equals("FL")) thisGrade = 1; if (g.getGrade().equals("HD")) otherGrade = 5; if (g.getGrade().equals("DN")) otherGrade = 4; if (g.getGrade().equals("CR")) otherGrade = 3; if (g.getGrade().equals("PS")) otherGrade = 2; if (g.getGrade().equals("FL")) otherGrade = 1; if (thisGrade == otherGrade) return 0; if (thisGrade > otherGrade) return 1; if (thisGrade < otherGrade) return -1; else { System.out.println("Invalid Grade " + this.grade + " compared to " + g.getGrade()); return 0; } } } <file_sep>/Ass2/src/BasicHeuristic.java import java.util.*; /** This heuristic is admissible because we can be certain that the weights of an * edge will definitely be in the total cost (of weights and delays) when the * search has completed, i.e. it will never overestimate the cost from the * current state to the goal state. * * In addition, this heuristic is consistent. In consistent heuristics, the * estimated distance from the current node to the goal is less than or equal to * the known distance to any neighbour node plus the estimated distance from * that neighbour to the goal. In other words, * <tt>EstimatedDistance(node, goal) <= * Distance(neighbour) + EstimatedDistance(neighbour, goal)</tt>. The distance * between a given node and its neighbour equals the weight of the edge (length * of the flight) plus the delay at the current node. However, I do not include * delay in my heuristic estimate. 
As a result, our estimated * distance will always be less than the distance between the current node and its * neighbour plus the estimated distance from the neighbour to the goal unless * we are at the last node. * * The running time of this heuristic is O(E) where E is the number of remaining * edges for a given state. * * @author <NAME> z3459448 * */ public class BasicHeuristic implements Heuristic { @Override /** * Estimates the distance left by summing the weights of the remaining edges. */ public int distanceLeft(Node n, HashSet<Edge> edgesLeft, Graph g) { int totalWeight = 0; for (Edge e : edgesLeft) { totalWeight += e.getWeight(); } return totalWeight; } }<file_sep>/Lab03/test/banking/BankAccount_general.java package banking; import static org.junit.Assert.*; import org.junit.*; import banking.BankAccount; import java.util.Calendar; public class BankAccount_general { @Test public void testGetLastWithdrawalDate() { BankAccount testBankAccount = new BankAccount(500); try { testBankAccount.withdraw(300); } catch (Exception e) { fail("Exception thrown"); } Calendar now = Calendar.getInstance(); if (now.get(Calendar.HOUR_OF_DAY) != 23 && now.get(Calendar.MINUTE) != 59) { assertEquals(now.get(Calendar.DATE), testBankAccount.getLastWithdrawalDate().get(Calendar.DATE)); } else { fail("Do not run testGetLastWithdrawalDate between days."); } try { testBankAccount.withdraw(200); } catch (Exception e) { fail("Exception thrown"); } Calendar now2 = Calendar.getInstance(); if (now.get(Calendar.HOUR_OF_DAY) != 23 && now.get(Calendar.MINUTE) != 59) { assertEquals(now2.get(Calendar.DATE), testBankAccount.getLastWithdrawalDate().get(Calendar.DATE)); } else { fail("Do not run testGetLastWithdrawalDate between days."); } } @Test public void testGetBalance() { BankAccount testBankAccount = new BankAccount(500); assertEquals(500, testBankAccount.getBalance()); try { testBankAccount.withdraw(300); } catch (Exception e) { fail("Exception thrown"); } assertEquals(200, 
testBankAccount.getBalance()); try { testBankAccount.withdraw(200); } catch (Exception e) { fail("Exception thrown"); } assertEquals(0, testBankAccount.getBalance()); } @Test public void testGetDailyWithdrawals() { BankAccount testBankAccount = new BankAccount(500); try { testBankAccount.withdraw(300); } catch (Exception e) { fail("Exception thrown"); } assertEquals(300, testBankAccount.getBalance()); } } <file_sep>/Lab04/test/enrol/TestCourse.java package enrol; import static org.junit.Assert.*; import org.junit.Test; public class TestCourse { @Test public void testCheckPrerequisites() { Session s1 = new Session(1, 2015, 3, "<NAME>", 1400, 1600); Course comp1917 = new Course("COMP1917"); Course comp1927 = new Course("COMP1927"); comp1927.addPrerequisite(comp1917, new Grade("PS")); Course comp2911 = new Course("COMP2911"); comp2911.addPrerequisite(comp1927, new Grade("PS")); comp2911.addSession(s1); Course comp2121 = new Course("COMP2121"); comp2121.addPrerequisite(comp1927, new Grade("PS")); Student claudia = new Student(); claudia.addPastCourse(new PastCourse("COMP1917", new Grade("DN"), 2013, 1)); claudia.addPastCourse(new PastCourse("COMP1927", new Grade("DN"), 2013, 2)); assertTrue(comp1917.checkPrerequisites(claudia)); assertTrue(comp1927.checkPrerequisites(claudia)); assertTrue(comp2911.checkPrerequisites(claudia)); assertTrue(comp2121.checkPrerequisites(claudia)); Course comp3891 = new Course("COMP3891"); comp3891.addPrerequisite(comp1927, new Grade("CR")); comp3891.addPrerequisite(comp2121, new Grade("CR")); assertFalse(comp3891.checkPrerequisites(claudia)); } } <file_sep>/Lab03/test/banking/BankAccount_deposit.java package banking; import static org.junit.Assert.*; import org.junit.*; import banking.BankAccount; public class BankAccount_deposit { @Test public void deposit_positivevalue_balanceincrease() { BankAccount testBankAccount = new BankAccount(0); try { testBankAccount.deposit(100); } catch (Exception e) { fail("Exception thrown"); } 
assertEquals(100, testBankAccount.getBalance()); } @Test public void deposit_negativevalue_balanceincrease() { BankAccount testBankAccount = new BankAccount(0); try { testBankAccount.deposit(-100); } catch (Exception e) { assertEquals(0, testBankAccount.getBalance()); } } }<file_sep>/Ass2/src/SkipExpansion.java import java.util.*; /** * Implements an expansion strategy that chooses a required path (or flight) * that is connected to the current node if it exists. Otherwise, it creates a * state to the beginning node of a required path and creates a subsequent state * to the end node of the required path. This second state is added to the list * of states that are returned. * * @author <NAME> z3459448 * */ public class SkipExpansion implements Expansion { @Override public List<State> nextStates(State s, Heuristic h, Graph gr) { List<State> states = new ArrayList<State>(); State n = null; State m = null; int g, estimate; // Indicates if there are required edges connected to this node. boolean areEdgesConnected = false; // Add required paths connected to the current node if they exist. for (Edge e : s.getEdgesLeft()) { if (e.from().equals(s.getNode())) { areEdgesConnected = true; HashSet<Edge> newEdgesLeft = new HashSet<Edge>(s.getEdgesLeft()); newEdgesLeft.remove(e); estimate = h.distanceLeft(e.to(), newEdgesLeft, gr); // Ignore delay if this is the last node. if (newEdgesLeft.isEmpty()) { g = s.g() + e.getWeight(); } else { g = s.g() + e.getWeight() + e.to().getDelay(); } n = new State(newEdgesLeft, s, g, estimate, e.to()); states.add(n); } } // If no required paths are connected to the current node, // add the first node of all the required paths. 
if (!areEdgesConnected) { for (Edge e : s.getEdgesLeft()) { HashSet<Edge> newEdgesLeft = new HashSet<Edge>(s.getEdgesLeft()); // Create state to the first node of the required path e estimate = h.distanceLeft(e.from(), newEdgesLeft, gr); g = s.g() + s.getNode().distanceTo(e.from()) + e.from().getDelay(); n = new State(newEdgesLeft, s, g, estimate, e.from()); // Create second state to the final node of the required path e newEdgesLeft.remove(e); estimate = h.distanceLeft(e.from(), newEdgesLeft, gr); // Ignore delay if this is the last node if (newEdgesLeft.isEmpty()) { g = n.g() + e.getWeight(); } else { g = n.g() + e.getWeight() + e.to().getDelay(); } m = new State(newEdgesLeft, n, g, estimate, e.to()); states.add(m); } } return states; } } <file_sep>/Lab03/test/banking/ChequeAccount_general.java package banking; import static org.junit.Assert.*; import java.util.Calendar; import org.junit.Test; import banking.ChequeAccount; public class ChequeAccount_general { @Test public void testGetLastChequeDate() { ChequeAccount testChequeAccount = new ChequeAccount(30, 0); Calendar now = Calendar.getInstance(); try { testChequeAccount.chequeWithdraw(30); } catch (Exception e) { fail("Exception thrown"); } if (now.get(Calendar.HOUR_OF_DAY) != 23 && now.get(Calendar.MINUTE) != 59) { assertEquals(now.get(Calendar.DATE), testChequeAccount.getLastChequeDate().get(Calendar.DATE)); } else { fail("Do not run testGetLastChequeDate between days."); } try { testChequeAccount.withdraw(200); } catch (Exception e) { fail("Exception thrown"); } Calendar now2 = Calendar.getInstance(); if (now.get(Calendar.HOUR_OF_DAY) != 23 && now.get(Calendar.MINUTE) != 59) { assertEquals(now2.get(Calendar.DATE), testChequeAccount.getLastChequeDate().get(Calendar.DATE)); } else { fail("Do not run testGetLastChequeDate between days."); } } @Test public void testGetMonthlyCheques() { ChequeAccount testChequeAccount = new ChequeAccount(100, 0); try { testChequeAccount.chequeWithdraw(30); } catch (Exception e) 
{ fail("Exception thrown"); } assertEquals(1, testChequeAccount.getMonthlyCheques()); try { testChequeAccount.chequeWithdraw(30); } catch (Exception e) { fail("Exception thrown"); } assertEquals(2, testChequeAccount.getMonthlyCheques()); try { testChequeAccount.chequeWithdraw(30); } catch (Exception e) { fail("Exception thrown"); } assertEquals(3, testChequeAccount.getMonthlyCheques()); try { testChequeAccount.chequeWithdraw(30); } catch (Exception e) { // Success! } assertEquals(3, testChequeAccount.getMonthlyCheques()); } }<file_sep>/Lab07/src/ZeroHeuristic.java public class ZeroHeuristic<E> implements Heuristic<E>{ @Override public int distanceLeft(Node<E> source, Node<E> dest) { return 0; } } <file_sep>/Lab06/src/graph/StringBFS.java package graph; import java.text.Collator; import java.util.*; public class StringBFS implements StringSearch { public List<String> search(String source, String destination, Graph<String> g) { // find source node Node<String> sourceNode = null; for (Node<String> n: g.getNodes()) { if (n.getNodeObj().equals(source)) { sourceNode = n; } } // initialise queue and add source node onto the queue // initialise visited hash set and parents hash map ArrayDeque<Node<String>> q = new ArrayDeque<Node<String>>(); HashSet<Node<String>> nodesVisited = new HashSet<Node<String>>(); HashMap<Node<String>, Node<String>> parents = new HashMap<Node<String>, Node<String>>(); // map child to parent if (sourceNode != null) { q.add(sourceNode); } // loop as long as queue is not empty Node<String> curr = null; while (!q.isEmpty()) { curr = q.remove(); // break if destination reached if (curr.getNodeObj().equals(destination)) { break; } // add to visited hash set nodesVisited.add(curr); // find all the children List<Node<String>> connections = curr.getConnections(); final Collator col = Collator.getInstance(); Comparator<Node<String>> com = new Comparator<Node<String>>() { @Override public int compare(Node<String> n, Node<String> m) { if 
(col.compare(n.getNodeObj(), m.getNodeObj()) == -1) return -1; if (n.getNodeObj().equals(m.getNodeObj())) return 0; if (col.compare(n.getNodeObj(), m.getNodeObj()) == 1) return 1; System.out.println("Node<String> objects cannot be compared"); return 0; } }; Collections.sort(connections, com); // add children to queue if they're unvisited for (Node<String> n: connections) { if (!nodesVisited.contains(n)) { q.add(n); parents.put(n, curr); } } } List<String> route = new ArrayList<String>(); // backtrack through parents hash map to find route Node<String> backtrack = curr; // curr should be the destination here while (!backtrack.getNodeObj().equals(source)) { route.add(backtrack.getNodeObj()); backtrack = parents.get(backtrack); } route.add(source); Collections.reverse(route); return route; } } <file_sep>/Ass2/src/Expansion.java import java.util.*; /** * Interface used to encapsulate different methods of expanding nodes. * @author <NAME> z3459448 * */ public interface Expansion { /** * Returns the possible subsequent states from the given state. The concrete * strategy determines which states are added and which states are ignored. * * @param s * The state to calculate possible subsequent states from. * @return A list of possible subsequent states. 
*/ public List<State> nextStates(State s, Heuristic h, Graph gr); } <file_sep>/Lab04/test/enrol/TestSession.java package enrol; import static org.junit.Assert.*; import org.junit.Test; public class TestSession { @Test public void testOverlapsWith() { Session s1 = new Session(1, 2015, 3, "<NAME>", 1400, 1600); Session s2 = new Session(1, 2015, 3, "<NAME>", 1400, 1500); assertTrue(s1.overlapsWith(s2)); Session s3 = new Session(2, 2014, 3, "Physics Theatre", 1200, 1400); assertFalse(s1.overlapsWith(s3)); assertFalse(s2.overlapsWith(s3)); Session s4 = new Session(2, 2015, 3, "Physics Theatre", 1200, 1400); assertFalse(s1.overlapsWith(s4)); assertFalse(s2.overlapsWith(s4)); Session s5 = new Session(1, 2015, 2, "Physics Theatre", 1200, 1400); assertFalse(s1.overlapsWith(s5)); assertFalse(s2.overlapsWith(s5)); Session s6 = new Session(1, 2015, 3, "Physics Theatre", 1200, 1400); assertFalse(s1.overlapsWith(s6)); assertFalse(s2.overlapsWith(s6)); Session s7 = new Session(1, 2015, 3, "Physics Theatre", 1500, 1800); assertTrue(s1.overlapsWith(s7)); assertFalse(s2.overlapsWith(s7)); Session s8 = new Session(1, 2015, 3, "Physics Theatre", 1400, 1600); assertTrue(s1.overlapsWith(s8)); assertTrue(s2.overlapsWith(s8)); } } <file_sep>/Ass2/src/Heuristic.java import java.util.*; /** * Interface used to encapsulate different methods of estimating the distance * between a given state and the goal state. * * @author <NAME> z3459448 * */ public interface Heuristic { /** * Returns an estimate of the cost left between the current state and the * goal state. * * @param edgesLeft * A HashSet of edges left to traverse. * @return An integer representing the estimated cost left between the * current state and the goal state. 
*/ public int distanceLeft(Node n, HashSet<Edge> edgesLeft, Graph g); } <file_sep>/Lab03/test/banking/BankAccount_withdrawal.java package banking; import static org.junit.Assert.*; import org.junit.*; import banking.BankAccount; public class BankAccount_withdrawal { @Test public void withdraw_normal_balancedecrease() { BankAccount testBankAccount = new BankAccount(100); try { testBankAccount.withdraw(100); } catch (Exception e) { fail(); } assertEquals(0, testBankAccount.getBalance()); } @Test public void withdraw_nofunds_exception() { BankAccount testBankAccount = new BankAccount(0); try { testBankAccount.withdraw(100); } catch (Exception e) { // Success! } assertEquals(0, testBankAccount.getBalance()); } @Test public void withdraw_insufficientfunds_exception() { BankAccount testBankAccount = new BankAccount(50); try { testBankAccount.withdraw(100); } catch (Exception e) { // Success! } assertEquals(50, testBankAccount.getBalance()); } @Test public void withdraw_negativeval_exception() { BankAccount testBankAccount = new BankAccount(600); try { testBankAccount.withdraw(-100); } catch (Exception e) { // Success! } assertEquals(600, testBankAccount.getBalance()); } @Test public void withdraw_exceeddailylimit_exception() { BankAccount testBankAccount = new BankAccount(1000); try { testBankAccount.withdraw(900); } catch (Exception e) { // Success! } assertEquals(1000, testBankAccount.getBalance()); } } <file_sep>/Ass2/src/Edge.java /** * Edges represent the flights between airports. The from node is the airport * that the flight departs from and the to node is the airport that the flight * arrives at. The weight of the edge is the length of the flight in minutes (as * provided by the input). * * @author <NAME> z3459448 * */ public class Edge { private Node from; private Node to; private int weight; /** * Makes a new directed edge. * * @param from * The node (airport) that the edge (flight) starts from. * @param to * The node that the edge ends at. 
* @param weight * The weight of the edge, or length of the flight in minutes. */ public Edge(Node from, Node to, int weight) { this.from = from; this.to = to; this.weight = weight; } /** * Returns the node that the edge starts from, or the airport that the * flight departs from. * * @return The node that the edge starts from. */ public Node from() { return from; } /** * Returns the node that the edge ends at, or the airport that the flight * arrives at. * * @return */ public Node to() { return to; } /** * Returns the weight of the edge, or the flight time in minutes. * * @return Returns the weight of the edge. */ public int getWeight() { return weight; } /** * Returns true if two edges have the start and end nodes, and false * otherwise. */ @Override public boolean equals(Object o) { if (o == null) return false; if (!(o instanceof Edge)) return false; Edge e = (Edge) o; if (this.from.equals(e.from()) && this.to.equals(e.to())) return true; return false; } /** * Returns a hash code value for the edge. Based on start and end node of * the edge. * * @return The hash code value for the edge. 
*/ @Override public int hashCode() { int hash = 5; hash += this.from.hashCode() * 37; hash += this.to.hashCode() * 19; return hash; } } <file_sep>/Lab05/test/sets/TestSet.java package sets; import static org.junit.Assert.*; import java.util.ArrayList; import java.util.List; import org.junit.Test; import java.io.*; public class TestSet { @Test public void testContains() { Set<Integer> ts1 = new MySet<Integer>(Integer.class); ts1.add(1); ts1.add(2); assertTrue(ts1.contains(1)); assertTrue(ts1.contains(2)); } @Test public void testSize() { Set<Integer> ts1 = new MySet<Integer>(Integer.class); assertEquals(0, ts1.getSize()); ts1.add(1); ts1.add(2); assertEquals(2, ts1.getSize()); } @Test public void testRemove() { Set<Integer> ts1 = new MySet<Integer>(Integer.class); ts1.add(1); ts1.add(2); ts1.remove(1); assertEquals(1, ts1.getSize()); ts1.remove(2); assertEquals(0, ts1.getSize()); } @Test public void testIntersect() { Set<Integer> ts1 = new MySet<Integer>(Integer.class); Set<Integer> ts2 = new MySet<Integer>(Integer.class); Set<Integer> ts3 = new MySet<Integer>(Integer.class); ts1.add(1); ts1.add(2); ts2.add(2); ts2.add(0); ts2.add(3); ts3 = ts1.intersect(ts2); assertEquals(1, ts3.getSize()); assertTrue(ts3.contains(2)); assertFalse(ts3.contains(1)); assertFalse(ts3.contains(0)); assertFalse(ts3.contains(3)); } @Test public void testSubset() { Set<Integer> ts1 = new MySet<Integer>(Integer.class); Set<Integer> ts2 = new MySet<Integer>(Integer.class); Set<Integer> ts3 = new MySet<Integer>(Integer.class); ts1.add(1); ts1.add(2); ts2.add(2); ts2.add(0); ts2.add(3); ts3 = ts1.intersect(ts2); assertTrue(ts1.subset(ts3)); assertTrue(ts2.subset(ts3)); assertFalse(ts3.subset(ts1)); assertFalse(ts3.subset(ts2)); ts3 = ts1.union(ts2); assertTrue(ts3.subset(ts1)); assertTrue(ts3.subset(ts2)); ts1.remove(1); assertTrue(ts3.subset(ts1)); } @Test public void testUnion() { Set<Integer> ts1 = new MySet<Integer>(Integer.class); Set<Integer> ts2 = new MySet<Integer>(Integer.class); 
Set<Integer> ts3 = new MySet<Integer>(Integer.class); ts1.add(1); ts1.add(2); ts2.add(2); ts2.add(0); ts2.add(3); ts3 = ts1.intersect(ts2); ts3 = ts1.union(ts2); assertEquals(4, ts3.getSize()); assertTrue(ts3.contains(2)); assertTrue(ts3.contains(1)); assertTrue(ts3.contains(0)); assertTrue(ts3.contains(3)); } @Test public void testGetItems() { Set<Integer> ts1 = new MySet<Integer>(Integer.class); Set<Integer> ts2 = new MySet<Integer>(Integer.class); Set<Integer> ts3 = new MySet<Integer>(Integer.class); ts1.add(1); ts1.add(2); ts2.add(2); ts2.add(0); ts2.add(3); ts3 = ts1.intersect(ts2); ts3 = ts1.union(ts2); ts1.remove(1); // test get items // check size is equal // check elements are the same List<Integer> it1 = ts1.getItems(); assertEquals(it1.size(), ts1.getSize()); for (Integer i: it1) { assertTrue(ts1.contains(i)); } List<Integer> it2 = ts2.getItems(); assertEquals(it2.size(), ts2.getSize()); for (Integer i: it2) { assertTrue(ts2.contains(i)); } List<Integer> it3 = ts3.getItems(); assertEquals(it3.size(), ts3.getSize()); for (Integer i: it3) { assertTrue(ts3.contains(i)); } } @Test public void testEquals() { Set<Integer> ts1 = new MySet<Integer>(Integer.class); Set<Integer> ts2 = new MySet<Integer>(Integer.class); ts1.add(1); assertFalse(ts1.equals(ts2)); assertFalse(ts2.equals(ts1)); ts2.add(1); assertTrue(ts1.equals(ts2)); assertTrue(ts2.equals(ts1)); ts1.add(2); assertFalse(ts1.equals(ts2)); assertFalse(ts2.equals(ts1)); ts2.add(2); assertTrue(ts1.equals(ts2)); assertTrue(ts2.equals(ts1)); ts1.add(3); assertFalse(ts1.equals(ts2)); assertFalse(ts2.equals(ts1)); ts2.add(3); assertTrue(ts1.equals(ts2)); assertTrue(ts2.equals(ts1)); ts1.add(5); assertFalse(ts1.equals(ts2)); assertFalse(ts2.equals(ts1)); ts1.add(5); assertFalse(ts1.equals(ts2)); assertFalse(ts2.equals(ts1)); ts2.add(5); assertTrue(ts1.equals(ts2)); assertTrue(ts2.equals(ts1)); ts2.add(6); assertFalse(ts1.equals(ts2)); assertFalse(ts2.equals(ts1)); } @Test public void testScanner() { 
ArrayList<Set<Character>> sets = new ArrayList<Set<Character>>(); try { BufferedReader br = new BufferedReader(new FileReader("testScanner.txt")); for (String line = br.readLine(); line != null; line = br.readLine()) { Set<Character> set = new MySet<Character>(Character.class); for (int i = 0; i < line.length(); i++) { set.add(line.charAt(i)); } sets.add(set); } br.close(); } catch (IOException e) { e.printStackTrace(); } } } <file_sep>/Lab10/src/assembler/Assembly.java package assembler; import java.util.*; public class Assembly implements Part { private int price; private ArrayList<Part> children; public Assembly (int price) { this.price = price; this.children = new ArrayList<Part>(); } public int getPrice() { int sumPrices = this.price; for (Part p: children) { sumPrices += p.getPrice(); } return sumPrices; } @Override public void addPart(Part p) { children.add(p); } @Override public void removePart(Part p) { children.remove(p); } @Override public Part getChild(int index) { return children.get(index); } } <file_sep>/Lab02/src/Tester.java import java.util.Calendar; public class Tester { public static void main(String args[]) { // Set up test Employees (first, second) and Manager (third). 
Employee first = new Employee("Bob", 1500); Employee second = new Employee("Bob", 1500); Calendar hireDate = Calendar.getInstance(); hireDate.set(Calendar.YEAR, 2000); hireDate.set(Calendar.MONTH, 5); hireDate.set(Calendar.DATE, 22); Manager third = new Manager("Bob", 1500, hireDate); System.out.println("======== Test Employee getters and toString"); System.out.println("Expected: class Employee, Result: " + first.getClass()); System.out.println("Expected: 1500, Result: " + first.getSalary()); System.out.println("Expected: Bob, Result: " + first.getEmployeeName()); System.out.println("Expected: Employee.Bob(1500), Result: " + first.toString()); System.out.println("Expected: class Employee, Result: " + second.getClass()); System.out.println("Expected: 1500, Result: " + second.getSalary()); System.out.println("Expected: Bob, Result: " + second.getEmployeeName()); System.out.println("Expected: Employee.Bob(1500), Result: " + second.toString()); System.out.println("======== Lab Questions"); System.out.println(" What output do you expect when getClass().getName() is called in the toString method of Employee with a Manager object?"); System.out.println("Expected: class Manager, Result: " + third.getClass()); System.out.println(" What do you expect when you test whether an Employee is equal to a clone of the Employee?"); Employee firstClone = (Employee) first.clone(); System.out.println("Expected: true, Result: " + first.equals(firstClone)); System.out.println(" What do you expect when you test whether a Manager is equal to an Employee with the same name and salary (and vice versa)?"); System.out.println("Expected: false, Result: " + first.equals(third)); System.out.println("Expected: false, Result: " + third.equals(first)); System.out.println(" What do you expect when you test whether the name of an Employee is equal to the name of a clone of the Employee?"); System.out.println("Expected: true, Result: " + (first.getEmployeeName() == firstClone.getEmployeeName())); 
System.out.println(" If you change the hire date of a clone of a Manager, is the hire date of the original Manager also changed?"); Manager thirdClone = (Manager) third.clone(); Calendar testHireDate = Calendar.getInstance(); testHireDate.set(Calendar.YEAR, 1999); testHireDate.set(Calendar.MONTH, 5); testHireDate.set(Calendar.DATE, 22); thirdClone.setHireDate(testHireDate); System.out.println("Expected: false, Result: " + (third.getHireDate() == thirdClone.getHireDate())); } } <file_sep>/Lab10/src/assembler/Item.java package assembler; public class Item implements Part { private int price; public Item (int price) { this.price = price; } public int getPrice() { return this.price; } @Override public void addPart(Part p) { } @Override public void removePart(Part p) { } @Override public Part getChild(int index) { return null; } }
f353415c3cecc6825a7b726720729240a18ed7f0
[ "Markdown", "Java" ]
28
Java
cloudier/cs2911
1e67da4262dac53d29a509f1f1284a3f4d98189c
d455f247adfb1f5309bbdea489c094a9aaceeff0
refs/heads/master
<file_sep>package tests; import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.testng.annotations.Test; import base.config; import locators.locators; import values.values; @SuppressWarnings("unused") public class Ageneral extends config { locators loc = new locators(); values val = new values(); @Test public void tc01(){ // Verify that all fields are visible with the search button boolean displayver[] = new boolean [8]; displayver[0] = displayverification(loc.locationfrom); displayver[1] = displayverification(loc.locationto); displayver[2] = displayverification(loc.departdate); displayver[3] = displayverification(loc.returndate); displayver[4] = displayverification(loc.passenger); displayver[5] = displayverification(loc.onewaybtn); displayver[6] = displayverification(loc.rndtrpbtn); displayver[7] = displayverification(loc.srchbtn); if (displayver[0]==true){System.out.println("PASS: 1st textbox is displayed");} else {System.out.println("FAIL: 1st textbox is NOT displayed");} if (displayver[1]==true){System.out.println("PASS: 2nd textbox is displayed");} else {System.out.println("FAIL: 2nd textbox is NOT displayed");} if (displayver[2]==true){System.out.println("PASS: 'Depart' textbox is displayed");} else {System.out.println("FAIL: 'Depart' textbox is NOT displayed");} if (displayver[3]==true){System.out.println("PASS: 'Return' textbox is displayed");} else {System.out.println("FAIL: 'Return' textbox is NOT displayed");} if (displayver[4]==true){System.out.println("PASS: 'Guests/Passengers' textbox is displayed");} else {System.out.println("FAIL: 'Guests/Passengers' textbox is NOT displayed");} if (displayver[5]==true){System.out.println("PASS: 'One way' radio-button is displayed");} else {System.out.println("FAIL: 'One way' radio-button is NOT displayed");} if (displayver[6]==true){System.out.println("PASS: 'Round trip' radio-button is displayed");} else {System.out.println("FAIL: 'Round trip' radio-button is NOT displayed");} if 
(displayver[7]==true){System.out.println("PASS: 'Search' button is displayed");} else {System.out.println("FAIL: 'Search' button is NOT displayed");} } }
2e9549555e0087b332bf9cd3a77bdce52b9f1d16
[ "Java" ]
1
Java
rahmed7331/MockFrameWork
16d9f3f98e33814e90fa0aa80678f5f15174fcde
175168711bb16de5d157210cbbdbf3cc140ee11b
refs/heads/master
<repo_name>kylebarron/demquery<file_sep>/demquery/cli.py import json from pathlib import Path import click import cligj import geojson from .demquery import NoDataException, Query @click.command() @cligj.features_in_arg @click.option( '-d', '--dem', type=click.Path(exists=True, file_okay=True, dir_okay=True, readable=True), required=True, help='Paths to DEM files.') @click.option( '-g', '--dem-glob', type=str, required=False, default=None, help='Glob expression for DEM paths if folder is provided.') @click.option( '-b', '--band', type=int, required=False, default=1, show_default=True, help='Band of rasters to use') @click.option( '-i', '--interp-kind', type=str, required=False, default=None, show_default=True, help= 'either None, "linear", "cubic", or "quintic". None will do no interpolation and choose the value in the DEM closest to the provided point. linear creates a 3x3 grid and runs linear interpolation; cubic creates a 5x5 grid and runs cubic interpolation; quintic creates a 7x7 grid and runs quintic interpolation.' 
) def main(features, dem, dem_glob, band, interp_kind): """Assign elevations to GeoJSON """ dem_path = Path(dem) if dem_path.is_dir(): if dem_glob is not None: dem_paths = list(dem_path.glob(dem_glob)) else: dem_paths = list(dem_path.iterdir()) else: dem_paths = [dem_path] query = Query(dem_paths=dem_paths, band=band) click.echo( json.dumps({ 'type': 'FeatureCollection', 'features': list(process_features(features, query, interp_kind)) })) def process_features(features, query, interp_kind): """Assign elevations to individual GeoJSON features """ for feature in features: f = geojson.loads(json.dumps(feature)) yield geojson.utils.map_tuples( lambda t: _add_elevation_to_tuple( t, query=query, interp_kind=interp_kind), f) def _add_elevation_to_tuple(t, query, interp_kind): try: ele = query.query_points([t], interp_kind=interp_kind)[0] except NoDataException: if len(t) == 3: ele = t[2] else: ele = -9999 return (t[0], t[1], ele) if __name__ == '__main__': main() <file_sep>/requirements.txt click>=7.0 cligj>=0.5.0 gdal>=2.4.0 numpy>=1.16 rasterio>=1.0 scipy>=1.0.0 <file_sep>/demquery/__init__.py """Top-level package for demquery.""" __author__ = """<NAME>""" __email__ = '<EMAIL>' __version__ = '0.3.1' from .demquery import Query <file_sep>/CHANGELOG.md # Changelog ## [0.3.1] - 2020-08-19 - No changes: try to get conda-forge package to work correctly ## [0.3.0] - 2020-01-28 - Add CLI script ## [0.2.1] - 2019-12-04 - Include requirements.txt and requirements_dev.txt in manifest bundle ## [0.2.0] - 2019-12-02 - Fix virtual raster issues. 
## [0.1.0] - 2019-11-27 - Initial release on PyPI <file_sep>/demquery/demquery.py ################################################################################ # Module: demquery.py # Description: Wrapper around rasterio to query a Digital Elevation Model # License: MIT, see full license in LICENSE # Web: https://github.com/kylebarron/demquery ################################################################################ import os import os.path import tempfile from pathlib import Path import numpy as np import rasterio from osgeo import gdal from scipy.interpolate import interp2d class NoDataException(Exception): pass class Query: def __init__(self, dem_paths, band=1): """Query Digital Elevation Model Parameters ---------- dem_paths : list list of paths to DEM files. DEM files can be any format readable by GDAL. band : int band of DEM file to query data from; 1 by default. """ super(Query, self).__init__() self.band = band if len(dem_paths) > 1: self.dem_path = self._build_vrt(dem_paths=dem_paths) else: self.dem_path = dem_paths[0] def query_points(self, points, interp_kind=None): """Query points in DEM Parameters ---------- points : list of float or int list of tuples **in longitude, latitude order** representing points to query from the DEM interp_kind : None or str one of None, 'linear', 'cubic', 'quintic'. None will do no interpolation and choose the value in the DEM closest to the provided point. 
linear creates a 3x3 grid and runs linear interpolation; cubic creates a 5x5 grid and runs cubic interpolation; quintic creates a 7x7 grid and runs quintic interpolation Returns ------- List[float]: queried elevation values, in the units of the DEM """ # interp_kind: num_buffer (number of bordering cells required for # interpolation) interp_allowed = {None: 0, 'linear': 1, 'cubic': 2, 'quintic': 3} num_buffer = interp_allowed.get(interp_kind) if num_buffer is None: msg = ( 'interp_kind must be one of ' + ', '.join(map(str, interp_allowed.keys()))) raise ValueError(msg) with rasterio.open(self.dem_path) as dem: self._check_bounds(dem, points, num_buffer=num_buffer) # This must be a list comprehension and not a generator, because # with a generator, when it tries to create the values, the dem # object is already closed. return [ self._query_point( dem, point, num_buffer=num_buffer, interp_kind=interp_kind) for point in points ] def _build_vrt(self, dem_paths): """Create virtual raster using gdal Parameters ---------- dem_paths : list list of strings or pathlib.Path to DEM paths Returns ------- str : path to virtual raster file """ # Make sure all dem_paths exist # An obscure error is given if the files don't exist for dem_path in dem_paths: if not Path(dem_path).exists(): raise FileNotFoundError(dem_path) tmpdir = tempfile.mkdtemp() vrt_path = os.path.join(tmpdir, 'dem.vrt') # Setting vrt to None is weird but required # https://gis.stackexchange.com/a/314580 # https://gdal.org/tutorials/raster_api_tut.html#using-createcopy # The dem_paths must be str, not pathlib.Path! vrt = gdal.BuildVRT(vrt_path, list(map(str, dem_paths))) vrt = None # Check that vrt_path actually was created if not Path(vrt_path).exists(): raise ValueError('Unable to create virtual raster') return vrt_path def _check_bounds(self, dem, points, num_buffer): """Check lon, lat is within bounds Note that this doesn't check that these values are non-missing. 
With a mosaic of tiles, the lon/lat could be within bounds of the virtual raster, but have no data. Parameters ---------- dem : rasterio.DatasetReader open rasterio DatasetReader points : List[tuple] list of tuples in longitude, latitude order num_buffer : int number of bordering cells around point to check """ for point in points: # Split after for line to allow Z in source points lon, lat = point[0], point[1] # Find row, column of elevation square inside raster # Note that row should be thought of as the "y" value; it's the # number _across_ rows, and col should be thought of as the "y" # value _across_ columns. row, col = dem.index(lon, lat) minrow, maxrow = row - num_buffer, row + num_buffer mincol, maxcol = col - num_buffer, col + num_buffer msg = 'longitude outside DEM bounds' msg += '\npoints should be provided in longitude, latitude order.' assert minrow >= 0, msg assert maxrow <= dem.height msg = 'latitude outside DEM bounds' msg += '\npoints should be provided in longitude, latitude order.' assert mincol >= 0, msg assert maxcol <= dem.width def _get_buffer_grid(self, dem, point, num_buffer): """Get array of longitude, latitude, and elevation values from DEM file Parameters ---------- dem : rasterio.DatasetReader open rasterio DatasetReader point : tuple tuple of int or float representing longitude and latitude num_buffer : int number of bordering cells around point to retrieve Returns ------- array : 3D Numpy array (array of longitude values, array of latitude values, array of elevation values) """ # Find row, column of elevation square inside raster # Note that row should be thought of as the "y" value; it's the number # _across_ rows, and col should be thought of as the "y" value _across_ # columns. 
lon, lat = point[0], point[1] row, col = dem.index(lon, lat) # Make window include cells around it # The number of additional cells depends on the value of num_buffer # When num_buffer==1, an additional 8 cells will be loaded and # interpolated on; # When num_buffer==2, an additional 24 cells will be loaded and # interpolated on, etc. # When using kind='linear' interpolation, I'm not sure if having the # extra cells makes a difference; ie if it creates the plane based only # on the closest cells or from all. When using kind='cubic', it's # probably more accurate with more cells. minrow, maxrow = row - num_buffer, row + num_buffer mincol, maxcol = col - num_buffer, col + num_buffer # Add +1 to deal with range() not including end maxrow += 1 maxcol += 1 # Retrieve just this window of values from the DEM window = ([minrow, maxrow], [mincol, maxcol]) val_arr = dem.read(self.band, window=window) # Check the nodata value for the given band against retrieved values try: nodataval = dem.nodatavals[self.band - 1] if np.any(val_arr == nodataval): msg = ( 'Raster nodata value found near lon: {}, lat: {}'.format( lon, lat)) raise NoDataException(msg) except IndexError: # nodataval is not required to exist for each band pass # Check shape expected_rows = 2 * num_buffer + 1 expected_cols = 2 * num_buffer + 1 msg = 'unexpected array shape' assert val_arr.shape == (expected_rows, expected_cols), msg lons, lats = self._lon_lat_grid(dem, minrow, maxrow, mincol, maxcol) # Array with longitudes, latitudes, values # I.e. x, y, z return np.array([np.array(lons), np.array(lats), val_arr]) def _lon_lat_grid(self, dem, minrow, maxrow, mincol, maxcol): """Create grids of longitude and latitude values from column indices Each value corresponds to the center of the given cell. 
Parameters ---------- dem : rasterio.DatasetReader open rasterio DatasetReader minrow : int min row to query maxrow : int max row to query mincol : int min col to query maxcol : int max col to query Returns ------- List[float]: queried elevation values, in the units of the DEM """ # Create array of latitude/longitude pairs for each cell center lons = [] lats = [] for row in range(minrow, maxrow): lon_cols = [] lat_cols = [] for col in range(mincol, maxcol): lon, lat = dem.xy(row, col) lon_cols.append(lon) lat_cols.append(lat) lons.append(lon_cols) lats.append(lat_cols) return lons, lats def _query_point(self, dem, point, num_buffer, interp_kind): """Query elevation data for given point Parameters ---------- dem : rasterio.DatasetReader point : tuple tuple of int or float representing longitude and latitude num_buffer : int number of bordering cells around point to use when interpolating interp_kind : str kind of interpolation. Passed to scipy.interpolate.interp2d. Can be ['linear', 'cubic', 'quintic']. Note that 'cubic' requires 'num_buffer' of at least 3 and 'quintic' requires 'num_buffer' of at least 5. Returns ------- value : float elevation in terms of the unit of the DEM (usually meters) """ arr = self._get_buffer_grid(dem=dem, point=point, num_buffer=num_buffer) # Don't attempt interpolation if not necessary. # arr[2, 0, 0] selects the single z value. 
arr[2] is the 2D array of z # values; there's only one value there so it's [0, 0] if interp_kind is None: return arr[2, 0, 0] # Take responses and create lists of lat/lons/values to interpolate over x = arr[0].flatten() y = arr[1].flatten() z = arr[2].flatten() # Interpolate over the values # fun() returns an array of length 1 fun = interp2d(x=x, y=y, z=z, kind=interp_kind, bounds_error=True) return fun(point[0], point[1])[0] <file_sep>/requirements_dev.txt bump2version coverage flake8 pip pytest-runner pytest tox twine watchdog wheel <file_sep>/README.md # demquery [![Pypi](https://img.shields.io/pypi/v/demquery.svg)](https://pypi.python.org/pypi/demquery) [![Downloads](https://img.shields.io/travis/kylebarron/demquery.svg)](https://travis-ci.org/kylebarron/demquery) [![Supported Python Versions](https://img.shields.io/pypi/pyversions/demquery.svg)](https://pypi.org/project/demquery/#supported-versions) Wrapper around rasterio to query points on a Digital Elevation Model. ## Features - Use multiple raster files without having to merge them into a new file - Query many points at once - Optional 2D interpolation (linear, cubic, or quintic) - Reasonably performant by reading the minimal data required from raster ## Install I recommend first installing dependencies with Conda, then installing demquery itself with pip. ``` conda install gdal rasterio numpy scipy -c conda-forge ``` ``` pip install demquery ``` ## CLI Script ``` > demquery --help Usage: demquery [OPTIONS] FEATURES... Assign elevations to GeoJSON Options: -d, --dem PATH Paths to DEM files. [required] -g, --dem-glob TEXT Glob expression for DEM paths if folder is provided. -b, --band INTEGER Band of rasters to use [default: 1] -i, --interp-kind TEXT either None, "linear", "cubic", or "quintic". None will do no interpolation and choose the value in the DEM closest to the provided point. 
linear creates a 3x3 grid and runs linear interpolation; cubic creates a 5x5 grid and runs cubic interpolation; quintic creates a 7x7 grid and runs quintic interpolation. --help Show this message and exit. ``` ```bash echo \ '{"type":"Feature","properties":{"name": "Glacier Peak"},"geometry":{"type":"Point","coordinates":[-121.2436843,48.0163834]}}' \ | demquery -d /path/to/dem/files ``` Outputs: ```json {"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": {"type": "Point", "coordinates": [-121.243684, 48.016383, 1431.5755615234375]}, "properties": {"name": "<NAME>"}}]} ``` ## Documentation ```py from demquery import Query dem_paths = ['dem1.tif', 'dem2.tif'] query = Query(dem_paths) # Points must be in longitude, latitude order! # These points are in Manhattan, not Antarctica points = [(-73.985564, 40.757965), (-73.968520, 40.778912)] elevations = query.query_points(points, interp_kind='linear') ``` ## Data Download For a great visual tool to download worldwide SRTM data, check out these sites: - 30m resolution: http://dwtkns.com/srtm30m/ - 90m resolution: http://dwtkns.com/srtm/ ## Releasing To upload a new release to PyPI ```bash python setup.py sdist twine upload dist/demquery-0.3.0.tar.gz ``` <file_sep>/tests/__init__.py """Unit test package for demquery.""" <file_sep>/tests/test_demquery.py #!/usr/bin/env python """Tests for `demquery` package.""" from urllib.request import urlretrieve from zipfile import ZipFile from demquery import Query # Download sample data stubs = ['USGS_NED_13_n33w117_IMG', 'USGS_NED_13_n34w117_IMG'] for stub in stubs: url = 'https://prd-tnm.s3.amazonaws.com/StagedProducts/Elevation/13/IMG/' url += stub url += '.zip' urlretrieve(url, stub + '.zip') # Extract file with ZipFile(stub + '.zip') as z: z.extractall('.') def test_create_query(): dem_paths = [x + '.img' for x in stubs] query = Query(dem_paths)
e7a604f7ea3f6a9534a2ea39b61e8f0464ba61b6
[ "Markdown", "Python", "Text" ]
9
Python
kylebarron/demquery
5871ad22541c7645b456a8fb998dc2be57b5ef91
ed0fb87ed5529314bfb4df5fca553316f6c6488b
refs/heads/master
<repo_name>redraushan/sinaps-gauge<file_sep>/utils/scale.js import { Dimensions } from 'react-native'; const { width, height } = Dimensions.get('window'); // Guideline sizes are based on standard ~5" screen mobile device const guidelineBaseWidth = 350; const guidelineBaseHeight = 680; const scale = size => (width / guidelineBaseWidth) * size; const scaleVertical = size => (height / guidelineBaseHeight) * size; const scaleModerate = (size, factor = 0.5) => size + ((scale(size) - size) * factor); const vw = size => size * (width/100); const vh = size => size * (height/100); const vmin = size => size * Math.min((width/100),(height/100)); const vmax = size => size * Math.max((width/100),(height/100)); export { scale, scaleVertical, scaleModerate, vw, vh, vmin, vmax }; <file_sep>/components/reaction-gauge.js import React from "react"; import { StyleSheet, Image, TouchableOpacity, View } from "react-native"; export default function ReactionGauge({ style, onPress, selectedReaction }) { return ( <TouchableOpacity style={styles.container} onPress={() => onPress()}> <View style={{ ...styles.container, ...style }}> {!selectedReaction ? 
( <React.Fragment> <Image style={{ ...styles.emoticonC }} source={require("../assets/trigger.png")} /> </React.Fragment> ) : null} {selectedReaction === 1 && ( <Image style={styles.emoticon} source={require("../assets/1.png")} /> )} {selectedReaction === 2 && ( <Image style={styles.emoticon} source={require("../assets/2.png")} /> )} {selectedReaction === 3 && ( <Image style={styles.emoticon} source={require("../assets/3.png")} /> )} {selectedReaction === 4 && ( <Image style={styles.emoticon} source={require("../assets/4.png")} /> )} {selectedReaction === 5 && ( <Image style={styles.emoticon} source={require("../assets/5.png")} /> )} {selectedReaction === 6 && ( <Image style={styles.emoticon} source={require("../assets/6.png")} /> )} {selectedReaction === 7 && ( <Image style={styles.emoticon} source={require("../assets/7.png")} /> )} </View> </TouchableOpacity> ); } ReactionGauge.defaultProps = { onPress: () => {} }; const styles = StyleSheet.create({ container: { flex: 1, height: 42, justifyContent: "flex-start", alignItems: "center", flexDirection: "row", transform: [{ translateX: 2 }] }, emoticonB: { transform: [{ translateX: -18 }], zIndex: -1 }, emoticonC: { width: 80, height: 40, transform: [{ scaleX: 0.7 }, { scaleY: 0.7 }, { translateX: -17 }] }, emoticon: { width: 30, height: 30 } }); <file_sep>/components/reaction-bar.js import React from "react"; import { StyleSheet, View } from "react-native"; import { REACTION_COLOR } from "./constant"; export default function ReactionBar({ reactions }) { const totalReactions = reactions.reduce((pv, cv) => { return pv + cv.value; }, 0); const getPercentage = reaction => { return (reaction / totalReactions) * 100; }; return ( <View style={styles.container}> {reactions.map((reaction, index, arr) => { return reaction.value > 0 ? ( <View key={reaction.id} style={{ ...styles.bar, backgroundColor: REACTION_COLOR[reaction.id], width: `${getPercentage(reaction.value)}%`, borderTopLeftRadius: index === 0 ? 
500 : 0, borderBottomLeftRadius: index === 0 ? 500 : 0, borderTopRightRadius: index === arr.length - 1 ? 500 : 0, borderBottomRightRadius: index === arr.length - 1 ? 500 : 0 }} ></View> ) : null; })} </View> ); } ReactionBar.defaultProps = { onPress: () => {} }; const styles = StyleSheet.create({ container: { justifyContent: "center", flexDirection: "row", paddingTop: 20, width: "100%" }, bar: { height: 10 } }); <file_sep>/components/constant.js export const REACTION_COLOR = { 1: "#56CCF2", 2: "#2D9CDB", 3: "#2F80ED", 4: "#936FCF", 5: "#E167FF", 6: "#FF6767", 7: "#D02222" }; <file_sep>/App.js import React from "react"; import Reactions from "./components/reactions"; import { StyleSheet, View, ScrollView } from "react-native"; const reactions = [ { id: 1, value: 33 }, { id: 2, value: 22 }, { id: 3, value: 10 }, { id: 4, value: 50 }, { id: 5, value: 22 }, { id: 6, value: 40 }, { id: 7, value: 60 } ]; export default function App() { return ( <ScrollView style={styles.container}> <View style={{ ...style.reactionContainer }}> <Reactions reactions={reactions} style={styles.reactionGauge} /> </View> </ScrollView> ); } const styles = StyleSheet.create({ container: { flex: 1, backgroundColor: "#fff" }, reactionContainer: { margin: 20, borderRadius: 15, backgroundColor: "#F1F3F7", justifyContent: "center", padding: 30, marginTop: 100 }, reactionGauge: { alignItems: "center", justifyContent: "center", flexDirection: "row", paddingLeft: 10 } }); <file_sep>/components/reactions.js import React from "react"; import PropTypes from "prop-types"; import ReactionGauge from "./reaction-gauge"; import ReactionPicker from "./reaction-picker"; import ReactionBar from "./reaction-bar"; import { StyleSheet, View, TouchableOpacity } from "react-native"; export default class Reactions extends React.Component { state = { isOpen: false }; toggleReaction = () => { this.setState(prevState => ({ isOpen: !prevState.isOpen })); }; handleReaction = reactionId => { this.setState(prevState => ({ 
isOpen: !prevState.isOpen, reactionId: prevState.reactionId === reactionId ? null : reactionId, reactions: prevState.reactionId === reactionId ? this.reactions : [{ id: reactionId, value: 100 }] })); }; render() { const { style, reactions } = this.props; const { isOpen, reactionId } = this.state; return ( <React.Fragment> <View style={{ ...style }}> <ReactionGauge selectedReaction={reactionId} onPress={this.toggleReaction} style={{ flex: 1, flexGrow: 5 }} /> {isOpen ? ( <TouchableOpacity onPress={this.toggleReaction} style={{ ...style.reactionPicker }} > <ReactionPicker selectedReaction={reactionId} onPress={this.handleReaction} style={{ flex: 1, flexGrow: 3 }} /> </TouchableOpacity> ) : null} </View> <ReactionBar reactions={reactions} /> </React.Fragment> ); } } Reactions.defaultProps = { onClick: () => {}, reactions: [{ id: 1, value: 100 }] }; Reactions.propTypes = { onClick: PropTypes.func, reactions: PropTypes.arrayOf( PropTypes.shape({ id: PropTypes.number, value: PropTypes.number }) ) }; const style = StyleSheet.create({ reactionPicker: { position: "absolute", backgroundColor: "transparent", paddingBottom: 400, paddingTop: 400, paddingLeft: 400, right: 0 } });
087c9a0fe9ee82e43b430c85c78b83eeeabefdfd
[ "JavaScript" ]
6
JavaScript
redraushan/sinaps-gauge
fe9385c1bc41e1f3724490634c1f06738e49eac9
e3432d1d6c982c21f263889218c746421888a083
refs/heads/master
<file_sep>import React from 'react' import { View, TouchableOpacity, StyleSheet } from 'react-native' import { FontAwesome, Ionicons } from '@expo/vector-icons' export function BackBtn ({ onPress }) { return ( <TouchableOpacity onPress={ onPress } style={ styles.backHeader }> <FontAwesome name='arrow-left' size={ 30 } color='white' /> </TouchableOpacity> ) } const styles = StyleSheet.create({ backHeader: { alignSelf: 'stretch', paddingLeft: 10, paddingTop: 5, height: 45, backgroundColor: 'black', } })<file_sep>import React, { Component } from 'react'; import { View, TouchableOpacity, Text, Platform, StyleSheet, TextInput, KeyboardAvoidingView, Keyboard } from 'react-native' import { connect } from 'react-redux' import { BackBtn } from './BackBtn' import { SubmitBtn } from './SubmitBtn' import { addDeck, setCurrentDeck } from '../actions' class NewDeck extends Component { state = { title: '' } newUID = () => { return Math.floor(Math.random()*8999999999999999+1000000000000000).toString(); } handleSubmit () { Keyboard.dismiss() this.setState({ title: '' }) const deck = { 'title': this.state.title, 'id': this.newUID(), questions: [] } this.props.addDeck(deck) this.props.setCurrentDeck(deck) this.props.navigation.navigate('DeckDetail', { id: deck.id } ) } render () { const { navigation } = this.props return ( <KeyboardAvoidingView style={{ flex: 1 }} behavior="padding" keyboardVerticalOffset={ Platform.select({ ios: () => 100, android: () => 120 })() } enabled > <BackBtn onPress={() => navigation.navigate('Decks')}/> <View style={ styles.container }> <Text style={ styles.header }> What is the name </Text> <Text style={ styles.header }> of your new deck? 
</Text> <TextInput style={ styles.input } multiline={ true } onChangeText={(title) => this.setState({title})} value={this.state.title} /> <SubmitBtn onPress={ this.handleSubmit.bind(this) } /> </View> </KeyboardAvoidingView> ) } } const styles = StyleSheet.create({ container: { flex: 1, justifyContent: 'center', alignItems: 'center', flexDirection: 'column' }, header: { fontSize: 38, color: 'black', textAlign: 'center' }, input: { borderWidth: 2, borderColor: 'black', backgroundColor: 'white', borderRadius: 7, marginTop: 50, marginBottom: 50, height: 50, fontSize: 25, marginLeft: 40, marginRight: 40, alignSelf: 'stretch', textAlign: 'center', paddingTop: 8 }, }) const mapDispatchToProps = dispatch => ({ addDeck: (title) => dispatch(addDeck(title)), setCurrentDeck: (deck) => dispatch(setCurrentDeck(deck)) }); export default connect(null, mapDispatchToProps)(NewDeck)<file_sep>import * as Api from '../utils/api' export const GET_DECKS = 'GET_DECKS' export const ADD_DECK = 'ADD_DECK' export const GET_DECK = 'GET_DECK' export const ADD_CARD = 'ADD_CARD' export const SET_CURRENT_DECK = 'SET_CURRENT_DECK' export const getDecks = () => dispatch => { Api.getDecks().then(decks => dispatch({ type: GET_DECKS, decks }) ).catch(error => console.log(error)) } export const addDeck = (deck) => dispatch => { Api.addDeck(deck).then(deck => dispatch({ type: ADD_DECK, deck }) ).catch(error => console.log(error)) } export const getDeck = (id) => dispatch => { dispatch({ type: GET_DECK, id }) } export const setCurrentDeck = (deck) => dispatch => ( dispatch({ type: SET_CURRENT_DECK, deck }) ) export const addCard = (cardObj) => dispatch => { Api.addCard(cardObj).then(deck => { dispatch({ type: ADD_CARD, deck }) } ).catch(error => console.log(error)) }<file_sep>import React from 'react' import { TouchableOpacity, StyleSheet, Text } from 'react-native' export function SubmitBtn({ onPress }) { return ( <TouchableOpacity style={ styles.submit } onPress={ onPress } > <Text style={ 
styles.submitText }>Submit</Text> </TouchableOpacity> ) } const styles = StyleSheet.create({ submit: { padding: 5, height: 53, backgroundColor: 'red', alignSelf: 'stretch', alignItems: 'center', justifyContent: 'center', marginRight: 60, marginLeft: 60, borderRadius: 7 }, submitText: { color: 'white', fontSize: 28, } })<file_sep>import React, { Component } from 'react'; import { View, TouchableOpacity, Text, Platform, StyleSheet } from 'react-native'; import { connect } from 'react-redux' import { getDeck } from '../actions' import { BackBtn } from './BackBtn'; class DeckDetail extends Component { componentWillMount() { this.props.getDeck(this.props.navigation.state.params.id) } render() { const { deck, navigation } = this.props return ( <View style={ styles.container }> <BackBtn onPress={() => navigation.navigate('Decks')}/> <View style={ styles.subContainer }> { !deck && <View> <Text>Sorry, this deck couldn't be found</Text> </View> } { deck && <View style={ styles.contentContainer }> <View style={ styles.deck }> <Text style={{ fontSize: 35, textAlign: 'center' }}>{ deck.title }</Text> <Text style={{ fontSize: 30 }}>{ deck.questions.length }</Text> </View> <View style={ styles.btnContainer }> <TouchableOpacity onPress={() => navigation.navigate('Quiz', { deck: deck })} style={[ styles.quizBtn, styles.btn ]} > <Text style={ styles.btnText }> Start Quiz </Text> </TouchableOpacity> <TouchableOpacity onPress={() => navigation.navigate('AddCard', { id: deck.id })} style={[ styles.addBtn, styles.btn ]} > <Text style={ styles.btnText }> Add Card </Text> </TouchableOpacity> </View> </View> } </View> </View> ) } } const styles = StyleSheet.create({ container: { flex: 1, alignItems: 'center', flexDirection: 'column', alignItems: 'stretch', borderWidth: 3, borderColor: 'black' }, subContainer: { flex: 1, alignItems: 'stretch', justifyContent: 'space-between', backgroundColor: 'white', borderRadius: Platform.OS === 'ios' ? 
16 : 2, padding: 10, paddingTop: 50, paddingBottom: 50, margin: 20, shadowRadius: 3, shadowOpacity: 0.8, shadowColor: 'rgba(0,0,0,0.75)', alignSelf: 'stretch', shadowOffset: { width: 0, height: 3 } }, contentContainer: { flex: 1, alignSelf: 'stretch', justifyContent: 'space-between', }, deck: { flex: 1, alignItems: 'center', alignSelf: 'stretch' }, btnContainer: { flex: 1, alignItems: 'flex-end', justifyContent: 'space-around', flexDirection: 'row' }, notFound: { alignItems: 'center', backgroundColor: 'red', alignSelf: 'stretch', color: 'white', fontSize: 30 }, btn: { height: 60, alignItems: 'center', justifyContent: 'center', paddingLeft: 20, paddingRight: 20, borderRadius: Platform.OS === 'ios' ? 16 : 2, }, quizBtn: { backgroundColor: 'blue', }, addBtn: { backgroundColor: 'red', }, btnText: { color: 'white', fontSize: 25 } }) const mapDispatchToProps = dispatch => ({ getDeck: (id) => dispatch(getDeck(id)) }) const mapStateToProps = state => ({ deck: state.currentDeck }) export default connect(mapStateToProps, mapDispatchToProps)(DeckDetail)<file_sep># Mobile Flashcard ### An app to study on the go using React Native and Redux ## Install 1. Clone the repo 2. `$ npm install` or `$ yarn install` ## Run 1. Using expo, start the IOS simulator 2. OR use `$ yarn start` or `$ npm start` ## Summary ### Create decks to study, add cards with questions and answers to each deck, and take the quiz! 
<file_sep>import { AsyncStorage } from 'react-native' import { Notifications, Permissions } from 'expo'; const NOTIFICATION_KEY = 'StudyNotifications' const DECK_KEY = 'DeckStorageKey' INIT_DECKS = { 'React': { title: 'React', id: '515050', questions: [ { question: 'What is React?', answer: 'A library for managing user interfaces' }, { question: 'Where do you make Ajax requests in React?', answer: 'The componentDidMount lifecycle event' } ] } } export function initDecks() { return AsyncStorage.setItem(DECK_KEY, JSON.stringify(INIT_DECKS)) .catch(error => console.log('error')); } export function getDecks() { return AsyncStorage.getItem(DECK_KEY) .then(result => { if (result) { return JSON.parse(result) } return null }) .catch(error => console.log(error)) } export function addDeck(deck) { return AsyncStorage.mergeItem(DECK_KEY, JSON.stringify( { [deck.title]: deck } )) .then(() => deck) .catch(error => console.log(error)) } export function addCard(cardObj) { return AsyncStorage.getItem(DECK_KEY) .then(decks => { const deckObj = JSON.parse(decks) let updatedDeck Object.keys(deckObj).forEach((deck) => { if (deckObj[deck].id === cardObj.id) { const question = { question: cardObj.question, answer: cardObj.answer } deckObj[deck].questions = [ ...deckObj[deck].questions, question ] AsyncStorage.clear() AsyncStorage.setItem(DECK_KEY, JSON.stringify(deckObj)) return updatedDeck = deckObj[deck] } }) return updatedDeck }) .catch(error => console.log(error)) } export function clearLocalNotifications() { return AsyncStorage.removeItem(NOTIFICATION_KEY) .then(Notifications.cancelAllScheduledNotificationsAsync) } function createNotification () { return { title: 'Study Time!', body: "🤙 Don't forget to study today!", ios: { sound: true }, android: { sound: true, priority: 'high', sticky: false, vibrate: true } } } export function setLocalNotification () { AsyncStorage.getItem(NOTIFICATION_KEY) .then(JSON.parse) .then((data) => { console.log('data ', data) if (data === null) { 
Permissions.askAsync(Permissions.NOTIFICATIONS) .then(({ status }) => { console.log('asking, ', status) if (status === 'granted') { Notifications.cancelAllScheduledNotificationsAsync(); let tomorrow = new Date(); tomorrow.setDate(tomorrow.getDate() + 1); tomorrow.setHours(20); tomorrow.setMinutes(0); Notifications.scheduleLocalNotificationAsync( createNotification(), { time: tomorrow, repeat: 'day', } ) AsyncStorage.setItem(NOTIFICATION_KEY, JSON.stringify(true)); } }) } }) .catch((error) => console.log(error)) }<file_sep>import React, { Component } from 'react'; import { View, TouchableOpacity, Text, Platform, StyleSheet, List, FlatList } from 'react-native'; import { connect } from 'react-redux'; import { getDecks } from '../actions'; import * as Api from '../utils/api'; class Decks extends Component { componentDidMount () { Api.initDecks().then( () => this.props.getDecks() ) } render () { const { decks, navigation } = this.props return ( <View style={ styles.container }> { !decks && <Text> You have no decks at the moment, go create one! </Text> } { decks && <FlatList data={ decks } renderItem={({ item: deck }) => ( <TouchableOpacity onPress={() => navigation.navigate('DeckDetail', { id: deck.id }) } style={ styles.deck } key={ deck.id } > <Text style={{ fontSize: 24 }}>{ deck.title }</Text> <Text style={{ fontSize: 20 }}>{ deck.questions.length }</Text> </TouchableOpacity> )} keyExtractor={ deck => deck.id.toString() } /> } </View> ) } } styles = StyleSheet.create({ container: { flex: 1, alignItems: 'stretch', }, deck: { borderRadius: Platform.OS === 'ios' ? 
16 : 2, flexDirection: 'column', alignItems: 'center', backgroundColor: 'white', padding: 10, marginTop: 30, marginBottom: 30, marginLeft: 10, marginRight: 10, shadowRadius: 3, shadowOpacity: 0.8, shadowColor: 'rgba(0,0,0,0.75)', flex: 1, alignSelf: 'stretch', shadowOffset: { width: 0, height: 3 } } }) const mapDispatchToProps = dispatch => ({ getDecks: () => dispatch(getDecks()) }); const mapStateToProps = state => ({ decks: state.decks }); export default connect(mapStateToProps, mapDispatchToProps)(Decks); <file_sep>import React, { Component } from 'react' import { View, TouchableOpacity, Text, Platform, StyleSheet, } from 'react-native'; import { BackBtn } from './BackBtn' import { clearLocalNotifications, setLocalNotification } from '../utils/api' class Quiz extends Component { state = { qIndex: 0, correct: 0, showAnswer: false, displayResults: false } step = (isCorrect) => { const qIndex = this.state.qIndex const questionsLength = this.props.navigation.state.params.deck.questions.length const correct = this.state.correct console.log('iscorrect ', isCorrect) if (isCorrect) { this.setState({ correct: correct + 1 }) } if (!isCorrect && this.state.correct > 0) { this.setState({ incorrect: correct - 1}) } if (this.state.qIndex < questionsLength - 1) { const step = this.state.qIndex this.setState({ qIndex: step + 1 }) } else { this.setState({ displayResults: true }) } console.log(this.state) } showAnswer () { const show = !this.state.showAnswer this.setState({ showAnswer: show }) clearLocalNotifications() .then(setLocalNotification) } resetQuiz () { this.setState({ qIndex: 0, correct: 0, showAnswer: false, displayResults: false }) } render () { const { navigation } = this.props const { qIndex, correct, displayResults, showAnswer } = this.state const questions = navigation.state.params.deck.questions return ( <View style={ styles.container }> { !showAnswer && !displayResults && <Text style={[ styles.longText, { fontSize: 35 } ]}> { questions[qIndex].question } 
</Text> } { showAnswer && !displayResults && <Text style={[ styles.longText, { fontSize: 35 } ]}> { questions[qIndex].answer } </Text> } { !displayResults && <View> <TouchableOpacity onPress={ () => this.showAnswer() } style={ styles.answer } > <Text style={{ fontSize: 15, color: 'orange' }}> { !showAnswer && 'answer' } { showAnswer && 'question' } </Text> </TouchableOpacity> <View style={ styles.btnContainer }> <TouchableOpacity onPress={ () => this.step(true) } style={[ styles.btn, styles.correct ]} > <Text style={{ fontSize: 25, color: 'white' }}> Correct </Text> </TouchableOpacity> <TouchableOpacity onPress={ () => this.step(false) } style={[ styles.btn, styles.incorrect ]} > <Text style={{ fontSize: 25, color: 'white' }}> Incorrect </Text> </TouchableOpacity> </View> </View> } { displayResults && <View style={ styles.container }> <Text style={{ fontSize: 40 }}> Quiz Complete! </Text> <Text style={{ fontSize: 30}}> You scored { correct }/{ questions.length } </Text> <TouchableOpacity onPress={ () => this.resetQuiz() } style={[ styles.btn, { backgroundColor: 'yellow' }]} > <Text style={{ fontSize: 25, color: 'white' }}> Try Again </Text> </TouchableOpacity> <TouchableOpacity onPress={ () => navigation.navigate('Decks') } style={[ styles.btn, { backgroundColor: 'purple' }]} > <Text style={{ fontSize: 25, color: 'white' }}> Pick New Deck </Text> </TouchableOpacity> </View> } </View> ) } } const styles = StyleSheet.create({ container: { flex: 1, justifyContent: 'space-around', alignItems: 'center' }, longText: { textAlign: 'center' }, answer: { height: 65, paddingLeft: 30, paddingRight: 30, alignItems: 'center', justifyContent: 'center' }, btnContainer: { justifyContent: 'space-around', flexDirection: 'row' }, correct: { backgroundColor: 'green' }, incorrect: { backgroundColor: 'red', }, btn: { borderRadius: Platform.OS === 'ios' ? 
16 : 2, height: 65, paddingLeft: 20, paddingRight: 20, marginLeft: 20, marginRight: 20, alignItems: 'center', justifyContent: 'center' } }) export default Quiz;<file_sep>import { GET_DECKS, ADD_DECK, GET_DECK, ADD_CARD, SET_CURRENT_DECK } from "../actions"; function appState(state = { decks: [], currentDeck: null }, action) { switch (action.type) { case GET_DECKS: const decks = [] Object.keys(action.decks).forEach((deck) => decks.push(action.decks[deck]) ); return { ...state, decks: [ ...decks ] } case ADD_DECK: return { ...state, decks: [ ...state.decks, action.deck] } case GET_DECK: const deck = state.decks.find(deck => deck.id === action.id) if (state.currentDeck.id === action.id) { return { ...state } } return { ...state, currentDeck: { ...deck } } case ADD_CARD: return { ...state, decks: [ ...state.decks.filter( singleDeck => singleDeck.id !== action.deck.id ), action.deck ], currentDeck: { ...action.deck } } case SET_CURRENT_DECK: return { ...state, currentDeck: { ...action.deck } } default: return state } } export default appState;<file_sep>import React, { Component } from 'react' import { View, TouchableOpacity, Text, TextInput, Platform, StyleSheet, KeyboardAvoidingView, Keyboard } from 'react-native'; import { connect } from 'react-redux' import { addCard } from '../actions' import { BackBtn } from './BackBtn' import { SubmitBtn } from './SubmitBtn' class AddCard extends Component { state = { question: '', answer: '' } handleSubmit() { const id = this.props.navigation.state.params.id cardObj = { id: id, question: this.state.question, answer: this.state.answer } Keyboard.dismiss(); this.props.addCard(cardObj); this.props.navigation.navigate('DeckDetail', { id: id }) this.setState({ question: '', answer: '' }) } render() { const { navigation } = this.props return ( <KeyboardAvoidingView style={ styles.container } behavior="padding" keyboardVerticalOffset={ Platform.select({ ios: () => 80, android: () => 60 })() } enabled > <BackBtn onPress={ () => 
navigation.navigate( 'DeckDetail', { id : navigation.state.params.id} ) } /> <View style={ styles.container }> <Text style={ styles.label }> Question </Text> <TextInput style={ styles.input } multiline={ true } onChangeText={(question) => this.setState({question})} value={this.state.question} /> <Text style={ styles.label }> Answer </Text> <TextInput style={[ styles.input, { marginBottom: 40 } ]} multiline={ true } onChangeText={(answer) => this.setState({answer})} value={this.state.answer} /> <SubmitBtn onPress={ this.handleSubmit.bind(this) } /> </View> </KeyboardAvoidingView> ) } } const styles = StyleSheet.create({ container: { flex: 1, justifyContent: 'center', alignItems: 'center', alignSelf: 'stretch' }, label: { alignSelf: 'stretch', fontSize: 38, color: 'black', textAlign: 'left', marginBottom: 10, marginLeft: 40, marginRight: 40, }, input: { alignSelf: 'stretch', borderWidth: 2, borderColor: 'black', backgroundColor: 'white', borderRadius: 7, marginBottom: 15, height: 50, fontSize: 25, marginLeft: 40, marginRight: 40, alignSelf: 'stretch', textAlign: 'center', paddingTop: 8 } }) const mapDispatchToProps = dispatch => ({ addCard: (cardObj) => dispatch(addCard(cardObj)) }) export default connect(null, mapDispatchToProps)(AddCard)
3170253ff65565fb705ad897fb2189036a54cc43
[ "JavaScript", "Markdown" ]
11
JavaScript
CHBaker/mobile-flashcards
9214ecd26d3b1ec11551e475117d390e4ee112ff
13acf455ddf75ad07ce34c933f9b1a863c7ccd3e
refs/heads/master
<file_sep><?php namespace App\Http\Controllers; use App\Customer; use App\Index; use App\Order; use App\Product; use App\Supplier; use Illuminate\Http\Request; class IndexController extends Controller { /** * Display a listing of the resource. * * @return \Illuminate\Http\Response */ public function index() { $customers = Customer::count(); $suppliers = Supplier::count(); $orders = Order::count(); $products = Product::where('stock','>',0)->count(); return view('index',compact('customers','suppliers','orders','products')); } /** * Show the form for creating a new resource. * * @return \Illuminate\Http\Response */ public function create() { // } /** * Store a newly created resource in storage. * * @param \Illuminate\Http\Request $request * @return \Illuminate\Http\Response */ public function store(Request $request) { // } /** * Display the specified resource. * * @param \App\Index $index * @return \Illuminate\Http\Response */ public function show(Index $index) { // } /** * Show the form for editing the specified resource. * * @param \App\Index $index * @return \Illuminate\Http\Response */ public function edit(Index $index) { // } /** * Update the specified resource in storage. * * @param \Illuminate\Http\Request $request * @param \App\Index $index * @return \Illuminate\Http\Response */ public function update(Request $request, Index $index) { // } /** * Remove the specified resource from storage. * * @param \App\Index $index * @return \Illuminate\Http\Response */ public function destroy(Index $index) { // } } <file_sep><?php namespace App\Http\Controllers; use App\sallery; use Illuminate\Http\Request; class SalleryController extends Controller { /** * Display a listing of the resource. * * @return \Illuminate\Http\Response */ public function index() { // } /** * Show the form for creating a new resource. * * @return \Illuminate\Http\Response */ public function create() { // } /** * Store a newly created resource in storage. 
* * @param \Illuminate\Http\Request $request * @return \Illuminate\Http\Response */ public function store(Request $request) { // } /** * Display the specified resource. * * @param \App\sallery $sallery * @return \Illuminate\Http\Response */ public function show(sallery $sallery) { // } /** * Show the form for editing the specified resource. * * @param \App\sallery $sallery * @return \Illuminate\Http\Response */ public function edit(sallery $sallery) { // } /** * Update the specified resource in storage. * * @param \Illuminate\Http\Request $request * @param \App\sallery $sallery * @return \Illuminate\Http\Response */ public function update(Request $request, sallery $sallery) { // } /** * Remove the specified resource from storage. * * @param \App\sallery $sallery * @return \Illuminate\Http\Response */ public function destroy(sallery $sallery) { // } } <file_sep><?php use Illuminate\Database\Migrations\Migration; use Illuminate\Database\Schema\Blueprint; use Illuminate\Support\Facades\Schema; class CreateSalleriesTable extends Migration { /** * Run the migrations. * * @return void */ public function up() { Schema::create('salleries', function (Blueprint $table) { $table->bigIncrements('id'); $table->unsignedBigInteger('user_id'); $table->unsignedBigInteger('staff_id'); $table->double('price',8,2); $table->unsignedBigInteger('quantity'); $table->double('total',8,2); $table->integer('status'); $table->year('year'); $table->timestamp('month'); $table->timestamps(); $table->foreign('user_id')->references('id')->on('users'); $table->foreign('staff_id')->references('id')->on('staff'); }); } /** * Reverse the migrations. * * @return void */ public function down() { Schema::dropIfExists('salleries'); } }
2196d3274570b956d933fa535c5b404081c3fdef
[ "PHP" ]
3
PHP
Hasan525/Foyej-Seed-Company
5da38a744c0a9f4708bad0d6e94ff0cd57919d53
abf325246a979876e6cdc470022a08ab85989f8b
refs/heads/master
<file_sep>package _02_robot_graffiti; import org.jointheleague.graphical.robot.Robot; public class RobotGraffiti { public static void main(String[] args) { Robot jim = new Robot(); jim.penDown(); jim.setSpeed(100); jim.move(200); jim.turn(130); jim.move(100); jim.turn(50); jim.move(50); jim.turn(50); jim.move(100); } }
04f76d6fe790ea0691231aa37bd889023da1918f
[ "Java" ]
1
Java
League-Level0-Student/level-0-module-0-dshan102
e8f67456db872144eb72f6ed8ad479bc16630853
35937fb21ceb0b180a93629bc50c2d2db81c5b3c
refs/heads/master
<repo_name>marcusfgardiner/makers_exercises<file_sep>/spec/sort_array_add_one_spec.rb require "sort_array_add_one" describe 'sort_array' do it "sorts array" do expect(sort_array([2,4,3,1,5])).to eq([1,2,3,4,5]) end end describe "add one" do it "adds one" do expect(add_one_to_array([1,2,3,4,5])).to eq([2,3,4,5,6]) end end describe "sort array and add" do it "sorts array and then adds one to each element" do expect(sort_array_add_one([2,4,3,1,5])). to eq ([2,3,4,5,6]) end end <file_sep>/lib/sort_array.rb def sort_array(array) array.sort end<file_sep>/spec/sum_of_array_spec.rb require 'sum_of_array.rb' describe 'sum of arrays' do it 'when passed an array of integers it adds all the integers together' do expect(sum_of_array([1,2,3,4,5])).to eq(15) end end describe 'times an integer by two' do it "times two" do expect(times_two(15)).to eq(30) end end describe 'sum array and times an integer by two' do it "sum array times two" do expect(sum_array_times_two([1,2,3,4,5])).to eq(30) end end <file_sep>/lib/array_add_one.rb def array_add_one(array) array.map { |number| number + 1} end <file_sep>/spec/add_one_to_values_spec.rb require "add_one_to_values" describe "adds on to hash values" do it "adds one to hash value" do expect(add_one_to_value({a: 1})).to eq({a: 2}) end it "adds one to hash values" do expect(add_one_to_value({a: 1, b: 2})).to eq({a: 2, b: 3}) end end<file_sep>/spec/sort_array_spec.rb require 'sort_array.rb' describe 'sort_array' do it 'when passed an array with two integer elements they are sorted' do expect(sort_array([5,2])).to eq([2,5]) end it 'when passed [1, 3, 5, 4, 2], the integer elements are sorted' do expect(sort_array([1, 3, 5, 4, 2])).to eq([1,2,3,4,5]) end end<file_sep>/lib/add_one_to_values.rb def add_one_to_value(hash) h = Hash.new hash.each do|key, value| h[key] = (value += 1) end h end<file_sep>/lib/sort_array_add_one.rb def sort_array(array3) array3.sort end def add_one_to_array(array2) array2.map{|number| number += 1} end def 
sort_array_add_one(array1) array2 = sort_array(array1) add_one_to_array(array2) end <file_sep>/spec/array_add_one_spec.rb # Use test-driven development to write a method that: # * Takes an array of numbers. # * Returns an array of the same numbers, except each number has had 1 added to it. # * e.g. # * Input: [1, 2, 3, 4, 5] # * Return: [2, 3, 4, 5, 6] # * Make sure to create a separate project directory for your code. require "array_add_one.rb" describe "array_add_one_spec" do it "returns 2, 3 when given 1, 2" do expect(array_add_one([1, 2])).to eq([2, 3]) end it "returns 2..6 when given 1..5" do expect(array_add_one([1, 2, 3, 4, 5])).to eq([2, 3, 4, 5, 6]) end it "returns an array with plus one to each value" do expect(array_add_one([11, 21, 31, 41, 51])).to eq([12, 22, 32, 42, 52]) end it "returns nil when given an empty array" do expect(array_add_one([])).to eq([]) end end <file_sep>/lib/sum_of_array.rb def sum_of_array(array) array.reduce(0,:+) end def times_two(n) n * 2 end def sum_array_times_two(array) array2 = sum_of_array(array) times_two(array2) end
f6484075b775835218b69a4109149dfff9fb9d62
[ "Ruby" ]
10
Ruby
marcusfgardiner/makers_exercises
fa563522b31b4905bae260f2cd6df6618fce2581
3aa1c259c938deda45352154a72f228a04a5250a
refs/heads/master
<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef CHECKORDER_VECTOR_H__ #define CHECKORDER_VECTOR_H__ #include "..\vector\vector.h" template <typename T> void checkOrder ( Vector<T> & V ) { //判断向量是否整体有序 int unsorted = 0; //逆序计数器 V.traverse ((CheckOrder<T> &)CheckOrder<T> ( unsorted, V[0] ) ); //进行遍历 if ( 0 < unsorted ) printf ( "Unsorted with %d adjacent disordered pair(s)\n", unsorted ); else printf ( "Sorted\n" ); } //template <typename T> void checkOrder(Vector<T> & V) { // if (0 < V.disordered()) // printf("Unsorted with %d adjacent disordered pair(s)\n", V.disordered()); // else // printf("Sorted\n"); //} #endif // !CHECKORDER_VECTOR_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef AVL_H__ #define AVL_H__ #include "../BST/BST.h" /* 基于BST实现AVL树 */ /****************************************************************************************** * 在左、右孩子中取更高者 * 在AVL平衡调整前,借此确定重构方案 ******************************************************************************************/ #define tallerChild(x) ( \ stature( (x)->lc ) > stature( (x)->rc ) ? (x)->lc : ( /*左高*/ \ stature( (x)->lc ) < stature( (x)->rc ) ? (x)->rc : ( /*右高*/ \ IsLChild( * (x) ) ? 
(x)->lc : (x)->rc /*等高:与父亲x同侧者(zIg-zIg或zAg-zAg)优先*/ \ ) \ ) \ ) /* 由BST派生AVL树模板类 */ template <typename T> class AVL : public BST<T> { public: BinNodePosi(T) insert ( const T& e ); /* 插入(重写) */ bool remove ( const T& e ); /* 删除(重写) */ /* BST::search()等其余接口可直接沿用 */ }; /* 将关键码e插入AVL树中 */ template <typename T> BinNodePosi(T) AVL<T>::insert(const T& e) { BinNodePosi(T) & x = this->search(e); if (x) /* 确认目标节点不存在 */ return x; BinNodePosi(T) xx = x = new BinNode<T>(e, this->_hot); /* 创建新节点x */ this->_size++; /* 此时,x的父亲_hot若增高,则其祖父有可能失衡 */ for (BinNodePosi(T) g = this->_hot; g; g = g->parent) /* 从x之父出发向上,逐层检查各代祖先g */ { if (!AvlBalanced(*g)) { /* 一旦发现g失衡,则(采用“3 + 4”算法)使之复衡,并将子树 */ FromParentTo(*g) = this->rotateAt(tallerChild(tallerChild(g))); /* 重新接入原树 */ break; /* g复衡后,局部子树高度必然复原;其祖先亦必如此,故调整随即结束 */ } else /* 否则(g依然平衡),只需简单地 */ this->updateHeight(g); /* 更新其高度(注意:即便g未失衡,高度亦可能增加) */ } /* 至多只需一次调整;若果真做过调整,则全树高度必然复原 */ return xx; /* 返回新节点位置 */ } /* 无论e是否存在于原树中,总有AVL::insert(e)->data == e */ /* 从AVL树中删除关键码e */ template <typename T> bool AVL<T>::remove(const T& e) { BinNodePosi(T) & x = this->search(e); if (!x) return false; /* 确认目标存在(留意_hot的设置) */ removeAt(x, this->_hot); /* 先按BST规则删除之(此后,原节点之父_hot及其祖先均可能失衡) */ this->_size--; for (BinNodePosi(T) g = this->_hot; g; g = g->parent) /* 从_hot出发向上,逐层检查各代祖先g */ { if (!AvlBalanced(*g)) /* 一旦发现g失衡,则(采用“3 + 4”算法)使之复衡,并将该子树联至 */ g = FromParentTo(*g) = this->rotateAt(tallerChild(tallerChild(g))); /* 原父亲 */ this->updateHeight(g); /* 并更新其高度(注意:即便g未失衡,高度亦可能降低) */ } /* 可能需做Omega(logn)次调整——无论是否做过调整,全树高度均可能降低 */ return true; /* 删除成功 */ } /* 若目标节点存在且被删除,返回true;否则返回false */ #endif // !AVL_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef CRC_ELEM_H__ #define CRC_ELEM_H__ template <typename T> struct Crc { //函数对象:累计T类对象的特征(比如总和),以便校验对象集合 T& c; Crc ( T& crc ) : c ( crc ) {} virtual void operator() ( T& e ) { c += e; } //假设T可直接相加 }; #endif <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef CHECKORDER_ELEM_H__ #define CHECKORDER_ELEM_H__ template <typename T> struct CheckOrder { //函数对象:判断一个T类对象是否局部有序 T pred; int& u; CheckOrder ( int& unsorted, T& first ) : pred ( first ), u ( unsorted ) { } virtual void operator() ( T& e ) { if ( pred > e ) u++; pred = e; } }; #endif // !CHECKORDER_ELEM_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ /****************************************************************************************** * Test of B-Tree ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "BTree.h" /****************************************************************************************** * Test a BTree ******************************************************************************************/ template <typename T> void testBTree ( int order, int n ) { BTree<T> bt ( order ); while ( bt.size() < n ) { T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e switch ( dice ( 3 ) ) { case 0: { //查找,成功率 <= 33.3% printf ( "Searching for " ); print ( e ); printf ( " ... " ); BTNodePosi(T) p = bt.search ( e ); printf ( p ? "Found\n" : "Not found\n" ); break; } case 1: { //删除,成功率 <= 33.3% printf ( "Removing " ); print ( e ); printf ( " ... " ); bt.remove ( e ) ? printf ( "Done\n" ), print ( bt ) : (void)printf ( "Not exists\n" ); break; } default: {//插入,成功率 == 100% printf ( "Inserting " ); print ( e ); printf ( " ... " ); int oldSize = bt.size(); bt.insert ( e ) ? printf ( "Done\n" ), print ( bt ) : (void)printf ( "Dup key\n" ); break; } } } while ( bt.size() > 0 ) { T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e printf ( "Removing " ); print ( e ); printf ( " ... " ); bt.remove ( e ) ? 
printf ( "Done\n" ), print ( bt ) : (void)printf ( "not exists\n" ); } } /****************************************************************************************** * 测试主入口 ******************************************************************************************/ int main ( int argc, char* argv[] ) { int order = 4; if ( order < 3 ) { printf ( "Make sure the order (%d) is no less than 3.\a\a\n", order ); return 1; } int size = 10; srand ( ( unsigned int ) time ( NULL ) ); testBTree<int> (order, size); //元素类型、比较器可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef BTNODE_H__ #define BTNODE_H__ #include "../vector/vector.h" #define BTNodePosi(T) BTNode<T>* /* B-树节点位置 */ /* B-树节点模板类 */ template <typename T> struct BTNode { /* 成员(为简化描述起见统一开放,读者可根据需要进一步封装) */ BTNodePosi(T) parent; /* 父节点 */ Vector<T> key; /* 关键码向量 */ Vector<BTNodePosi(T)> child; /* 孩子向量(其长度总比key多一) */ /* 构造函数(注意:BTNode只能作为根节点创建,而且初始时有0个关键码和1个空孩子指针) */ BTNode() { parent = NULL; child.insert ( 0, NULL ); } BTNode ( T e, BTNodePosi(T) lc = NULL, BTNodePosi(T) rc = NULL ) { parent = NULL; /* 作为根节点,而且初始时 */ key.insert ( 0, e ); /* 只有一个关键码,以及 */ child.insert ( 0, lc ); /* 两个孩子 */ child.insert ( 1, rc ); if ( lc ) lc->parent = this; if ( rc ) rc->parent = this; } }; #endif // !BTNODE_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef PQ_COMPLHEAP_H__ #define PQ_COMPLHEAP_H__ #include "../Vector/Vector.h" //借助多重继承机制,基于向量 #include "../PQ/PQ.h" //按照优先级队列ADT实现的 #define InHeap(n, i) ( ( ( -1 ) < ( i ) ) && ( ( i ) < ( n ) ) ) //判断PQ[i]是否合法 #define Parent(i) ( ( i - 1 ) >> 1 ) //PQ[i]的父节点(floor((i-1)/2),i无论正负) #define LastInternal(n) Parent( n - 1 ) //最后一个内部节点(即末节点的父亲) #define LChild(i) ( 1 + ( ( i ) << 1 ) ) //PQ[i]的左孩子 #define RChild(i) ( ( 1 + ( i ) ) << 1 ) //PQ[i]的右孩子 #define ParentValid(i) ( 0 < i ) //判断PQ[i]是否有父亲 #define LChildValid(n, i) InHeap( n, LChild( i ) ) //判断PQ[i]是否有一个(左)孩子 #define RChildValid(n, i) InHeap( n, RChild( i ) ) //判断PQ[i]是否有两个孩子 #define Bigger(PQ, i, j) ( lt( PQ[i], PQ[j] ) ? j : i ) //取大者(等时前者优先) #define ProperParent(PQ, n, i) /*父子(至多)三者中的大者*/ \ ( RChildValid(n, i) ? Bigger( PQ, Bigger( PQ, i, LChild(i) ), RChild(i) ) : \ ( LChildValid(n, i) ? Bigger( PQ, i, LChild(i) ) : i \ ) \ ) //相等时父节点优先,如此可避免不必要的交换 //完全二叉堆 template <typename T> class PQ_ComplHeap : public PQ<T>, public Vector<T> { /*DSA*/friend class UniPrint; //演示输出使用,否则不必设置友类 protected: Rank percolateDown ( Rank n, Rank i ); //下滤 Rank percolateUp ( Rank i ); //上滤 void heapify ( Rank n ); //Floyd建堆算法 public: PQ_ComplHeap() { } //默认构造 PQ_ComplHeap ( T* A, Rank n ) //批量构造 { Vector<T> ::copyFrom ( A, 0, n ); heapify ( n ); } void insert ( T ); //按照比较器确定的优先级次序,插入词条 T getMax(); //读取优先级最高的词条 T delMax(); //删除优先级最高的词条 }; //PQ_ComplHeap #include "../_share/release.h" #include "../_share/util.h" //对向量前n个词条中的第i个实施下滤,i < n template <typename T> Rank PQ_ComplHeap<T>::percolateDown(Rank n, Rank i) { Rank j; //i及其(至多两个)孩子中,堪为父者 while (i != (j = ProperParent(this->_elem, n, i))) //只要i非j,则 { swap(this->_elem[i], this->_elem[j]); i = j; } //二者换位,并继续考查下降后的i return i; //返回下滤抵达的位置(亦i亦j) } //对向量中的第i个词条实施上滤操作,i < _size template <typename T> Rank PQ_ComplHeap<T>::percolateUp(Rank i) { while (ParentValid(i)) { //只要i有父亲(尚未抵达堆顶),则 Rank j = 
Parent(i); //将i之父记作j if (lt(this->_elem[i], this->_elem[j])) break; //一旦当前父子不再逆序,上滤旋即完成 swap(this->_elem[i], this->_elem[j]); //否则,父子交换位置,并继续考查上一层 i = j; } //while return i; //返回上滤最终抵达的位置 } //Floyd建堆算法,O(n)时间 template <typename T> void PQ_ComplHeap<T>::heapify(Rank n) { for (int i = LastInternal(n); InHeap(n, i); i--) //自底而上,依次 /*DSA*/ { percolateDown(n, i); //下滤各内部节点 #if 0 /*DSA*/ for (int k = 0; k < n; k++) { /*DSA*/ int kk = k; while (i < kk) kk = (kk - 1) / 2; /*DSA*/ i == kk ? print(_elem[k]) : print(" "); /*DSA*/ }; printf("\n"); /*DSA*/ #endif } } //将词条插入完全二叉堆中 template <typename T> void PQ_ComplHeap<T>::insert(T e) { Vector<T>::insert(e); //首先将新词条接至向量末尾 percolateUp(this->_size - 1); //再对该词条实施上滤调整 } //取优先级最高的词条 template <typename T> T PQ_ComplHeap<T>::getMax() { return this->_elem[0]; } //删除非空完全二叉堆中优先级最高的词条 template <typename T> T PQ_ComplHeap<T>::delMax() { T maxElem = this->_elem[0]; this->_elem[0] = this->_elem[--this->_size]; //摘除堆顶(首词条),代之以末词条 percolateDown(this->_size, 0); //对新堆顶实施下滤 return maxElem; //返回此前备份的最大词条 } #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef DICTIONARY_H__ #define DICTIONARY_H__ /* 词典Dictionary模板类 */ template <typename K, typename V> struct Dictionary { virtual int size() const = 0; /* 当前词条总数 */ virtual bool put ( K, V ) = 0; /* 插入词条(禁止雷同词条时可能失败) */ virtual V* get ( K k ) = 0; /* 读取词条 */ virtual bool remove ( K k ) = 0; /* 删除词条 */ }; #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learnĄ˘add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef COMPARATOR_H__ #define COMPARATOR_H__ template <typename T> static bool lt ( T* a, T* b ) { return lt ( *a, *b ); } //less than template <typename T> static bool lt ( T& a, T& b ) { return a < b; } //less than template <typename T> static bool eq ( T* a, T* b ) { return eq ( *a, *b ); } //equal template <typename T> static bool eq ( T& a, T& b ) { return a == b; } //equal #endif // COMPARATOR_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef HASHTABLE_H__ #define HASHTABLE_H__ #include "dictionary.h" /* 引入词典ADT */ #include "../bitmap/bitmap.h" /* 引入位图 */ #include "../prime/primeNLT.h" /* 引入素数 */ #include "hashtable_hashcode.h" /* 引入散列函数 */ /* 符合Dictionary接口的Hashtable模板类 */ template <typename K, typename V> /* key、value */ class Hashtable : public Dictionary<K, V> { /*DSA*/friend class UniPrint; private: Entry<K, V>** ht; /* 桶数组,存放词条指针 */ int M; /* 桶数组容量 */ int N; /* 词条数量 */ Bitmap* lazyRemoval; /* 懒惰删除标记 */ #define lazilyRemoved(x) (lazyRemoval->test(x)) #define markAsRemoved(x) (lazyRemoval->set(x)) protected: int probe4Hit ( const K& k ); /* 沿关键码k对应的查找链,找到词条匹配的桶 */ int probe4Free ( const K& k ); /* 沿关键码k对应的查找链,找到首个可用空桶 */ void rehash(); /* 重散列算法:扩充桶数组,保证装填因子在警戒线以下 */ public: Hashtable ( int c = 5 ); /* 创建一个容量不小于c的散列表(为测试暂时选用较小的默认值) */ ~Hashtable(); /* 释放桶数组及其中各(非空)元素所指向的词条 */ int size() const { return N; } /* 当前的词条数目 */ bool put ( K, V ); /* 插入(禁止雷同词条,故可能失败) */ V* get ( K k ); /* 读取 */ bool remove ( K k ); /* 删除 */ }; /* 创建散列表,容量为不小于c的素数M */ template <typename K, typename V> Hashtable<K, V>::Hashtable(int c) { /* 不小于c的素数M */ char * pcExternalFile = new char[64]; 
strncpy(pcExternalFile, "../_input/prime-1048576-bitmap.txt", 64); M = primeNLT(c, 1048576, pcExternalFile); //M = primeNLT(c, 1048576, "../_input/prime-1048576-bitmap.txt"); N = 0; ht = new Entry<K, V>*[M]; /* 开辟桶数组(还需核对申请成功),初始装填因子为N/M = 0% */ memset(ht, 0, sizeof(Entry<K, V>*) *M); /* 初始化各桶 */ lazyRemoval = new Bitmap(M); /* 懒惰删除标记比特图 */ //*DSA*/printf("A bucket array has been created with capacity = %d\n\n", M); } /* 析构前释放桶数组及非空词条 */ template <typename K, typename V> Hashtable<K, V>::~Hashtable() { for (int i = 0; i < M; i++) /* 逐一检查各桶 */ if (ht[i]) release(ht[i]); /* 释放非空的桶 */ release(ht); /* 释放桶数组 */ release(lazyRemoval); /* 释放懒惰删除标记 */ } /****************************************************************************************** * 沿关键码k对应的查找链,找到与之匹配的桶(供查找和删除词条时调用) * 试探策略多种多样,可灵活选取;这里仅以线性试探策略为例 ******************************************************************************************/ template <typename K, typename V> int Hashtable<K, V>::probe4Hit(const K& k) { int r = hashCode(k) % M; /* 从起始桶(按除余法确定)出发 */ //*DSA*/printf(" ->%d", r); while ((ht[r] && (k != ht[r]->key)) || (!ht[r] && lazilyRemoved(r))) r = (r + 1) % M; /* 沿查找链线性试探:跳过所有冲突的桶,以及带懒惰删除标记的桶 */ //*DSA*/printf(" ->%d", r); //*DSA*/printf("\n"); return r; /* 调用者根据ht[r]是否为空,即可判断查找是否成功,ht[r]为空:查找成功,ht[r]非空:查找失败(key雷同元素) */ } /****************************************************************************************** * 沿关键码k对应的查找链,找到首个可用空桶(仅供插入词条时调用) * 试探策略多种多样,可灵活选取;这里仅以线性试探策略为例 ******************************************************************************************/ template <typename K, typename V> int Hashtable<K, V>::probe4Free(const K& k) { int r = hashCode(k) % M; /* 从起始桶(按除余法确定)出发 */ //*DSA*/printf(" ->%d", r); /* 首个试探的桶单元地址 */* while (ht[r]) /* 沿查找链逐桶试探,直到首个空桶(无论是否带有懒惰删除标记) */ r = (r + 1) % M; //*DSA*/ while (ht[r]) { r = (r+1) % M; printf(" ->%d", r); } printf("\n"); return r; /* 为保证空桶总能找到,装填因子及散列表长需要合理设置 */ } 
/****************************************************************************************** * 重散列算法:装填因子过大时,采取“逐一取出再插入”的朴素策略,对桶数组扩容 * 不可简单地(通过memcpy())将原桶数组复制到新桶数组(比如前端),否则存在两个问题: * 1)会继承原有冲突;2)可能导致查找链在后端断裂——即便为所有扩充桶设置懒惰删除标志也无济于事 ******************************************************************************************/ template <typename K, typename V> void Hashtable<K, V>::rehash() { int old_capacity = M; Entry<K, V>** old_ht = ht; /* 容量至少加倍 */ char * pcExternalFile = new char[64]; strncpy(pcExternalFile, "../_input/prime-1048576-bitmap.txt", 64); M = primeNLT(2 * M, 1048576, pcExternalFile); //M = primeNLT(2 * M, 1048576, "../_input/prime-1048576-bitmap.txt"); N = 0; ht = new Entry<K, V>*[M]; memset(ht, 0, sizeof(Entry<K, V>*) * M); /* 新桶数组 */ release(lazyRemoval); lazyRemoval = new Bitmap(M); /* 新开懒惰删除标记比特图 */ //*DSA*/printf("A bucket array has been created with capacity = %d\n\n", M); */ for (int i = 0; i < old_capacity; i++) /* 扫描原桶数组 */ if (old_ht[i]) /* 将非空桶中的词条逐一 */ put(old_ht[i]->key, old_ht[i]->value); /* 插入至新的桶数组 */ release(old_ht); /* 释放原桶数组——由于其中原先存放的词条均已转移,故只需释放桶数组本身 */ } /* 散列表词条插入 */ template <typename K, typename V> bool Hashtable<K, V>::put(K k, V v) { if (ht[probe4Hit(k)]) /* probe4Hit函数仅仅给出应该放入的位置:r ,新元素的空间还需另外开辟,所以此处如果找到的位置已经不为空,则必然为雷同元素 */ return false; /* 雷同元素不必重复插入 */ int r = probe4Free(k); /* 为新词条找个空桶(只要装填因子控制得当,必然成功) */ ht[r] = new Entry<K, V>(k, v); /* 插入(注意:懒惰删除标记无需复位) */ ++N; if (N * 2 > M) rehash(); /* 装填因子高于50%后重散列 */ return true; } /* 散列表词条查找算法 */ template <typename K, typename V> V* Hashtable<K, V>::get(K k){ int r = probe4Hit(k); return ht[r] ? 
&(ht[r]->value) : NULL; /* 禁止词条的key值雷同 */ } /* 散列表词条删除算法 */ template <typename K, typename V> bool Hashtable<K, V>::remove(K k) { int r = probe4Hit(k); if (!ht[r]) return false; /* 对应词条不存在时,无法删除 */ release(ht[r]); /* 否则释放桶中词条,设置懒惰删除标记,并更新词条总数 */ ht[r] = NULL; markAsRemoved(r); N--; return true; } #endif <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "[email protected]" int testID = 0; //测试编号 /****************************************************************************************** * 测试栈 ******************************************************************************************/ template <typename T> //元素类型 void testStack(int n) { Stack<T> S; printf("\n ==== Test %2d. Growing stack\n", testID++); while (S.size() < n) { T e = rand() % (T)2 * n; if (S.empty() || (30 < (rand() % 100))) {//70%概率入栈 printf("pushing "); print(e); printf(" ...\n"); S.push(e); } else { //30%概率出栈 printf("poping with ... "); print(S.pop()); printf("\n"); } print(S); } printf("\n ==== Test %2d. Shrinking stack\n", testID++); while (!S.empty()) { T e = dice((T)2 * n); if (70 < dice(100)) { //30%概率入栈 printf("pushing "); print(e); printf(" ...\n"); S.push(e); } else { //70%概率出栈 printf("poping with ... 
"); print(S.pop()); printf("\n"); } print(S); } } /****************************************************************************************** * 测试栈 ******************************************************************************************/ int main(int argc, char* argv[]) { string strNumber = "5"; srand((unsigned int)time(NULL)); testStack<int>(atoi(strNumber.c_str())); //元素类型可以在这里任意选择 return 0; } <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ /****************************************************************************************** * Test of AVL Tree ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "AVL.h" /****************************************************************************************** * Test an AVL ******************************************************************************************/ template <typename T> void testAVL ( int n ) { AVL<T> avl; while ( avl.size() < n ) { T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e switch ( dice ( 3 ) ) { case 0: { //查找,成功率 <= 33.3% printf ( "Searching for " ); print ( e ); printf ( " ...\n" ); BinNodePosi(T) & p = avl.search ( e ); p ? printf ( "Found with" ), print ( p ), printf ( "\n" ) : printf ( "Not found\n" ); break; } case 1: { //删除,成功率 <= 33.3% printf ( "Removing " ); print ( e ); printf ( " ...\n" ); avl.remove ( e ) ? 
printf ( "Done\n" ), print ( avl ) : (void)printf ( "Not exists\n" ); break; } default: {//插入,成功率 == 100% printf ( "Inserting " ); print ( e ); printf ( " ...\n" ); BinNodePosi(T) p = avl.insert ( e ); if ( p->data != e ) { print ( p->data ); printf ( " <> " ); print ( e ); printf ( "\n" ); } printf ( "Done with" ), print ( p ), printf ( "\n" ), print ( avl ); break; } } } while ( avl.size() > 0 ) { T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e printf ( "Removing " ); print ( e ); printf ( " ...\n" ); avl.remove ( e ) ? printf ( "Done\n" ), print ( avl ) : (void)printf ( "Not exists\n" ); } } /****************************************************************************************** * 测试主入口 ******************************************************************************************/ int main ( int argc, char* argv[] ) { int size = 10; srand ( ( unsigned int ) time ( NULL ) ); testAVL<int> (size); //元素类型可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef CRC_VECTOR_H__ #define CRC_VECTOR_H__ #include "..\vector\vector.h" template <typename T> void crc ( Vector<T> & V ) { //统计向量的特征(所有元素之和) //T crc = 0; V.traverse ( Crc<T> ( crc ) ); //以crc为基本操作进行遍历 T crc = 0; V.traverse((Crc<T> &) Crc<T>(crc)); //以crc为基本操作进行遍历 printf ( "CRC =" ); print ( crc ); printf ( "\n" ); //输出统计得到的特征 } #endif // !CRC_VECTOR_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef QUEUE_H__ #define QUEUE_H__ /* 以List为基类 */ #include "../list/list.h" /* 队列模板类(继承List原有接口) */ template <typename T> class Queue : public List<T> { public: /* size()、empty()以及其它开放接口均可直接沿用 */ void enqueue(T const& e) { (this->insertAsLast(e)); } /* 入队:尾部插入 */ T dequeue() { return (this->remove( this->first() )); } /* 出队:首部删除 */ T& front() { return (this->first())->data; } /* 队首 */ }; #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef UTIL_H__ #define UTIL_H__ #define _CRT_SECURE_NO_WARNINGS #include <iostream> using namespace std; /****************************************************************************************** * 测试、演示用小工具 ******************************************************************************************/ #include <stdio.h> #include <stdlib.h> #include <time.h> #include <math.h> #include <typeinfo.h> #include "comparator.h" #include "checkOrder_Elem.h" #include "checkOrder_list.h" #include "checkOrder_vector.h" #include "double_Elem.h" #include "increase_Elem.h" #include "hailstone_Elem.h" #include "increase_list.h" #include "increase_vector.h" #include "crc_Elem.h" #include "crc_list.h" #include "crc_vector.h" #include "rand.h" //随机数 #if defined(DSA_DEBUG) //编译开关,控制调试输出 #define DSA(x) { x } //输出 #else #define DSA(x) //不输出 #endif #endif // !UTIL_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef REDBLACK_H__ #define REDBLACK_H__ #include "../BST/BST.h" /* 基于BST实现RedBlack */ /* RedBlack树模板类 */ template <typename T> class RedBlack : public BST<T> { protected: void solveDoubleRed ( BinNodePosi(T) x ); /* 双红修正 */ void solveDoubleBlack ( BinNodePosi(T) x ); /* 双黑修正 */ int updateHeight ( BinNodePosi(T) x ); /* 更新节点x的高度 */ public: BinNodePosi(T) insert ( const T& e ); /* 插入(重写) */ bool remove ( const T& e ); /* 删除(重写) */ /* BST::search()等其余接口可直接沿用 */ }; /* 将e插入红黑树 */ template <typename T> BinNodePosi(T) RedBlack<T>::insert(const T& e) { BinNodePosi(T) & x = this->search(e); if (x) return x; /* 确认目标不存在(留意对_hot的设置) */ x = new BinNode<T>(e, this->_hot, NULL, NULL, -1); /* 创建红节点x:以_hot为父,黑高度-1 */ this->_size++; solveDoubleRed(x); /* 经双红修正后,即可返回 */ return x ? x : this->_hot->parent; } /* 无论e是否存在于原树中,返回时总有x->data == e */ /* 从红黑树中删除关键码e */ template <typename T> bool RedBlack<T>::remove(const T& e) { BinNodePosi(T) & x = this->search(e); if (!x) /* 确认目标存在(留意_hot的设置) */ return false; BinNodePosi(T) r = removeAt(x, this->_hot); /* 实施删除 */ if (!(--this->_size)) return true; /* assert: _hot某一孩子刚被删除,且被r所指节点(可能是NULL)接替。以下检查是否失衡,并做必要调整 */ if (!this->_hot) /* 若刚被删除的是根节点,则将其置黑,并更新黑高度 */ { this->_root->color = RB_BLACK; updateHeight(this->_root); return true; } /* assert: 以下,原x(现r)必非根,_hot必非空 */ if (BlackHeightUpdated(*(this->_hot))) /* 若所有祖先的黑深度依然平衡,则无需调整 */ return true; if (IsRed(r)) /* 否则,若r为红,则只需令其转黑 */ { r->color = RB_BLACK; r->height++; return true; } /* assert: 以下,原x(现r)均为黑色 */ // *DSA*/printBinTree(_hot, 0, 0); solveDoubleBlack(r); /* 经双黑调整后返回 */ return true; } /* 若目标节点存在且被删除,返回true;否则返回false */ /* 更新节点高度 */ template <typename T> int RedBlack<T>::updateHeight(BinNodePosi(T) x) { x->height = max(stature(x->lc), stature(x->rc));/* 孩子一般黑高度相等,除非出现双黑 */ // /*DSA*/ /* 红黑树中各节点左、右孩子的黑高度通常相等 */ // /*DSA*/ /* 这里之所以取更大值,是便于在删除节点后的平衡调整过程中,正确更新被删除节点父亲的黑高度 */ // /*DSA*/ /* 
否则,rotateAt()会根据被删除节点的替代者(高度小一)设置父节点的黑高度 */ return IsBlack(x) ? x->height++ : x->height; /* 若当前节点为黑,则计入黑深度 */ } /* 因统一定义stature(NULL) = -1,故height比黑高度少一,好在不致影响到各种算法中的比较判断 */ /****************************************************************************************** * RedBlack双红调整算法:解决节点x与其父均为红色的问题。分为两大类情况: * RR-1:2次颜色翻转,2次黑高度更新,1~2次旋转,不再递归 * RR-2:3次颜色翻转,3次黑高度更新,0次旋转,需要递归 ******************************************************************************************/ template <typename T> void RedBlack<T>::solveDoubleRed(BinNodePosi(T) x) /* x当前必为红 */ { if (IsRoot(*x)) /* 若已(递归)转至树根,则将其转黑,整树黑高度也随之递增 */ { this->_root->color = RB_BLACK; this->_root->height++; return; } /* 否则,x的父亲p必存在 */ BinNodePosi(T) p = x->parent; if (IsBlack(p)) return; /* 若p为黑,则可终止调整。否则 */ BinNodePosi(T) g = p->parent; /* 既然p为红,则x的祖父必存在,且必为黑色 */ BinNodePosi(T) u = uncle(x); /* 以下,视x叔父u的颜色分别处理 */ if (IsBlack(u)) { /* u为黑色(含NULL)时 */ // *DSA*/printf(" case RR-1:\n"); if (IsLChild(*x) == IsLChild(*p)) /* 若x与p同侧(即zIg-zIg或zAg-zAg),则 */ p->color = RB_BLACK; /* p由红转黑,x保持红 */ else /* 若x与p异侧(即zIg-zAg或zAg-zIg),则 */ x->color = RB_BLACK; /* x由红转黑,p保持红 */ g->color = RB_RED; /* g必定由黑转红 */ /* 以上虽保证总共两次染色,但因增加了判断而得不偿失 */ /* 在旋转后将根置黑、孩子置红,虽需三次染色但效率更高 */ BinNodePosi(T) gg = g->parent; /* 曾祖父(great-grand parent) */ BinNodePosi(T) r = FromParentTo(*g) = this->rotateAt(x); /* 调整后的子树根节点 */ r->parent = gg; /* 与原曾祖父联接 */ } else { /* 若u为红色 */ // *DSA*/printf(" case RR-2:\n"); p->color = RB_BLACK; /* p由红转黑 */ p->height++; u->color = RB_BLACK; /* u由红转黑 */ u->height++; if (!IsRoot(*g)) /* g若非根,则转红 */ g->color = RB_RED; solveDoubleRed(g); /* 继续调整g(类似于尾递归,可优化为迭代形式) */ } } /****************************************************************************************** * RedBlack双黑调整算法:解决节点x与被其替代的节点均为黑色的问题 * 分为三大类共四种情况: * BB-1 :2次颜色翻转,2次黑高度更新,1~2次旋转,不再递归 * BB-2R:2次颜色翻转,2次黑高度更新,0次旋转,不再递归 * BB-2B:1次颜色翻转,1次黑高度更新,0次旋转,需要递归 * BB-3 :2次颜色翻转,2次黑高度更新,1次旋转,转为BB-1或BB2R 
******************************************************************************************/ template <typename T> void RedBlack<T>::solveDoubleBlack(BinNodePosi(T) r) { BinNodePosi(T) p = r ? r->parent : this->_hot; if (!p) return; /* r的父亲 */ BinNodePosi(T) s = (r == p->lc) ? p->rc : p->lc; /* r的兄弟 */ if (IsBlack(s)) { /* 兄弟s为黑 */ BinNodePosi(T) t = NULL; /* s的红孩子(若左、右孩子皆红,左者优先;皆黑时为NULL) */ if (IsRed(s->rc)) t = s->rc; /* 右子 */ if (IsRed(s->lc)) t = s->lc; /* 左子 */ if (t) { /* 黑s有红孩子:BB-1 */ // *DSA*/printf(" case BB-1: Child ("); print(s->lc); printf(") of BLACK sibling ("); print(s); printf(") is RED\n"); */ RBColor oldColor = p->color; /* 备份原子树根节点p颜色,并对t及其父亲、祖父 */ /* 以下,通过旋转重平衡,并将新子树的左、右孩子染黑 */ BinNodePosi(T) b = FromParentTo(*p) = this->rotateAt(t); /* 旋转 */ if (HasLChild(*b)) { /* 左子 */ b->lc->color = RB_BLACK; updateHeight(b->lc); } if (HasRChild(*b)) { /* 右子 */ b->rc->color = RB_BLACK; updateHeight(b->rc); } b->color = oldColor; updateHeight(b); /* 新子树根节点继承原根节点的颜色 */ // *DSA*/printBinTree(b, 0, 0); } else { /* 黑s无红孩子 */ s->color = RB_RED; /* s转红 */ s->height--; if (IsRed(p)) { /* BB-2R */ // *DSA*/printf(" case BB-2R: Both children ("); print(s->lc); printf(") and ("); print(s->rc); printf(") of BLACK sibling ("); print(s); printf(") are BLACK, and parent ("); print(p); printf(") is RED\n"); /* s孩子均黑,p红 p->color = RB_BLACK; /* p转黑,但黑高度不变 */ // *DSA*/printBinTree(p, 0, 0); } else { /* BB-2B */ // *DSA*/printf(" case BB-2R: Both children ("); print(s->lc); printf(") and ("); print(s->rc); printf(") of BLACK sibling ("); print(s); printf(") are BLACK, and parent ("); print(p); printf(") is BLACK\n"); /* s孩子均黑,p黑 p->height--; /* p保持黑,但黑高度下降 */ // *DSA*/printBinTree(p, 0, 0); */ solveDoubleBlack(p); /* 递归上溯 */ } } } else /* 兄弟s为红:BB-3 */ { // *DSA*/printf(" case BB-3: sibling ("); print(s); printf(" is RED\n"); /* s红(双子俱黑) */ s->color = RB_BLACK; p->color = RB_RED; /* s转黑,p转红 */ BinNodePosi(T) t = IsLChild(*s) ? 
s->lc : s->rc; /* 取t与其父s同侧 */ this->_hot = p; FromParentTo(*p) = this->rotateAt(t); /* 对t及其父亲、祖父做平衡调整 */ // *DSA*/printBinTree<T>(s, 0, 0); solveDoubleBlack(r); /* 继续修正r处双黑——此时的p已转红,故后续只能是BB-1或BB-2R */ } } #endif // !REDBLACK_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "Splay.h" template <typename T> void testSplayPeriod ( int n ) { //周期性访问测试 Splay<T> splay; for ( int i = 0; i < n; i++ ) splay.insert ( ( T ) i ); print ( splay ); for ( int i = 0; i < n; i++ ) { splay.search ( ( T ) i ); print ( splay ); } } template <typename T> void testSplayRandom ( int n ) { //随机访问测试 Splay<T> splay; while ( splay.size() < n ) { T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e switch ( dice ( 3 ) ) { case 0: { //查找,成功率 <= 33.3% printf ( "Searching for " ); print ( e ); printf ( " ...\n" ); splay.search ( e ) ? printf ( "Found with" ), print ( splay.root() ), printf ( "\n" ) : printf ( "Not found\n" ); break; } case 1: { //删除,成功率 <= 33.3% printf ( "Removing " ); print ( e ); printf ( " ...\n" ); splay.remove ( e ) ? (void)printf ( "Removal done\n" ) : print ( e ), (void)printf ( " not exists\n" ); break; } default: {//插入,成功率 == 100% printf ( "Inserting " ); print ( e ); printf ( " ...\n" ); splay.insert ( e ); ( e == splay.root()->data ) ? printf ( "Insertion done with" ), print ( splay.root() ), (void)printf ( "\n" ) : print ( e ), "duplicated"; break; } } //switch print ( splay ); //无论调用哪个接口,Splay都会自我调整形态,故需统一输出 } //while while ( splay.size() > 0 ) { T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e printf ( "Removing " ); print ( e ); printf ( " ...\n" ); splay.remove ( e ) ? 
printf ( "Removal done\n" ), print ( splay ) : print ( e ), (void)printf ( " not exists\n" ); } } //课后:利用这一接口,针对不同分布的访问,验证课上对Splay分摊分析的结论 int main ( int argc, char* argv[] ) { //测试主入口 int size = 10; srand ( ( unsigned int ) time ( NULL ) ); testSplayRandom<int> (size); //元素类型可以在这里任意选择 testSplayPeriod<int> (size); //元素类型可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ /****************************************************************************************** * Test of Hashtable ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "Hashtable.h" /****************************************************************************************** * 测试散列表 ******************************************************************************************/ template <typename K, typename V> //key、value void testHashtable(int n) { Hashtable<K, V> ht(n); print(ht); while (ht.size() < 4 * n) { printf("\n"); switch (dice(3)) { case 0: { //查找,成功率 <= 33.3% K key = dice((K)n * 12); //[0, 3n)范围内的key printf("Searching "); print(key); printf("(%04d) :\n", hashCode(key)); V* pValue = ht.get(key); pValue ? printf("Found with "), print(*pValue) : (void)printf("Not found"); printf("\n"); break; } case 1: { //删除,成功率 <= 33.3% K key = dice((K)n * 12); //[0, 3n)范围内的key printf("Removing "); print(key); printf("(%04d) :\n", hashCode(key)); ht.remove(key) ? 
printf("Done\n"), print(ht) : (void)printf("Entry not exists\n"); break; } default: {//插入,成功率 == 100% K key = dice((K)n * 12); V v = (V) 'A' + dice(26); //在[0, 2n)*['A'~'Z']范围内的词条 printf("Inserting <"); print(key); printf("(%04d)", hashCode(key)); printf(","); print(v); printf(">\n"); ht.put(key, v) ? printf("Done\n"), print(ht) : (void)printf("Dup key\n"); break; } } //switch } //while while (ht.size() > 0) { printf("\n"); K key = dice((K)n * 12); //[0, 3n)范围内的key printf("Removing "); print(key); printf(" :\n"); ht.remove(key) ? printf("Done\n"), print(ht) : (void)printf("Entry not exists\n"); } } /****************************************************************************************** * 主程序 ******************************************************************************************/ int main(int argc, char* argv[]) { int iNumber = 6; srand((unsigned int)time(NULL)); testHashtable<int, char>(iNumber); //元素类型可以在这里任意选择 return 0; } <file_sep>## PriorityQueue——优先级队列 #### 优先级队列不使用BBST实现的原因是: 对于实现优先级队列的简单功能来说,BBST过于复杂 #### 优先级队列的一种实现方式是: 逻辑上,等同于完全二叉树、物理上,等同于向量 #### 优先级队列的堆序性是: 在数值上,只要0<i,必满足:H[i] <= H[Parent(i)] ### 1、完全二叉堆 ### 2、左式堆 #### NPL npl: NULL PATH LENGTH(空节点路径长度); npl(x) = x到外部节点的最近距离; npl(x) = 以x为根的最大满子树的高度; #### 定义 左倾:对任何内节点x,都有npl(lc(x)) >= npl(rc(x)); 推论:对任何内节点x,都有npl(x) = 1 + npl(rc(x)); 满足左倾性leftist property的堆,即是左式堆; 左倾性与堆序性,相容而不矛盾; 左式堆的子堆,必是左式堆; 左式堆倾向于更多节点分布于左侧分支; <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #include <iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "BinTree.h" int testID = 0; //测试编号 // 随机生成期望高度为h的二叉树 template <typename T> bool randomBinTree(BinTree<T> & bt, BinNodePosi(T) x, int h) { if (0 >= h) return false; //至多h层 if (0 < dice(h)) //以1/h的概率终止当前分支的生长 randomBinTree(bt, bt.insertAsLC(x, dice((T)h * h * h)), h - 1); if (0 < dice(h)) //以1/h的概率终止当前分支的生长 randomBinTree(bt, bt.insertAsRC(x, dice((T)h * h * h)), h - 1); return true; } // 在二叉树中随机确定一个节点位置 template <typename T> BinNodePosi(T) randomPosiInBinTree(BinNodePosi(T) root) { if (!HasChild(*root)) return root; if (!HasLChild(*root)) return dice(6) ? randomPosiInBinTree(root->rc) : root; if (!HasRChild(*root)) return dice(6) ? randomPosiInBinTree(root->lc) : root; return dice(2) ? randomPosiInBinTree(root->lc) : randomPosiInBinTree(root->rc); } template <typename T> void testBinTree(int h) { //测试二叉树 printf("\n ==== Test %2d. Generate a binTree of height <= %d \n", testID++, h); BinTree<T> bt; print(bt); bt.insertAsRoot(dice((T)h * h * h)); print(bt); randomBinTree<T>(bt, bt.root(), h); print(bt); printf("\n ==== Test %2d. Double and increase all nodes by traversal\n", testID++); bt.travPre((Double<T>&) Double<T>()); bt.travPre((Increase<T>&) Increase<T>()); print(bt); bt.travIn((Double<T>&) Double<T>()); bt.travIn((Increase<T>&) Increase<T>()); print(bt); bt.travPost((Double<T>&) Double<T>()); bt.travPost((Increase<T>&) Increase<T>()); print(bt); bt.travLevel((Double<T>&) Double<T>()); bt.travLevel((Increase<T>&) Increase<T>()); print(bt); Hailstone<T> he; bt.travIn(he); print(bt); printf("\n ==== Test %2d. 
Remove/release subtrees in the Tree\n", testID++); while (!bt.empty()) { BinNodePosi(T) p = randomPosiInBinTree(bt.root()); //随机选择一个节点 if (dice(2)) { printf("removing "); print(p->data); printf(" ...\n"); printf("%d node(s) removed\n", bt.remove(p)); print(bt); } else { printf("releasing "); print(p->data); printf(" ...\n"); BinTree<T>* S = bt.secede(p); print(S); printf("%d node(s) released\n", S->size()); release(S); print(bt); } } } int main(void) { //测试二叉树 string strNumber = "10"; srand((unsigned int)time(NULL)); testBinTree<int>(atoi(strNumber.c_str())); //元素类型可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef PQ_H__ #define PQ_H__ //优先级队列PQ模板类 template <typename T> struct PQ { virtual void insert ( T ) = 0; //按照比较器确定的优先级次序插入词条 virtual T getMax() = 0; //取出优先级最高的词条 virtual T delMax() = 0; //删除优先级最高的词条 }; #endif // PQ_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef BST_H__ #define BST_H__ #include "../BinTree/BinTree.h" /* 引入BinTree */ #define EQUAL(e, v) (!(v) || (e) == (v)->data) /* 节点v(或假想的通配哨兵)的关键码等于e */ template <typename T> class BST : public BinTree<T> { /* 由BinTree派生BST模板类 */ protected: BinNodePosi(T) _hot; /* “命中”节点的父亲 */ BinNodePosi(T) connect34 ( /* 按照“3 + 4”结构,联接3个节点及四棵子树 */ BinNodePosi(T), BinNodePosi(T), BinNodePosi(T), BinNodePosi(T), BinNodePosi(T), BinNodePosi(T), BinNodePosi(T) ); BinNodePosi(T) rotateAt ( BinNodePosi(T) x ); /* 对x及其父亲、祖父做统一旋转调整 */ public: /* 基本接口:以virtual修饰,强制要求所有派生类(BST变种)根据各自的规则对其重写 */ virtual BinNodePosi(T) & search ( const T& e ); /* 查找 */ virtual BinNodePosi(T) insert ( const T& e ); /* 插入 */ virtual bool remove ( const T& e ); /* 删除 */ }; /* 在以v为根的(AVL、SPLAY、rbTree等)BST子树中查找关键码e */ template <typename T> static BinNodePosi(T) & searchIn(BinNodePosi(T) & v, const T& e, BinNodePosi(T) & hot) { if (EQUAL(e, v)) return v; hot = v; /* 退化情况:在子树根节点v处命中 */ while (1) { /* 一般地,反复不断地 */ BinNodePosi(T) & c = (e < hot->data) ? 
hot->lc : hot->rc; /* 确定深入方向 */ if (EQUAL(e, c)) /* 命中返回,或者深入一层 */ return c; hot = c; } /* hot始终指向最后一个失败节点 */ } /* 返回时,返回值指向命中节点(或假想的通配哨兵),hot指向其父亲(退化时为初始值NULL) */ /* 在BST中查找关键码e */ template <typename T> BinNodePosi(T) & BST<T>::search(const T& e) { return searchIn(this->_root, e, _hot = NULL); /* 返回目标节点位置的引用,以便后续插入、删除操作 */ } /* 将关键码e插入BST树中 */ template <typename T> BinNodePosi(T) BST<T>::insert(const T& e) { BinNodePosi(T) & x = search(e); if (x) return x; /* 确认目标不存在(留意对_hot的设置) */ x = new BinNode<T>(e, _hot); /* 创建新节点x:以e为关键码,以_hot为父 */ this->_size++; /* 更新全树规模 */ this->updateHeightAbove(x); /* 更新x及其历代祖先的高度 */ return x; /* 新插入的节点,必为叶子 */ } /* 无论e是否存在于原树中,返回时总有x->data == e */ /****************************************************************************************** * BST节点删除算法:删除位置x所指的节点(全局静态模板函数,适用于AVL、Splay、RedBlack等各种BST) * 目标x在此前经查找定位,并确认非NULL,故必删除成功;与searchIn不同,调用之前不必将hot置空 * 返回值指向实际被删除节点的接替者,hot指向实际被删除节点的父亲——二者均有可能是NULL ******************************************************************************************/ template <typename T> static BinNodePosi(T) removeAt(BinNodePosi(T) & x, BinNodePosi(T) & hot) { BinNodePosi(T) w = x; /* 实际被摘除的节点,初值同x */ BinNodePosi(T) succ = NULL; /* 实际被删除节点的接替者 */ if (!HasLChild(*x)) /* 若*x的左子树为空,则可 */ succ = x = x->rc; /* 直接将*x替换为其右子树 */ else if (!HasRChild(*x)) /* 若右子树为空,则可 */ succ = x = x->lc; /* 对称地处理——注意:此时succ != NULL */ else { /* 若左右子树均存在,则选择x的直接后继作为实际被摘除节点,为此需要 */ w = w->succ(); /* (在右子树中)找到*x的直接后继*w */ swap(x->data, w->data); /* 交换*x和*w的数据元素 */ BinNodePosi(T) u = w->parent; ((u == x) ? 
u->rc : u->lc) = succ = w->rc; /* 隔离节点*w */ } hot = w->parent; /* 记录实际被删除节点的父亲 */ if (succ) succ->parent = hot; /* 并将被删除节点的接替者与hot相联 */ release(w->data); release(w); return succ; /* 释放被摘除节点,返回接替者 */ } /* 从BST树中删除关键码e */ template <typename T> bool BST<T>::remove(const T& e) { BinNodePosi(T) & x = search(e); if (!x) return false; /* 确认目标存在(留意_hot的设置) */ removeAt(x, _hot); /* 实施删除 */ this->_size--; this->updateHeightAbove(_hot); /* 更新_hot及其历代祖先的高度 */ return true; } /* 删除成功与否,由返回值指示 */ /****************************************************************************************** * 按照“3 + 4”结构联接3个节点及其四棵子树,返回重组之后的局部子树根节点位置(即b) * 子树根节点与上层节点之间的双向联接,均须由上层调用者完成 * 可用于AVL和RedBlack的局部平衡调整 ******************************************************************************************/ template <typename T> BinNodePosi(T) BST<T>::connect34( BinNodePosi(T) a, BinNodePosi(T) b, BinNodePosi(T) c, BinNodePosi(T) T0, BinNodePosi(T) T1, BinNodePosi(T) T2, BinNodePosi(T) T3) { /* *DSA*/print(a); print(b); print(c); printf("\n"); a->lc = T0; if (T0) T0->parent = a; a->rc = T1; if (T1) T1->parent = a; this->updateHeight(a); c->lc = T2; if (T2) T2->parent = c; c->rc = T3; if (T3) T3->parent = c; this->updateHeight(c); b->lc = a; a->parent = b; b->rc = c; c->parent = b; this->updateHeight(b); return b; /* 该子树新的根节点 */ } /****************************************************************************************** * BST节点旋转变换统一算法(3节点 + 4子树),返回调整之后局部子树根节点的位置 * 注意:尽管子树根会正确指向上层节点(如果存在),但反向的联接须由上层函数完成 ******************************************************************************************/ template <typename T> BinNodePosi(T) BST<T>::rotateAt(BinNodePosi(T) v) /* v为非空孙辈节点 */ { /*DSA*/if (!v) { printf("\a\nFail to rotate a null node\n"); exit(-1); } BinNodePosi(T) p = v->parent; /* 视v、p和g相对位置分四种情况 */ BinNodePosi(T) g = p->parent; if (IsLChild(*p)) /* zig */ if (IsLChild(*v)) { /* zig-zig */ // /* *DSA*/printf("\tzIg-zIg: "); p->parent = g->parent; /* 向上联接 */ return connect34(v, p, g, v->lc, v->rc, p->rc, 
g->rc); } else { /* zig-zag */ // /* *DSA*/printf("\tzIg-zAg: "); v->parent = g->parent; /* 向上联接 */ return connect34(p, v, g, p->lc, v->lc, v->rc, g->rc); } else /* zag */ if (IsRChild(*v)) { /* zag-zag */ // /* *DSA*/printf("\tzAg-zAg: "); p->parent = g->parent; /* 向上联接 */ return connect34(g, p, v, g->lc, p->lc, v->lc, v->rc); } else { /* zag-zig */ // /* *DSA*/printf("\tzAg-zIg: "); v->parent = g->parent; /* 向上联接 */ return connect34(g, v, p, g->lc, v->lc, v->rc, p->rc); } } #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef RAND_H__ #define RAND_H__ #include <stdlib.h> #include <time.h> //通常用当前时刻设置随机种子 /****************************************************************************************** * 在[0, range)内随机生成一个数 ******************************************************************************************/ static int dice ( int range ) { return rand() % range; } //取[0, range)中的随机整数 static int dice ( int lo, int hi ) { return lo + rand() % ( hi - lo ); } //取[lo, hi)中的随机整数 static float dice ( float range ) { return rand() % ( 1000 * ( int ) range ) / ( float ) 1000.; } static double dice ( double range ) { return rand() % ( 1000 * ( int ) range ) / ( double ) 1000.; } static char dice ( char range ) { return ( char ) ( 32 + rand() % 96 ); } #endif // !RAND_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef PRINT_H__ #define PRINT_H__ #include <stdio.h> /* 采用C风格精细控制输出格式 */ #if 0 #include "../huffman/huffChar.h" /* Huffman超字符 */ #endif #include "../BinTree/BinTree.h" /* 二叉树 */ #include "../BST/BST.h" /* 二叉搜索树 */ #include "../Splay/Splay.h" /* 伸展树 */ #include "../AVL/AVL.h" /* AVL树 */ #include "../redBlack/RedBlack.h" /* 红黑树 */ #include "../BTree/BTree.h" /* 二叉搜索树(B-树) */ #include "../Hashtable/Entry.h" /* 词条 */ #include "../Hashtable/Hashtable.h" /* 散列表 */ #include "../PQ_List/PQ_List.h" /* 基于列表实现的优先级队列 */ #include "../PQ_ComplHeap/PQ_ComplHeap.h" /* 基于完全堆实现的优先级队列 */ #include "../PQ_LeftHeap/PQ_LeftHeap.h" /* 基于左式堆实现的优先级队列 */ #include "../graphMatrix/Graph.h" /* 图 */ #include "../graphMatrix/GraphMatrix.h" /* 基于邻接矩阵实现的图 */ /****************************************************************************************** * 数据元素、数据结构通用输出接口 ******************************************************************************************/ template <typename T> static void print ( T* x ) { x ? print ( *x ) : (void)printf ( " <NULL>" ); } template <typename T> static void print ( T& x ) { UniPrint::p ( x ); } template <typename T> static void print ( const T& x ) { UniPrint::p ( x ); } /* for Stack */ static void print ( char* x ) { printf ( " %s", x ? x : "<NULL>" ); } /* 字符串特别处理 */ static void print ( const char* x ) { printf ( " %s", x ? 
x : "<NULL>" ); } /* 字符串特别处理 */ class UniPrint { public: static void p ( int ); static void p ( float ); static void p ( double ); static void p ( char ); #if 0 static void p ( HuffChar& ); /* Huffman(超)字符 */ #endif static void p ( VStatus ); /* 图顶点的状态 */ static void p ( EType ); /* 图边的类型 */ template <typename K, typename V> static void p ( Entry<K, V>& ); /* Entry */ template <typename T> static void p ( BinNode<T>& ); /* BinTree节点 */ template <typename T> static void p ( BinTree<T>& ); /* 二叉树 */ template <typename T> static void p(BTree<T>&); /* B-树 */ template <typename T> static void p(BST<T>&); /* BST */ template <typename T> static void p(Splay<T>&); /* Splay */ template <typename T> static void p(AVL<T>&); /* AVL */ template <typename T> static void p(RedBlack<T>&); /* RedBlack */ template <typename K, typename V> static void p(Hashtable<K, V>&); /* Hashtable */ template <typename T> static void p(PQ_List<T>&); /* PQ_List */ template <typename T> static void p(PQ_ComplHeap<T>&); /* PQ_ComplHeap */ template <typename T> static void p(PQ_LeftHeap<T>&); /* PQ_LeftHeap */ template <typename Tv, typename Te> static void p(GraphMatrix<Tv, Te>&); /* Graph */ template <typename T> static void p ( T& ); /* 向量、列表等支持traverse()遍历操作的线性结构 */ template <typename T> static void p ( T* s ) /* 所有指针 */ { s ? p ( *s ) : print ( "<NULL>" ); } /* 统一转为引用 */ }; /* UniPrint */ #include "Print_BinNode.h" #include "Print_BinTree.h" #include "Print_BTree.h" #include "Print_Entry.h" #include "Print_Hashtable.h" #include "Print_PQ_List.h" #include "Print_PQ_ComplHeap.h" #include "Print_PQ_LeftHeap.h" #include "Print_graphmatrix.h" #include "Print_traversable.h" #endif // !PRINT_H__ <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef LISTNODE_H__ #define LISTNODE_H__ typedef int Rank; /* 秩 */ #define ListNodePosi(T) ListNode<T>* /* 列表节点位置 */ template <typename T> struct ListNode { /* 列表节点模板类(以双向链表形式实现) */ /* 成员 */ T data; /* 数值 */ ListNodePosi(T) pred; /* 前驱 */ ListNodePosi(T) succ; /* 后继 */ ListNode() {} /* 构造函数 */ ListNode(T e, ListNodePosi(T) p = NULL, ListNodePosi(T) s = NULL) : data(e), pred(p), succ(s) { } /* 默认构造器 */ ListNodePosi(T) insertAsPred(T const& e); /* 紧靠当前节点之前插入新节点 */ ListNodePosi(T) insertAsSucc(T const& e); /* 紧随当前节点之后插入新节点 */ }; /* 将e紧靠当前节点之前插入于当前节点所属列表(设有哨兵头节点header) */ template <typename T> ListNodePosi(T) ListNode<T>::insertAsPred(T const& e) { ListNodePosi(T) x = new ListNode(e, pred, this); /* 创建新节点 */ pred->succ = x; /* 设置正向链接 */ pred = x; return x; /* 返回新节点的位置 */ } /* 将e紧随当前节点之后插入于当前节点所属列表(设有哨兵尾节点trailer) */ template <typename T> ListNodePosi(T) ListNode<T>::insertAsSucc(T const& e) { ListNodePosi(T) x = new ListNode(e, this, succ); /* 创建新节点 */ succ->pred = x; /* 设置逆向链接 */ succ = x; return x; /* 返回新节点的位置 */ } #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn°Ęadd and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef PRINT_ENTRY_H__ #define PRINT_ENTRY_H__ /****************************************************************************************** * Entry ******************************************************************************************/ template <typename K, typename V> void UniPrint::p ( Entry<K, V>& e ) //“ż”√ { printf ( "-<" ); print ( e.key ); printf ( ":" ); print ( e.value ); printf ( ">-" ); } #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef ICREASE_LIST_H__ #define ICREASE_LIST_H__ /*DSA*/#include "..\list\list.h" template <typename T> void increase ( List<T> & L ) //统一递增列表中的各元素 { L.traverse ( (Increase<T> &) Increase<T>() ); } //以Increase<T>()为基本操作进行遍历 #endif // !ICREASE_LIST_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #pragma once /* 以列表为基类,派生出栈模板类 */ #include "../list/list.h" template <typename T> class Stack : public List<T> { /* 将列表的首/末端作为栈顶/底 */ public: /* size()、empty()以及其它开放接口,均可直接沿用 */ void push(T const& e) { this->insertAsLast(e); } /* 入栈:等效于将新元素作为列表的首元素插入 */ T pop() { return this->remove(this->last()); } /* 出栈:等效于删除列表的首元素 */ T& top() { return (this->last())->data; } /* 取顶:直接返回列表的首元素 */ }; //#endif <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . 
* <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ /****************************************************************************************** * Test of Vector ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "vector.h" int testID = 0; //测试编号 //随机置乱向量,使各元素等概率出现于各位置 template <typename T> void permute(Vector<T>& V) { for (int i = V.size(); i > 0; i--) //自后向前 swap(V[i - 1], V[rand() % i]); //V[i - 1]与V[0, i)中某一随机元素交换 } /****************************************************************************************** * 测试:无序向量的(顺序)查找 ******************************************************************************************/ template <typename T> //元素类型 void TestFind(Vector<T> & V, int n) { for (int i = 0; i < V.size(); i++) { //依次查找向量中元素,当然成功 T e = V[i]; print(e); Rank r = V.find(e); if (r < 0) printf(" : not found until rank V[%d] <> %d", r, e); else printf(" : found at rank V[%d] = %d", r, V[r]); printf("\n"); } for (int i = 0; i <= V.size(); i++) { //依次查找每对相邻元素的均值,可能成功 T a = (0 < i) ? V[i - 1] : -INT_MAX / 2; T b = (i < V.size()) ? 
V[i] : INT_MAX / 2; T e = (a + b) / 2; print(e); Rank r = V.find(e); if (r < 0) printf(" : not found until rank V[%d] <> %d", r, e); else printf(" : found at rank V[%d] = %d", r, V[r]); printf("\n"); } } /****************************************************************************************** * 测试:有序向量的查找(binSearch或fibSearch) ******************************************************************************************/ template <typename T> //元素类型 void TestSearch(Vector<T> & V) { for (int i = 0; i < V.size(); i++) { //依次查找向量中元素,当然成功 T e = V[i]; printf("Looking for"); print(e); printf(" in ...\n"); print(V); Rank r = V.search(e); if (V[r] == e) printf("found at rank V[%d] = %d", r, V[r]); else printf("found at rank V[%d] = %d <> %d\a\a", r, V[r], e); printf("\n\n"); } for (int i = 0; i <= V.size(); i++) { //依次相邻元素的均值,可能成功 T a = (0 < i) ? V[i - 1] : -INT_MAX / 2; T b = (i < V.size()) ? V[i] : INT_MAX / 2; T e = (a + b) / 2; printf("Looking for"); print(e); printf(" in ...\n"); print(V); Rank r = V.search(e); printf("V[%3d] =", r); (r < 0) ? print("-INF") : print(V[r]); printf(" ~ "); printf("V[%3d] =", r + 1); (r + 1 < V.size()) ? 
print(V[r + 1]) : print("+INF"); bool ordered = true; if ((r >= 0) && (V[r] > e)) ordered = false; if ((r + 1 < V.size()) && (V[r + 1] <= e)) ordered = false; if (!ordered) printf("\tincorrect search\a\a"); printf("\n\n"); } } /****************************************************************************************** * 测试:有序向量的插入 ******************************************************************************************/ template <typename T> //元素类型 void TestOrderedInsertion(Vector<T> & V, int n) { while (n * 2 > V.size()) { T e = dice((T)n * 2); printf("Inserting "); print(e); printf(" ...\n"); V.insert(V.search(e) + 1, e); print(V); } } /****************************************************************************************** * 测试向量 ******************************************************************************************/ #define PRINT(x) { print(x); crc(x); checkOrder(x); } template <typename T> //元素类型 void testVector(int testSize) { printf("\n ==== Test %2d. Generate a random vector\n", testID++); Vector<T> V; for (int i = 0; i < testSize; i++) V.insert(dice(i + 1), dice((T)testSize * 3)); //在[0, 3n)中选择n个数,随机插入向量 PRINT(V); permute(V); PRINT(V) printf("\n ==== Test %2d. Lowpass on\n", testID++); PRINT(V); int i = V.size(); while (0 < --i) { V[i - 1] += V[i]; V[i - 1] >>= 1; } PRINT(V); printf("\n ==== Test %2d. Increase\n", testID++); PRINT(V); increase(V); PRINT(V); printf("\n ==== Test %2d. FIND in\n", testID++); PRINT(V); TestFind<T>(V, testSize); printf("\n ==== Test %2d. Sort degenerate intervals each of size 1 in\n", testID++, 0, V.size()); PRINT(V); for (int i = 0; i < V.size(); i += V.size() / 5) { V.sort(i, i); PRINT(V); } //element by element printf("\n ==== Test %2d. Sort 5 intervals each of size %d in\n", testID++, V.size() / 5); PRINT(V); for (int i = 0; i < V.size(); i += V.size() / 5) { V.sort(i, min(V.size(), i + V.size() / 5)); PRINT(V); } //interval by interval printf("\n ==== Test %2d. 
Sort the entire vector of\n", testID++, 0, V.size()); PRINT(V); V.sort(); PRINT(V); printf("\n ==== Test %2d. FIND in\n", testID++); PRINT(V); TestFind<T>(V, testSize); printf("\n ==== Test %2d. SEARCH in\n", testID++); PRINT(V); TestSearch<T>(V); printf("\n ==== Test %2d. Unsort interval [%d, %d) in\n", testID++, V.size() / 4, 3 * V.size() / 4); PRINT(V); V.unsort(V.size() / 4, 3 * V.size() / 4); PRINT(V); printf("\n ==== Test %2d. Unsort interval [%d, %d) in\n", testID++, 0, V.size()); PRINT(V); V.unsort(); PRINT(V); printf("\n ==== Test %2d. Copy interval [%d, %d) from\n", testID++, V.size() / 4, 3 * V.size() / 4); PRINT(V); Vector<T> U(V, V.size() / 4, 3 * V.size() / 4); PRINT(U); printf("\n ==== Test %2d. Copy from\n", testID++); PRINT(V); Vector<T> W(V); PRINT(W); printf("\n ==== Test %2d. Clone from\n", testID++); PRINT(U); W = U; PRINT(W); printf("\n ==== Test %2d. Remove redundancy in unsorted\n", testID++); PRINT(V); printf("%d node(s) removed\n", V.deduplicate()); PRINT(V); printf("\n ==== Test %2d. Sort interval [%d, %d) in\n", testID++, 0, V.size()); PRINT(V); V.sort(); PRINT(V); printf("\n ==== Test %2d. FIND in V[%d]\n", testID++); PRINT(V); TestFind<T>(V, testSize); printf("\n ==== Test %2d. SEARCH & INSERT in\n", testID++); PRINT(V); TestOrderedInsertion<T>(V, testSize); PRINT(V) printf("\n ==== Test %2d. Remove redundancy in sorted\n", testID++); PRINT(V); printf("%d node(s) removed\n", V.uniquify()); PRINT(V); } /****************************************************************************************** * 测试向量 ******************************************************************************************/ int main(int argc, char* argv[]) { string strNumber = "5"; srand((unsigned int)time(NULL)); testVector<int>(atoi(strNumber.c_str())); //元素类型可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . 
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#ifndef PQ_LIST_H__
#define PQ_LIST_H__

#include "../PQ/PQ.h"     // the priority-queue ADT
#include "../List/List.h" // the underlying list

// Priority queue implemented on top of an (unsorted) list:
// insert is O(1); getMax/delMax scan the whole list via selectMax, O(n).
template <typename T> class PQ_List : public PQ<T>, public List<T> {
   /*DSA*/friend class UniPrint; // for demo output only; not required otherwise
public:
   PQ_List() { } // default constructor
   PQ_List(T* E, int n) // batch constructor: prepends E[n-1] .. E[0] in turn, preserving E's order
   { while (0 < n--) this->insertAsFirst((E[n])); }

   void insert(T e)
   { this->insertAsLast(e); } // simply append the new element at the tail

   T getMax() // peek at the highest-priority element (does not remove it)
   { return this->selectMax()->data; }

   T delMax() // remove and return the highest-priority element
   { return this->remove(this->selectMax()); }
}; //PQ_List

#endif <file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#ifndef BIT_O1_INIT_SET_ONLY_H__
#define BIT_O1_INIT_SET_ONLY_H__
#pragma warning(disable : 4996 4800)
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include "../_share/release.h"

typedef int Rank; // rank

/* Bitmap with O(1) implicit initialization: space is traded for init time.
 * Only set/test are supported (no clearing of individual bits).
 * Invariant ("validation cycle"): bit k is considered set iff F[k] is a valid
 * slot of stack T AND T[F[k]] == k; uninitialized garbage in F fails this
 * check, which is why neither array needs zeroing at construction time. */
class Bitmap {
private:
   Rank* F; // F[k]: the order in which bit k was marked (its rank in T); garbage until set
   Rank N;  // capacity (number of representable bits)
   Rank* T; // stack of the ranks that have been marked, plus its top pointer
   Rank top;

protected:
   inline bool valid ( Rank r ) { return ( 0 <= r ) && ( r < top ); }

public:
   Bitmap ( Rank n = 8 ) // create a bitmap of the given (or default, small for testing) capacity
   { // implicit O(1) initialization: F and T are deliberately left uninitialized
      N = n; F = new Rank[N]; T = new Rank[N]; top = 0;
   }
   ~Bitmap() // free both arrays
   { delete [] F; delete [] T; }

// interface
   inline void set ( Rank k ) // mark bit k; no growth here, so k must be < N
   {
      if ( test ( k ) ) // ignore bits that are already marked
         return;
      F[k] = top++; T[ F[k] ] = k; // establish the validation cycle
   }
   inline bool test ( Rank k ) // query bit k
   { return valid ( F[k] ) && ( k == T[ F[k] ] ); }
};

#endif // 
!BIT_O1_INIT_SET_ONLY_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef DOUBLE_ELEM_H__ #define DOUBLE_ELEM_H__ template <typename T> struct Double //函数对象:倍增一个T类对象 { virtual void operator() ( T& e ) { e *= 2; } }; //假设T可直接倍增 #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef LIST_H__ #define LIST_H__ #include "listNode.h" /* 引入列表节点类 */ /* 列表模板类 */ template <typename T> class List { private: int _size; /* 规模 */ ListNodePosi(T) header; /* 头哨兵 */ ListNodePosi(T) trailer; /* 尾哨兵 */ protected: void init(); /* 列表创建时的初始化 */ int clear(); /* 清除所有节点、返回原链表元素个数 */ void copyNodes(ListNodePosi(T) p, int n); /* 复制列表中自位置p起的n项 */ void merge(ListNodePosi(T) & p, int n, List<T>& L, ListNodePosi(T) q, int m); /* 归并 */ void mergeSort(ListNodePosi(T) & p, int n); /* 对从p开始连续的n个节点归并排序 */ void selectionSort(ListNodePosi(T) p, int n); /* 对从p开始连续的n个节点选择排序 */ void insertionSort(ListNodePosi(T) p, int n); /* 对从p开始连续的n个节点插入排序 */ public: /* 构造函数 */ List() { init(); } /* 默认构造函数 */ List(List<T> const& L); /* 拷贝构造函数—整体复制列表L */ List(List<T> const& L, Rank r, int n); /* 拷贝构造函数—复制列表L中自第r项起的n项 */ List(ListNodePosi(T) p, int n); /* 复制列表中自位置p起的n项 */ /* 析构函数 */ ~List(); /* 释放(包含头、尾哨兵在内的)所有节点 */ /* 只读访问接口 */ Rank size() const { return _size; } /* 规模 */ bool empty() const { return _size <= 0; } /* 判空 */ T& operator[] (Rank r) const; /* 重载,支持循秩访问(效率低) */ ListNodePosi(T) first() const { return header->succ; } /* 首节点位置 */ ListNodePosi(T) last() 
const { return trailer->pred; } /* 末节点位置 */ bool valid(ListNodePosi(T) p){ return p && (trailer != p) && (header != p); } /* 判断位置p是否对外合法 */ int disordered() const; /* 判断列表是否已排序 */ ListNodePosi(T) find(T const& e) const { return find(e, _size, trailer); } /* 无序列表查找 */ ListNodePosi(T) find(T const& e, int n, ListNodePosi(T) p) const; /* 无序区间查找 */ ListNodePosi(T) search(T const& e) const { return search(e, _size, trailer); } /* 有序列表查找 */ ListNodePosi(T) search(T const& e, int n, ListNodePosi(T) p) const; /* 有序区间查找 */ ListNodePosi(T) selectMax(ListNodePosi(T) p, int n); /* 在p及其n-1个后继中选出最大者 */ ListNodePosi(T) selectMax(){ return selectMax(header->succ, _size); } /* 整体最大者 */ /* 可写访问接口 */ ListNodePosi(T) insertAsFirst(T const& e); /* 将e当作首节点插入 */ ListNodePosi(T) insertAsLast(T const& e); /* 将e当作末节点插入 */ ListNodePosi(T) insertA(ListNodePosi(T) p, T const& e); /* 将e当作p的后继插入 */ ListNodePosi(T) insertB(ListNodePosi(T) p, T const& e); /* 将e当作p的前驱插入 */ T remove(ListNodePosi(T) p); /* 删除合法位置p处的节点,返回被删除节点 */ void merge(List<T>& L){ merge(first(), size, L, L.first(), L._size); } /* 全列表归并 */ void sort(ListNodePosi(T) p, int n); /* 列表区间排序 */ void sort() { sort(first(), _size); } /* 列表整体排序 */ int deduplicate(); /* 无序去重 */ int uniquify(); /* 有序去重 */ void reverse(); /* 前后倒置(习题) */ /* 遍历 */ void traverse( void (*visit) ( T& ) ); /* 遍历,依次实施visit操作(函数指针,只读或局部性修改) */ template <typename VST> /* 操作器 */ void traverse(VST& visit); /* 遍历,依次实施visit操作(函数对象,可全局性修改)*/ }; #include "../_share/release.h" #include "../_share/util.h" /* 列表初始化,在创建列表对象时统一调用 */ template <typename T> void List<T>::init() { header = new ListNode<T>; /* 创建头哨兵节点 */ trailer = new ListNode<T>; /* 创建尾哨兵节点 */ header->succ = trailer; header->pred = NULL; trailer->pred = header; trailer->succ = NULL; _size = 0; /* 记录规模 */ } /* 清空列表 */ template <typename T> int List<T>::clear() { int oldSize = _size; while (0 < _size) remove(header->succ); /* 反复删除首节点,直至列表变空 */ return oldSize; } /* 列表内部方法:复制列表中自位置p起的n项 */ template <typename T> void 
List<T>::copyNodes(ListNodePosi(T) p, int n) { /* p合法,且至少有n-1个真后继节点 */ init(); /* 创建头、尾哨兵节点并做初始化 */ while (n--) /* 将起自p的n项依次作为末节点插入 */ { insertAsLast(p->data); p = p->succ; } } /* 有序列表的归并:当前列表中自p起的n个元素,与列表L中自q起的m个元素归并 */ template <typename T> void List<T>::merge(ListNodePosi(T) & p, int n, List<T>& L, ListNodePosi(T) q, int m) { // assert: this.valid(p) && rank(p) + n <= size && this.sorted(p, n) // L.valid(q) && rank(q) + m <= L._size && L.sorted(q, m) // 注意:在归并排序之类的场合,有可能 this == L && rank(p) + n = rank(q) ListNodePosi(T) pp = p->pred; /* 借助前驱(可能是header),以便返回前 ... */ while (0 < m) /* 在q尚未移出区间之前 */ if ((0 < n) && (p->data <= q->data)) /* 若p仍在区间内且v(p) <= v(q),则 */ { if (q == (p = p->succ)) break; n--; } /* p归入合并的列表,并替换为其直接后继 */ else /* 若p已超出右界或v(q) < v(p),则 */ { insertB(p, L.remove((q = q->succ)->pred)); m--; } /* 将q转移至p之前 */ p = pp->succ; /* 确定归并后区间的(新)起点 */ } /* 列表的归并排序算法:对起始于位置p的n个元素排序 */ template <typename T> void List<T>::mergeSort(ListNodePosi(T) & p, int n) { /* valid(p) && rank(p) + n <= size */ /*DSA*/printf("\tMERGEsort [%3d]\n", n); if (n < 2) return; /* 若待排序范围已足够小,则直接返回;否则... 
*/ int m = n >> 1; /* 以中点为界 */ ListNodePosi(T) q = p; for (int i = 0; i < m; i++) /* 均分列表 */ q = q->succ; mergeSort(p, m); /* 对前、后子列表分别排序 */ mergeSort(q, n - m); merge(p, m, *this, q, n - m); /* 归并 */ } /* 注意:排序后,p依然指向归并后区间的(新)起点 */ /* 列表的选择排序算法:对起始于位置p的n个元素排序 */ template <typename T> void List<T>::selectionSort(ListNodePosi(T) p, int n) { /* valid(p) && rank(p) + n <= size */ /*DSA*/printf("SelectionSort ...\n"); ListNodePosi(T) head = p->pred; ListNodePosi(T) tail = p; for (int i = 0; i < n; i++) /* 待排序区间为(head, tail) */ tail = tail->succ; while (1 < n) { /* 在至少还剩两个节点之前,在待排序区间内 */ ListNodePosi(T) max = selectMax(head->succ, n); /* 找出最大者(歧义时后者优先) */ insertB(tail, remove(max)); /* 将其移至无序区间末尾(作为有序区间新的首元素) */ /*DSA*/ /* swap(tail->pred->data, selectMax( head->succ, n )->data ); */ tail = tail->pred; n--; } } /* 列表的插入排序算法:对起始于位置p的n个元素排序 */ template <typename T> void List<T>::insertionSort(ListNodePosi(T) p, int n) { /* valid(p) && rank(p) + n <= size */ /*DSA*/printf("InsertionSort ...\n"); for (int r = 0; r < n; r++) { /* 逐一为各节点 */ insertA(search(p->data, r, p), p->data); /* 查找适当的位置并插入 */ p = p->succ; /* 转向下一节点 */ remove(p->pred); } } /* 复制列表中自位置p起的n项(assert: p为合法位置,且至少有n-1个后继节点) */ template <typename T> List<T>::List(ListNodePosi(T) p, int n) { copyNodes(p, n); } /* 整体复制列表L */ template <typename T> List<T>::List(List<T> const& L) { copyNodes(L.first(), L._size); } /* 复制L中自第r项起的n项(assert: r+n <= L._size) */ template <typename T> List<T>::List(List<T> const& L, Rank r, int n) { copyNodes(L[r], n); } /* 列表析构器 */ template <typename T> List<T>::~List() { clear(); /* 清空列表,释放头、尾哨兵节点 */ delete header; delete trailer; } /* 重载下标操作符,以通过秩直接访问列表节点(虽方便,效率低,需慎用) */ template <typename T> T& List<T>::operator[] (Rank r) const { /* assert: 0 <= r < size */ ListNodePosi(T) p = first(); /* 从首节点出发 */ while (0 < r--) p = p->succ; /* 顺数第r个节点即是 */ return p->data; /* 目标节点,返回其中所存元素 */ } /* 统计逆序相邻元素对的总数 */ template <typename T> int List<T>::disordered() const { int n = 0; ListNode<T>* p = 
first(); for (int i = 0; i < _size - 1; p = p->succ, i++) if (p->data > p->succ->data) n++; return n; } /* 在无序列表内节点p(可能是trailer)的n个(真)前驱中,找到等于e的最后者 */ template <typename T> ListNodePosi(T) List<T>::find(T const& e, int n, ListNodePosi(T) p) const { while (0 < n--) /* (0 <= n <= rank(p) < _size)对于p的最近的n个前驱,从右向左 */ if (e == (p = p->pred)->data) return p; /* 逐个比对,直至命中或范围越界 */ return NULL; /* p越出左边界意味着区间内不含e,查找失败 */ } /* 失败时,返回NULL */ /* 在有序列表内节点p(可能是trailer)的n个(真)前驱中,找到不大于e的最后者 */ template <typename T> ListNodePosi(T) List<T>::search(T const& e, int n, ListNodePosi(T) p) const { /* assert: 0 <= n <= rank(p) < _size */ /*DSA*/printf("searching for "); print(e); printf(" :\n"); while (0 <= n--) /* 对于p的最近的n个前驱,从右向左逐个比较 */ /*DSA*/ { printf(" <%4d>", p->pred->data); if (((p = p->pred)->data) <= e) break; /* 直至命中、数值越界或范围越界 */ /*DSA*/ } printf("\n"); /* assert: 至此位置p必符合输出语义约定——尽管此前最后一次关键码比较可能没有意义(等效于与-inf比较) */ return p; /* 返回查找终止的位置 */ } /* 失败时,返回区间左边界的前驱(可能是header)——调用者可通过valid()判断成功与否 */ /* 从起始于位置p的n个元素中选出最大者 */ template <typename T> ListNodePosi(T) List<T>::selectMax(ListNodePosi(T) p, int n) { ListNodePosi(T) max = p; /* 最大者暂定为首节点p */ for (ListNodePosi(T) cur = p; 1 < n; n--) /* 从首节点p出发,将后续节点逐一与max比较 */ if (!lt((cur = cur->succ)->data, max->data)) /* 若当前元素不小于max,则 */ max = cur; /* 更新最大元素位置记录 */ return max; /* 返回最大节点位置 */ } /* e当作首节点插入 */ template <typename T> ListNodePosi(T) List<T>::insertAsFirst(T const& e) { _size++; return header->insertAsSucc(e); } /* e当作末节点插入 */ template <typename T> ListNodePosi(T) List<T>::insertAsLast(T const& e) { _size++; return trailer->insertAsPred(e); } /* e当作p的后继插入(After) */ template <typename T> ListNodePosi(T) List<T>::insertA(ListNodePosi(T) p, T const& e) { _size++; return p->insertAsSucc(e); } /* e当作p的前驱插入(Before) */ template <typename T> ListNodePosi(T) List<T>::insertB(ListNodePosi(T) p, T const& e) { _size++; return p->insertAsPred(e); } /* 删除合法位置p处的节点,返回被删除节点 */ template <typename T> T List<T>::remove(ListNodePosi(T) p) { T e = 
p->data; /* 备份待删除节点的数值(假定T类型可直接赋值) */ p->succ->pred = p->pred; p->pred->succ = p->succ; delete p; _size--; return e; } /* 列表区间排序 */ template <typename T> void List<T>::sort(ListNodePosi(T) p, int n) { switch (rand() % 3) { /* 随机选取排序算法。可根据具体问题的特点灵活选取或扩充 */ case 1: insertionSort(p, n); break; /* 插入排序 */ case 2: selectionSort(p, n); break; /* 选择排序 */ default: mergeSort(p, n); break; /* 归并排序 */ } } /* 剔除无序列表中的重复节点 */ template <typename T> int List<T>::deduplicate() { if (_size < 2) return 0; /* 平凡列表自然无重复 */ int oldSize = _size; /* 记录原规模 */ ListNodePosi(T) p = header; Rank r = 0; /* p从首节点开始 */ while (trailer != (p = p->succ)) { /* 依次直到末节点 */ ListNodePosi(T) q = find(p->data, r, p); /* 在p的r个(真)前驱中查找雷同者 */ q ? remove(q) : r++; /* 若的确存在,则删除之;否则秩加一 */ } /* assert: 循环过程中的任意时刻,p的所有前驱互不相同 */ return oldSize - _size; /* 列表规模变化量,即被删除元素总数 */ } /* 成批剔除重复元素,效率更高 */ template <typename T> int List<T>::uniquify() { if (_size < 2) return 0; /* 平凡列表自然无重复 */ int oldSize = _size; /* 记录原规模 */ ListNodePosi(T) p = first(); ListNodePosi(T) q; /* p为各区段起点,q为其后继 */ while (trailer != (q = p->succ)) /* 反复考查紧邻的节点对(p, q) */ if (p->data != q->data) p = q; /* 若互异,则转向下一区段 */ else remove(q); /* 否则(雷同),删除后者 */ return oldSize - _size; /* 列表规模变化量,即被删除元素总数 */ } /* 前后倒置 */ template <typename T> void List<T>::reverse() { ListNodePosi(T) p = header; /* 头、尾节点 */ ListNodePosi(T) q = trailer; for (int i = 1; i < _size; i += 2) /* (从首、末节点开始)由外而内,捉对地 */ swap((p = p->succ)->data, (q = q->pred)->data); /* 交换对称节点的数据项 */ } /* 借助函数指针机制遍历 */ template <typename T> void List<T>::traverse( void( *visit ) ( T& )) { for ( ListNodePosi(T) p = header->succ; p != trailer; p = p->succ ) visit( p->data ); } /* 借助函数对象机制遍历 */ template <typename T> template <typename VST> /* 元素类型、操作器 */ void List<T>::traverse( VST& visit ) { for ( ListNodePosi(T) p = header->succ; p != trailer; p = p->succ ) visit( p->data ); } #endif<file_sep>/****************************************************************************************** * Data Structures 
and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#ifndef BTREE_H__
#define BTREE_H__

#include "BTNode.h" // the B-tree node class

/* B-tree template class. */
template <typename T> class BTree {
protected:
   int _size;  // number of keys stored
   int _order; // order of the B-tree, at least 3 -- fixed at construction
   BTNodePosi(T) _root; // root node
   BTNodePosi(T) _hot;  // last non-empty node visited by BTree::search() (unless the tree is empty)
   void solveOverflow ( BTNodePosi(T) );  // split after an insertion overflows a node
   void solveUnderflow ( BTNodePosi(T) ); // rotate/merge after a removal underflows a node
public:
   BTree ( int order = 3 ) : _order ( order ), _size ( 0 ) // constructor; default is the minimal order 3
   { _root = new BTNode<T>(); }
   ~BTree() { if ( _root ) release ( _root ); } // destructor: release all nodes
   int const order() { return _order; } // order
   int const size() { return _size; }   // size
   BTNodePosi(T) & root() { return _root; } // root
   bool empty() const { return !_root; }    // emptiness
   BTNodePosi(T) search ( const T& e ); // search
   bool insert ( const T& e ); // insert
   bool remove ( const T& e ); // remove
}; // BTree

/* Search the B-tree for key e; returns the node holding it, or NULL.
 * _hot is left at the last non-empty node visited -- the would-be parent. */
template <typename T> BTNodePosi(T) BTree<T>::search(const T& e) {
   BTNodePosi(T) v = _root; _hot = NULL; // start from the root
   while (v) { // descend level by level
      Rank r = v->key.search(e); // largest key <= e within the current node
      if ((0 <= r) && (e == v->key[r])) return v; // hit: e found in the current node
      _hot = v; v = v->child[r + 1]; // else descend into the matching subtree (_hot = its parent) -- the I/O-bound, most expensive step
   } // within a node this is a binary search, though for typical _order a plain scan would do
   return NULL; // miss: reached an external node
}

/* Insert key e into the B-tree (fails if e is already present). */
template <typename T> bool BTree<T>::insert(const T& e) {
   BTNodePosi(T) v = search(e); // confirm the target key is absent
   if (v) return false;
   Rank r = _hot->key.search(e); // insertion position within _hot's sorted key vector
   _hot->key.insert(r + 1, e);   // insert the new key there ...
   _hot->child.insert(r + 2, NULL); // ... plus a matching empty child pointer
   _size++; // update the overall size
   solveOverflow(_hot); // split if necessary
   return true; // insertion succeeded
}

/* Remove key e from the B-tree (fails if e is absent). */
template <typename T> bool BTree<T>::remove(const T& e) {
   BTNodePosi(T) v = search(e);
   if (!v) return false; // confirm the target key is present
   Rank r = v->key.search(e); // rank of e within v (valid, by the above)
   if (v->child[0]) { // if v is internal, e's successor must live in some leaf:
      BTNodePosi(T) u = v->child[r + 1]; // go right once,
      while (u->child[0]) // then keep going left
         u = u->child[0]; // to find e's successor
      v->key[r] = u->key[0]; // swap e with it
      v = u; r = 0;
   } // now v is at the bottom level, and its r-th key is the one to delete
   v->key.remove(r); // drop e, together with one of its two external children
   v->child.remove(r + 1); _size--;
   solveUnderflow(v); // rotate or merge if necessary
   return true;
}

/* Split handling after an insertion overflows node v. */
template <typename T> void BTree<T>::solveOverflow(BTNodePosi(T) v) {
   if (_order >= v->child.size()) // base case: no overflow
      return;
   Rank s = _order / 2; // pivot (at this point _order = key.size() = child.size() - 1)
   BTNodePosi(T) u = new BTNode<T>(); // note: a fresh node already owns one empty child
   for (Rank j = 0; j < _order - s - 1; j++) { // split v's right _order-s-1 children and keys off into u
      u->child.insert(j, v->child.remove(s + 1)); // (moving one at a time is slow;
      u->key.insert(j, v->key.remove(s + 1));     // this strategy could be improved)
   }
   u->child[_order - s - 1] = v->child.remove(s + 1); // move v's rightmost child
   if (u->child[0]) // if u's children are non-empty,
      for (Rank j = 0; j < _order - s; j++) // make their parent pointers
         u->child[j]->parent = u; // all refer to u
   BTNodePosi(T) p = v->parent; // v's current parent p
   if (!p) // create p if it does not exist yet
   { _root = p = new BTNode<T>(); p->child[0] = v; v->parent = p; }
   Rank r = 1 + p->key.search(v->key[0]); // rank of the pointer to u within p
   p->key.insert(r, v->key.remove(s)); // the pivot key moves up into p,
   p->child.insert(r + 1, u); // and the new node u is linked to p
   u->parent = p;
   solveOverflow(p); // one level up; recurses at most O(log n) times
}

/* Rotation/merge handling after a removal underflows node v. */
template <typename T> void BTree<T>::solveUnderflow(BTNodePosi(T) v) {
   if ((_order + 1) / 2 <= v->child.size()) // base case: no underflow
      return;
   BTNodePosi(T) p = v->parent;
   if (!p) { // base case: v is the root, which has no lower bound on children
      if (!v->key.size() && v->child[0]) {
         // but if the root holds no key yet has a (single) non-empty child,
         /*DSA*/printf("collapse\n");
         _root = v->child[0]; // that child can be promoted over v,
         _root->parent = NULL; v->child[0] = NULL;
         release(v); // and v, no longer useful, destroyed
      } // the whole tree drops by one level
      return;
   }
   Rank r = 0; while (p->child[r] != v) r++;
   // determine that v is p's r-th child -- v may hold no key at this point,
   // so it is located by pointer, not by key
   // (with an equality tester on child pointers, Vector::find() would also do)
   /*DSA*/printf("\nrank = %d", r);
   // Case 1: borrow a key from the left sibling
   if (0 < r) { // if v is not p's first child, then
      BTNodePosi(T) ls = p->child[r - 1]; // the left sibling must exist
      if ((_order + 1) / 2 < ls->child.size()) { // if that sibling is "fat" enough, then
         /*DSA*/printf(" ... case 1\n");
         v->key.insert(0, p->key[r - 1]); // p lends v one key (as v's minimum)
         p->key[r - 1] = ls->key.remove(ls->key.size() - 1); // ls's maximum key moves up into p
         v->child.insert(0, ls->child.remove(ls->child.size() - 1)); // meanwhile ls's rightmost child goes over to v
         if (v->child[0]) v->child[0]->parent = v; // as v's leftmost child
         return; // this right-rotation settles the underflow at this (and every) level
      }
   } // otherwise the left sibling is either absent or too "thin"
   // Case 2: borrow a key from the right sibling
   if (p->child.size() - 1 > r) { // if v is not p's last child, then
      BTNodePosi(T) rs = p->child[r + 1]; // the right sibling must exist
      if ((_order + 1) / 2 < rs->child.size()) { // if that sibling is "fat" enough, then
         /*DSA*/printf(" ... case 2\n");
         v->key.insert(v->key.size(), p->key[r]); // p lends v one key (as v's maximum)
         p->key[r] = rs->key.remove(0); // rs's minimum key moves up into p
         v->child.insert(v->child.size(), rs->child.remove(0)); // meanwhile rs's leftmost child goes over to v
         if (v->child[v->child.size() - 1]) // as v's rightmost child
            v->child[v->child.size() - 1]->parent = v;
         return; // this left-rotation settles the underflow at this (and every) level
      }
   } // otherwise the right sibling is either absent or too "thin"
   // Case 3: both siblings are absent (never simultaneously) or too "thin" -- merge
   if (0 < r) { // merge with the left sibling
      /*DSA*/printf(" ... case 3L\n");
      BTNodePosi(T) ls = p->child[r - 1]; // which must exist
      ls->key.insert(ls->key.size(), p->key.remove(r - 1)); p->child.remove(r);
      // p's (r-1)-th key drops down into ls; v is no longer p's r-th child
      ls->child.insert(ls->child.size(), v->child.remove(0));
      if (ls->child[ls->child.size() - 1]) // v's leftmost child becomes ls's rightmost
         ls->child[ls->child.size() - 1]->parent = ls;
      while (!v->key.empty()) { // v's remaining keys and children move into ls in order
         ls->key.insert(ls->key.size(), v->key.remove(0));
         ls->child.insert(ls->child.size(), v->child.remove(0));
         if (ls->child[ls->child.size() - 1]) ls->child[ls->child.size() - 1]->parent = ls;
      }
      release(v); // free v
   } else { // merge with the right sibling
      /*DSA*/printf(" ... case 3R\n");
      BTNodePosi(T) rs = p->child[r + 1]; // which must exist
      rs->key.insert(0, p->key.remove(r)); p->child.remove(r);
      // p's r-th key drops down into rs; v is no longer p's r-th child
      rs->child.insert(0, v->child.remove(v->child.size() - 1));
      if (rs->child[0]) rs->child[0]->parent = rs; // v's rightmost child becomes rs's leftmost
      while (!v->key.empty()) { // v's remaining keys and children move into rs
         rs->key.insert(0, v->key.remove(v->key.size() - 1));
         rs->child.insert(0, v->child.remove(v->child.size() - 1));
         if (rs->child[0]) rs->child[0]->parent = rs;
      }
      release(v); // free v
   }
   solveUnderflow(p); // one level up; recurses at most O(log n) times
   return;
}

#endif <file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#ifndef BITMAP_H__
#define BITMAP_H__
#pragma warning(disable : 4996 4800)
#include <stdlib.h>
#include <stdio.h>
#include <memory.h>
#include "../_share/release.h"

/* Bitmap class: N*sizeof(char)*8 bits kept in the byte array M[]. */
class Bitmap {
private:
   char* M; // bit storage; capacity N * 8 bits
   int N;
protected:
   void init ( int n ) { M = new char[N = ( n + 7 ) / 8]; memset ( M, 0, N ); }
public:
   Bitmap ( int n = 8 ) // create a bitmap of the given/default capacity (small default, for testing)
   { init ( n ); }
   Bitmap ( char* file, int n = 8 ) // create a bitmap of the given/default capacity from a file
   { init ( n ); FILE* fp = fopen ( file, "r" ); fread ( M, sizeof ( char ), N, fp ); fclose ( fp ); }
   ~Bitmap() // free the bit storage
   { delete [] M; M = NULL; }

   void set( int k ) { expand ( k ); M[k >> 3] |= ( 0x80 >> ( k & 0x07 ) ); }      // set bit k
   void clear ( int k ) { expand ( k ); M[k >> 3] &= ~ ( 0x80 >> ( k & 0x07 ) ); } // clear bit k
   bool test ( int k ) { expand ( k ); return M[k >> 3] & ( 0x80 >> ( k & 0x07 ) ); } // query bit k

   void dump ( char* file ) // dump the whole bitmap to a file, e.g. for bulk re-initialization later
   { FILE* fp = fopen ( file, "w" ); fwrite ( M, sizeof ( char ), N, fp ); fclose ( fp ); }

   char* bits2string ( int n ) // render the first n bits as a '0'/'1' string
   {
      expand ( n - 1 ); // bitmap[n - 1] is the highest bit that may be touched
      char* s = new char[n + 1]; // the caller owns (and must delete[]) the string
      s[n] = '\0';
      for (int i = 0; i < n; i++) s[i] = test(i) ? '1' : '0';
      return s; // return the string
   }

   void expand ( int k ) { // grow the storage whenever bit k lies beyond it
      if ( k < 8 * N ) // still in range: nothing to do
         return;
      int oldN = N; char* oldM = M;
      init ( 2 * k ); // doubling strategy, as with vectors; init() zeroes the new buffer,
      memcpy_s ( M, N, oldM, oldN ); // so bits past oldN stay 0 once the old data is copied in (memcpy_s is MSVC-specific)
      delete [] oldM;
   }
   /*DSA*/ void print ( int n ) // print the first n bits; debugging aid, not a required interface
   { expand ( n ); for ( int i = 0; i < n; i++ ) printf ( test ( i ) ? "1" : "0" ); }
};

#endif // !BITMAP_H__<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#ifndef ENTRY_H__
#define ENTRY_H__

/* Entry (key-value pair) template. */
template <typename K, typename V> struct Entry {
   K key;   // key
   V value; // value
   Entry ( K k = K(), V v = V() ) : key ( k ), value ( v ) {}; // default constructor
   Entry ( Entry<K, V> const& e ) : key ( e.key ), value ( e.value ) {}; // clone-based constructor
   bool operator< ( Entry<K, V> const& e ) { return key < e.key; }   // comparator: less than
   bool operator> ( Entry<K, V> const& e ) { return key > e.key; }   // comparator: greater than
   bool operator== ( Entry<K, V> const& e ) { return key == e.key; } // equality tester
   bool operator!= ( Entry<K, V> const& e ) { return key != e.key; } // inequality tester
}; // thanks to these operators, an entry is interchangeable with its key from here on

#endif<file_sep>## DataStructAndAlgorithm this repository is to recording my study of data struct and algorithm. data struct contains: vector、list、tree(binary tree、bst、bbst)、graph、hashtable... algorithm contains: searching and sorting 、Backtracking、Dynamic planning、N-queen... 
## Prerequisites Microsoft Vistual Studio (2017 above) ## Getting Started 1、download the repository; 2、compile on Vistual Studio (this repository was create、edit、compile、run at Microsoft Vistual Studio 2017) ## Special Thanks About this repository, most of content was learn from professor Junhui DENG, specially thanks for his data structure book、video course and Open source code. Online resources have also inspired me a lot, thanks for all free sharing seniors, whose thought、blog、answers、source code、book ... also help me a lot. If there is any infringement in my repository, please contact me for processing, it's real unintentional. ## License DataStructAndAlgorithm is under MIT License. ## Author <NAME>, <EMAIL> / <EMAIL><file_sep>/******************************************************************************************
 * Data Structures in C++
 * ISBN: 7-302-33064-6 & 7-302-33065-3 & 7-302-29652-2 & 7-302-26883-3
 * <NAME>, <EMAIL>
 * Computer Science & Technology, Tsinghua University
 * Copyright (c) 2006-2013. All rights reserved.
 ******************************************************************************************/
#ifndef HASHTABLE_HASHCODE_H__
#define HASHTABLE_HASHCODE_H__

// Hash a single character.
static size_t hashCode ( char c ) { return ( size_t ) c; }

// Hash an int, and a long long (fold the high 32 bits onto the low 32).
static size_t hashCode ( int k ) { return ( size_t ) k; }
static size_t hashCode ( long long i ) { return ( size_t ) ( ( i >> 32 ) + ( int ) i ); }

/* Cyclic-shift hash code for a C string.
 * NOTE(review): relies on strlen(); the header including this one is assumed
 * to have pulled in <string.h> -- confirm. The right-shift of a (possibly
 * negative) int is also implementation-defined -- confirm the intent. */
static size_t hashCode ( char s[] ) {
   int h = 0; // hash accumulator
   for ( size_t n = strlen ( s ), i = 0; i < n; i++ ) // left to right, one character at a time
   { // rotate the accumulator left by 5 bits, then add the current character
      h = ( h << 5 ) | ( h >> 27 ); h += ( int ) s[i];
   }
   return ( size_t ) h; // the result can be understood as an approximate "polynomial hash code"
} // for English words, the 5-bit cyclic shift is the empirically best choice

#endif // !HASHTABLE_HASHCODE_H__<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ . 
* <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef HAILSTONE_ELEM_H__ #define HAILSTONE_ELEM_H__ /* 3n+1问题(Hailstone问题)*/ template <typename T> struct Hailstone { //函数对象:按照Hailstone规则转化一个T类对象 virtual void operator() ( T& e ) { //假设T可直接做算术运算 int step = 0; //转换所需步数 while ( 1 != e ) { //按奇、偶逐步转换,直至为1 ( e % 2 ) ? e = 3 * e + 1 : e /= 2; step++; } e = step; //返回转换所经步数 } }; #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef PRINT_TRAVERSABLE_H__ #define PRINT_TRAVERSABLE_H__ /****************************************************************************************** * 向量、列表等支持traverse()遍历操作的线性结构 ******************************************************************************************/ template <typename T> //元素类型 void UniPrint::p ( T& s ) { //引用 printf ( "%s[%d]*%d:\n", typeid ( s ).name(), &s, s.size() ); //基本信息 s.traverse ( print ); //通过print()遍历输出所有元素 printf ( "\n" ); } #endif // !PRINT_TRAVERSABLE_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ /****************************************************************************************** * Test of list ******************************************************************************************/ #include<iostream> using namespace std; #include "list.h" #include "../UniPrint/print.h" int testID = 0; //测试编号 /****************************************************************************************** * 随机生成长度为n的列表(其中可能包含重复节点) ******************************************************************************************/ template <typename T> //元素类型 void randomList(List<T> & list, int n) { //在[0, 2n)中选择n个偶数,随机插入列表 ListNodePosi(T) p = (rand() % 2) ? list.insertAsLast(rand() % (T)n * 2) : list.insertAsFirst(rand() % (T)n * 2); for (int i = 1; i < n; i++) p = rand() % 2 ? list.insertB(p, rand() % (T)n * 2) : list.insertA(p, rand() % (T)n * 2); } /****************************************************************************************** * 测试列表 ******************************************************************************************/ #define PRINT(x) { print(x); crc(x); checkOrder(x); } template <typename T> //元素类型 void testList(int testSize) { printf("\n ==== Test %2d. Generate two lists each of size %d by random insertions\n", testID++, testSize); List<T> La; randomList(La, testSize); PRINT(La); List<T> Lb; randomList(Lb, testSize); PRINT(Lb); printf("\n ==== Test %2d. Call list members by rank (with high complexity)\n", testID++); for (int i = 0; i < La.size(); i++) print(La[i]); printf("\n"); for (int i = 0; i < Lb.size(); i++) print(Lb[i]); printf("\n"); printf("\n ==== Test %2d. Concatenation\n", testID++); PRINT(La); PRINT(Lb); while (0 < Lb.size()) La.insertAsLast(Lb.remove(Lb.first())); PRINT(La); PRINT(Lb); printf("\n ==== Test %2d. Increase\n", testID++); PRINT(La); increase(La); PRINT(La); printf("\n ==== Test %2d. 
Lowpass (with high complexity) on\n", testID++); PRINT(La); int i = La.size(); while (0 < --i) { La[i - 1] += La[i]; La[i - 1] >>= 1; } PRINT(La); printf("\n ==== Test %2d. reverse\n", testID++, testSize); La.reverse(); PRINT(La); printf("\n ==== Test %2d. Copy\n", testID++); PRINT(La); List<T> Ld(La); PRINT(Ld); printf("\n ==== Test %2d. Trim by random deletions\n", testID++); PRINT(Ld); while (testSize / 4 < Ld.size()) { int N = rand() % Ld.size(); printf("removing L[%d]=", N); ListNodePosi(T) p = Ld.first(); while (0 < N--) p = p->succ; print(p->data); printf(" ...\n"); Ld.remove(p); PRINT(Ld); } printf("\n ==== Test %2d. Copy\n", testID++); PRINT(La); List<T> Le(La); PRINT(Le); printf("\n ==== Test %2d. FIND in\n", testID++); PRINT(Le); for (int i = 0; i <= testSize * 2; i++) { //逐一测试[0, 2n]中的所有可能 ListNodePosi(T) p = Le.find((T)i); printf("Looking for "); print((T)i); printf(": "); if (p) { printf(" found with"); print(p->data); } else printf(" not found"); printf("\n"); } //正确的结构应该是大致(n+1次)失败、(n次)成功相间 printf("\n ==== Test %2d. Sort\n", testID++); PRINT(La); La.sort(); PRINT(La); printf("\n ==== Test %2d. SEARCH in\n", testID++); PRINT(La); for (int i = 0; i <= testSize * 2; i++) { //逐一测试[0, 2n]中的所有可能 ListNodePosi(T) p = La.search((T)i); printf("Looking for "); print((T)i); printf(": "); printf(" stopped at"); print(p->data); if ((T)i == p->data) printf(" and found"); printf("\n"); } //正确的结构应该是大致(n+1次)失败、(n次)成功相间 printf("\n ==== Test %2d. Remove redundancy in\n", testID++); PRINT(La); printf("%d node(s) removed\n", La.uniquify()); PRINT(La); La.reverse(); PRINT(La); printf("\n ==== Test %2d. Remove redundancy in\n", testID++); PRINT(Le); printf("%d node(s) removed\n", Le.deduplicate()); PRINT(Le); printf("\n ==== Test %2d. 
Sort\n", testID++); PRINT(Le); Le.sort(); PRINT(Le); return; } /****************************************************************************************** * 测试列表 ******************************************************************************************/ int main() { string strNumber = "5"; srand((unsigned int)time(NULL)); testList<int>(atoi(strNumber.c_str())); //元素类型可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * Author: <NAME> * All rights reserved. ******************************************************************************************/ #ifndef LRUCACHE_H__ #define LRUCACHE_H__ #include <iostream> #include <map> #include <unordered_map> using namespace std; #define LRUCacheNodePosi(T_KEY, T_VALUE) LRUCacheNode<T_KEY, T_VALUE>* #define DEFAULT_CACHE_SIZE (8) //默认Cache缓存个数 /* 节点模板类 */ template <typename T_KEY, typename T_VALUE> class LRUCacheNode { public: T_KEY _key; T_VALUE _value; LRUCacheNodePosi(T_KEY, T_VALUE) _pred; /* 前驱 */ LRUCacheNodePosi(T_KEY, T_VALUE) _succ; /* 后继 */ LRUCacheNode() {} /* 默认构造函数 */ /* 构造函数 */ LRUCacheNode(T_KEY key, T_VALUE value, LRUCacheNodePosi(T_KEY, T_VALUE) pred = NULL, LRUCacheNodePosi(T_KEY, T_VALUE) succ = NULL) : \ _key(key), _value(value), _pred(pred), _succ(succ) {} LRUCacheNodePosi(T_KEY, T_VALUE) insertAsPred(T_KEY const& key, T_VALUE const& value) /* 紧靠当前节点之前插入新节点 */ { LRUCacheNodePosi(T_KEY, T_VALUE) ptrInsertingNode = new LRUCacheNode(key, value, _pred, this); _pred->_succ = ptrInsertingNode; _pred = ptrInsertingNode; return ptrInsertingNode; } LRUCacheNodePosi(T_KEY, T_VALUE) insertAsSucc(T_KEY const& key, T_VALUE const& value) /* 紧随当前节点之后插入新节点 */ { LRUCacheNodePosi(T_KEY, T_VALUE) ptrInsertingNode = new LRUCacheNode(key, value, this, _succ); _succ->_pred = ptrInsertingNode; _succ = ptrInsertingNode; return ptrInsertingNode; } }; /* LRUCache模板类-使用map实现 */ template <typename T_KEY, typename T_VALUE> 
class LRUCacheUseMap{ private: int _count; //当前节点总数 int _size; //规模 LRUCacheNodePosi(T_KEY, T_VALUE) _cachelist; //列表 LRUCacheNodePosi(T_KEY, T_VALUE) _head; //列表头 LRUCacheNodePosi(T_KEY, T_VALUE) _tail; //列表尾 map<T_KEY, LRUCacheNodePosi(T_KEY, T_VALUE)> _mp; //记录key的map void init(); //LRUCacheUseMap初始化 void moveNodeFirst(LRUCacheNodePosi(T_KEY, T_VALUE) &ptrNode); //将节点移动到链表头 LRUCacheNodePosi(T_KEY, T_VALUE) insertAsFirst(T_KEY const& key, T_VALUE const& value); //插入新节点到链表头 void removeLastNode(T_KEY &keyName); //删除尾节点 public: LRUCacheUseMap() { init(); } //构造函数 void setCacheSize(unsigned int size); //设置缓存空间大小 /****************************************************************** * 获取缓存值 * 时间复杂度 :O(logn) = O(logn)的map查找复杂度 + O(1)的list移动命中节点到列表头的复杂度 ******************************************************************/ T_VALUE get(T_KEY key); /****************************************************************** * 设置缓存值 * 时间复杂度: * 已存在该key :O(logn) = O(logn)的map查找复杂度 + O(1)的list移动命中节点到列表头的复杂度 * 不存在该key且不需要淘汰 :O(logn) = O(logn)的map查找复杂度 + O(logn)的map插入复杂度 + O(1)的list插入复杂度(插入到列表头) * 没命中且需要淘汰 :O(logn) = O(logn)的map查找、插入复杂度 + O(logn)的map删除复杂度 + O(1)的list删除、插入复杂度(插入到列表头) ******************************************************************/ void set(T_KEY key, T_VALUE value); void printLRUCacheList(); //打印输出函数 }; /* 将节点移动到链表头 */ template <typename T_KEY, typename T_VALUE> void LRUCacheUseMap<T_KEY, T_VALUE>::moveNodeFirst(LRUCacheNodePosi(T_KEY, T_VALUE) &ptrNode) { if (ptrNode == _head->_succ) return; //已经在链表头,则不需要移动 ptrNode->_pred->_succ = ptrNode->_succ; //从链表中摘除节点 ptrNode->_succ->_pred = ptrNode->_pred; _head->_succ = ptrNode; //插入到链表中的头结点 ptrNode->_pred = _head; ptrNode->_succ = _cachelist; _cachelist->_pred = ptrNode; _cachelist = ptrNode; } /* 新建节点并插入到链表头 */ template<typename T_KEY, typename T_VALUE> LRUCacheNodePosi(T_KEY, T_VALUE) LRUCacheUseMap<T_KEY, T_VALUE>::insertAsFirst(T_KEY const & key, T_VALUE const & value) { _count++; return _head->insertAsSucc(key, value); } 
template <typename T_KEY, typename T_VALUE> void LRUCacheUseMap<T_KEY, T_VALUE>::init()
{  /* initialize an empty cache: head/tail sentinels linked together, default capacity */
   _size = DEFAULT_CACHE_SIZE;
   _count = 0;
   _cachelist = NULL;
   _head = new LRUCacheNode< T_KEY, T_VALUE>;
   _tail = new LRUCacheNode< T_KEY, T_VALUE>;
   _head->_pred = NULL; _tail->_succ = NULL;
   _head->_succ = _tail; _tail->_pred = _head;
}

/* Remove the tail (least-recently-used) node; its key is reported through keyName. */
template <typename T_KEY, typename T_VALUE> void LRUCacheUseMap<T_KEY, T_VALUE>::removeLastNode(T_KEY &keyName)
{
   LRUCacheNodePosi(T_KEY, T_VALUE) ptrDelNode = _tail->_pred;
   LRUCacheNodePosi(T_KEY, T_VALUE) ptrDelNodePre = ptrDelNode->_pred;
   ptrDelNodePre->_succ = _tail; _tail->_pred = ptrDelNodePre; /* unlink the victim */
   _count--;
   keyName = ptrDelNode->_key;
   delete ptrDelNode;
   return;
}

/* Set the cache capacity. */
template <typename T_KEY, typename T_VALUE> void LRUCacheUseMap<T_KEY, T_VALUE>::setCacheSize(unsigned int size) { _size = size; }

/* Look up key; on a hit, move the node to the front (MRU) and return its value.
 * Returns -1 on a miss — assumes T_VALUE is convertible from int (TODO confirm with callers). */
template <typename T_KEY, typename T_VALUE> T_VALUE LRUCacheUseMap<T_KEY, T_VALUE>::get(T_KEY key)
{
   if (NULL == _cachelist) /* cache is empty */
   {
      return -1;
   }
   /* FIX: 'typename' is required here — map<...>::iterator is a dependent type inside a
    * template; MSVC's permissive mode accepted the old form, GCC/Clang reject it */
   typedef typename map<T_KEY, LRUCacheNodePosi(T_KEY, T_VALUE)>::iterator it_map;
   it_map it = _mp.find(key);
   if (_mp.end() == it) /* key not cached: report -1 */
   {
      return -1;
   }
   else
   {
      LRUCacheNodePosi(T_KEY, T_VALUE) ptrHit = it->second;
      moveNodeFirst(ptrHit); /* move the hit node to the front of the list */
      return _cachelist->_value; /* return the hit value */
   }
}

/* Insert or update the cached value for key, evicting the LRU entry when full. */
template <typename T_KEY, typename T_VALUE> void LRUCacheUseMap<T_KEY, T_VALUE>::set(T_KEY key, T_VALUE value)
{
   if (NULL == _cachelist) /* first insertion */
   {
      insertAsFirst(key, value);
      _cachelist = _head->_succ;
      _mp[key] = _cachelist;
   }
   else
   {
      /* FIX: 'typename' is required for the dependent iterator type (see get() above) */
      typedef typename map<T_KEY, LRUCacheNodePosi(T_KEY, T_VALUE)>::iterator it_map;
      it_map it = _mp.find(key);
      if (_mp.end() == it) /* miss */
      {
         if (_count == _size) /* cache full: evict */
         {
            T_KEY keyName;
            removeLastNode(keyName); /* remove the tail node */
            _mp.erase(keyName);      /* drop its map entry */
         }
         /* create the node and link it at the front */
         insertAsFirst(key, value);
         _cachelist = _head->_succ;
         _mp[key] = _cachelist;
      }
      else /* hit */
      {
         LRUCacheNodePosi(T_KEY, T_VALUE) ptrHit = it->second;
ptrHit->_value = value;
         moveNodeFirst(ptrHit); /* move the hit node to the front of the list */
      }
   }
}

/* Dump the cache contents from most- to least-recently used. */
template <typename T_KEY, typename T_VALUE> void LRUCacheUseMap<T_KEY, T_VALUE>::printLRUCacheList()
{
   LRUCacheNodePosi(T_KEY, T_VALUE) p = _cachelist;
   int index = 0;
   while ((p != _tail) && (index < _count))
   {
      cout << "index = "<< index << ", key = " << p->_key << ", value = " << p->_value << endl;
      p = p->_succ;
      index++;
   }
}

/* LRUCache template class — hashtable-backed (std::unordered_map is implemented as a hashtable) */
template <typename T_KEY, typename T_VALUE> class LRUCacheUseHashtable {
private:
   int _count;                                  // current number of nodes
   int _size;                                   // capacity
   LRUCacheNodePosi(T_KEY, T_VALUE) _cachelist; // list (first data node)
   LRUCacheNodePosi(T_KEY, T_VALUE) _head;      // head sentinel
   LRUCacheNodePosi(T_KEY, T_VALUE) _tail;      // tail sentinel
   unordered_map<T_KEY, LRUCacheNodePosi(T_KEY, T_VALUE)> _mp; // key -> node index
   void init();                                                         // initialize LRUCacheUseHashtable
   void moveNodeFirst(LRUCacheNodePosi(T_KEY, T_VALUE) &ptrNode);       // move a node to the front of the list
   LRUCacheNodePosi(T_KEY, T_VALUE) insertAsFirst(T_KEY const& key, T_VALUE const& value); // insert a new node at the front
   void removeLastNode(T_KEY &keyName);                                 // remove the tail node
public:
   LRUCacheUseHashtable() { init(); }     // constructor
   void setCacheSize(unsigned int size);  // set the cache capacity
   /******************************************************************
    * Get a cached value.
    * Complexity: O(1) = O(1) hashtable lookup + O(1) move-to-front of the hit node
    ******************************************************************/
   T_VALUE get(T_KEY key);
   /******************************************************************
    * Set a cached value.
    * Complexity:
    *   key already present:        O(1) = O(1) hashtable lookup + O(1) move-to-front
    *   key absent, no eviction:    O(1) = O(1) hashtable lookup + O(1) hashtable insert + O(1) list front-insert
    *   key absent, with eviction:  O(1) = O(1) hashtable lookup/insert + O(1) hashtable erase + O(1) list remove/front-insert
    ******************************************************************/
   void set(T_KEY key, T_VALUE value);
   void printLRUCacheList();              // debug print
};

/* Move a node to the front of the list. */
template <typename T_KEY, typename T_VALUE> void LRUCacheUseHashtable<T_KEY,
T_VALUE>::moveNodeFirst(LRUCacheNodePosi(T_KEY, T_VALUE) &ptrNode) { if (ptrNode == _head->_succ) return; //已经在链表头,则不需要移动 ptrNode->_pred->_succ = ptrNode->_succ; //从链表中摘除节点 ptrNode->_succ->_pred = ptrNode->_pred; _head->_succ = ptrNode; //插入到链表中的头结点 ptrNode->_pred = _head; ptrNode->_succ = _cachelist; _cachelist->_pred = ptrNode; _cachelist = ptrNode; } template<typename T_KEY, typename T_VALUE> LRUCacheNodePosi(T_KEY, T_VALUE) LRUCacheUseHashtable<T_KEY, T_VALUE>::insertAsFirst(T_KEY const & key, T_VALUE const & value) { _count++; return _head->insertAsSucc(key, value); } template <typename T_KEY, typename T_VALUE> void LRUCacheUseHashtable<T_KEY, T_VALUE>::init() { _size = DEFAULT_CACHE_SIZE; _count = 0; _cachelist = NULL; _head = new LRUCacheNode< T_KEY, T_VALUE>; _tail = new LRUCacheNode< T_KEY, T_VALUE>; _head->_pred = NULL; _tail->_succ = NULL; _head->_succ = _tail; _tail->_pred = _head; } /* 删除尾节点*/ template <typename T_KEY, typename T_VALUE> void LRUCacheUseHashtable<T_KEY, T_VALUE>::removeLastNode(T_KEY &keyName) { LRUCacheNodePosi(T_KEY, T_VALUE) ptrDelNode = _tail->_pred; LRUCacheNodePosi(T_KEY, T_VALUE) ptrDelNodePre = ptrDelNode->_pred; ptrDelNodePre->_succ = _tail; _tail->_pred = ptrDelNodePre; _count--; keyName = ptrDelNode->_key; delete ptrDelNode; return; } /* 设置缓存空间大小 */ template <typename T_KEY, typename T_VALUE> void LRUCacheUseHashtable<T_KEY, T_VALUE>::setCacheSize(unsigned int size) { _size = size; } /* 获取输入key的缓存值 */ template <typename T_KEY, typename T_VALUE> T_VALUE LRUCacheUseHashtable<T_KEY, T_VALUE>::get(T_KEY key) { if (NULL == _cachelist) /* 本地队列为空,则返回 */ { return -1; } typedef unordered_map<T_KEY, LRUCacheNodePosi(T_KEY, T_VALUE)>::iterator it_unorderedmap; it_unorderedmap it = _mp.find(key); if (_mp.end() == it) /* 如果在Cache中不存在该key,则返回-1 */ { return -1; } else { LRUCacheNodePosi(T_KEY, T_VALUE) ptrHit = it->second; moveNodeFirst(ptrHit); /* 将节点置于链表头部 */ return _cachelist->_value; /* 返回命中值 */ } } /* 设置输入key的缓存值 */ template <typename 
T_KEY, typename T_VALUE> void LRUCacheUseHashtable<T_KEY, T_VALUE>::set(T_KEY key, T_VALUE value) { if (NULL == _cachelist) { insertAsFirst(key, value); _cachelist = _head->_succ; _mp[key] = _cachelist; } else { typedef unordered_map<T_KEY, LRUCacheNodePosi(T_KEY, T_VALUE)>::iterator it_unorderedmap; it_unorderedmap it = _mp.find(key); if (_mp.end() == it) /* 没有命中 */ { if (_count == _size) /* Cache已满 */ { T_KEY keyName; removeLastNode(keyName); /* 删除尾节点 */ _mp.erase(keyName); /* 在map中删除 */ } /* 新建节点并插入链表头 */ insertAsFirst(key, value); _cachelist = _head->_succ; _mp[key] = _cachelist; } else /* 命中 */ { LRUCacheNodePosi(T_KEY, T_VALUE) ptrHit = it->second; ptrHit->_value = value; moveNodeFirst(ptrHit); /* 将节点置于链表头部 */ } } } /* 打印输出函数 */ template <typename T_KEY, typename T_VALUE> void LRUCacheUseHashtable<T_KEY, T_VALUE>::printLRUCacheList() { LRUCacheNodePosi(T_KEY, T_VALUE) p = _cachelist; int index = 0; while ((p != _tail) && (index < _count)) { cout << "index = " << index << ", key = " << p->_key << ", value = " << p->_value << endl; p = p->_succ; index++; } } #endif <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ /****************************************************************************************** * Test of RedBlack Tree ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "RedBlack.h" /****************************************************************************************** * Test a RedBlack ******************************************************************************************/ template <typename T> //元素类型 void testRedBlack ( int n ) { RedBlack<T> rb; //for ( int i = 0; i < 256; i++ ) { // int e = i % 2 ? i : -i; // printf("Inserting "); print(e); printf(" ...\n"); // BinNodePosi(T) p = rb.insert(e); // if ( p->data != e) { print(p->data); printf(" <> "); print(e); printf("\n"); } // printf("Done with"), print(p), printf("\n"), print(rb); //} //return; while ( rb.size() < n ) { T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e switch ( dice ( 6 ) ) { case 0: { //查找(概率 = 1/6) printf ( "Searching for " ); print ( e ); printf ( " ...\n" ); BinNodePosi(T) p = rb.search ( e ); p ? printf ( "Found with" ), print ( p ), printf ( "\n" ) : printf ( "Not found\n" ); break; } case 1: case 2: { //删除(概率 = 2/6) printf ( "Removing " ); print ( e ); printf ( " ...\n" ); rb.remove ( e ) ? printf ( "Done\n" ), print ( rb ) : (void)printf ( "Not exists\n" ); break; } default: { //插入(概率 = 3/6) printf ( "Inserting " ); print ( e ); printf ( " ...\n" ); BinNodePosi(T) p = rb.insert ( e ); if ( p->data != e ) { print ( p->data ); printf ( " <> " ); print ( e ); printf ( "\n" ); } printf ( "Done with" ), print ( p ), printf ( "\n" ), print ( rb ); break; } } } while ( rb.size() > 0 ) { T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e printf ( "Removing " ); print ( e ); printf ( " ...\n" ); rb.remove ( e ) ? 
printf ( "Done\n" ), print ( rb ) : (void)printf ( "Not exists\n" ); } } /****************************************************************************************** * 测试主入口 ******************************************************************************************/ int main ( int argc, char* argv[] ) { int size = 10; srand ( ( unsigned int ) time ( NULL ) ); testRedBlack<int> (size); //元素类型可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef GRAPHMATRIX_H__ #define GRAPHMATRIX_H__ #include "../vector/vector.h" /* 引入向量 */ #include "../GraphMatrix/graph.h" /* 引入图ADT */ template <typename Tv> struct Vertex { /* 顶点对象(为简化起见,并未严格封装) */ Tv data; /* 数据、出入度数、状态 */ int inDegree, outDegree; VStatus status; int dTime, fTime; /* 时间标签 */ int parent; /* 在遍历树中的父节点、优先级数 */ int priority; Vertex ( Tv const& d = ( Tv ) 0 ) : /* 构造新顶点 */ data ( d ), inDegree ( 0 ), outDegree ( 0 ), status ( UNDISCOVERED ), dTime ( -1 ), fTime ( -1 ), parent ( -1 ), priority ( INT_MAX ) {}/* 暂不考虑权重溢出 */ }; template <typename Te> struct Edge { /* 边对象(为简化起见,并未严格封装) */ Te data; /* 数据、权重、类型 */ int weight; EType type; Edge ( Te const& d, int w ) : data ( d ), weight ( w ), type ( UNDETERMINED ) {} /* 构造 */ }; /* 基于向量,以邻接矩阵形式实现的图 */ template <typename Tv, typename Te> /* 顶点类型、边类型 */ class GraphMatrix : public Graph<Tv, Te> { private: Vector< Vertex< Tv > > V; /* 顶点集(向量) */ Vector< Vector< Edge< Te > * > > E; /* 边集(邻接矩阵) */ public: GraphMatrix() { this->n = this->e = 0; } /* 构造 */ ~GraphMatrix() { /* 析构 */ for ( int j = 0; j < this->n; j++ ) /* 所有动态创建的 */ for ( int k = 0; k < this->n; k++ ) /* 边记录 */ delete E[j][k]; /* 逐条清除 */ } /* 顶点的基本操作:查询第i个顶点(0 <= i < n) */ virtual Tv& vertex ( int i ) { return V[i].data; } /* 
数据 */ virtual int inDegree ( int i ) { return V[i].inDegree; } /* 入度 */ virtual int outDegree ( int i ) { return V[i].outDegree; } /* 出度 */ virtual int firstNbr ( int i ) { return nextNbr ( i, this->n ); } /* 首个邻接顶点 */ virtual int nextNbr ( int i, int j ) /* 相对于顶点j的下一邻接顶点(改用邻接表可提高效率) */ { while ( ( -1 < j ) && ( !exists ( i, --j ) ) ); /* 逆向线性试探 */ return j; } virtual VStatus& status ( int i ) { return V[i].status; } /* 状态 */ virtual int& dTime ( int i ) { return V[i].dTime; } /* 时间标签dTime */ virtual int& fTime ( int i ) { return V[i].fTime; } /* 时间标签fTime */ virtual int& parent ( int i ) { return V[i].parent; } /* 在遍历树中的父亲 */ virtual int& priority ( int i ) { return V[i].priority; } /* 在遍历树中的优先级数 */ /* 顶点的动态操作 */ virtual int insert ( Tv const& vertex ) { /* 插入顶点,返回编号 */ for ( int j = 0; j < this->n; j++ ) E[j].insert ( NULL ); /* 各顶点预留一条潜在的关联边 */ this->n++; E.insert ( Vector<Edge<Te>*> (this->n, this->n, ( Edge<Te>* ) NULL ) ); /* 创建新顶点对应的边向量 */ return V.insert ( Vertex<Tv> ( vertex ) ); /* 顶点向量增加一个顶点 */ } virtual Tv remove ( int i ) { /* 删除第i个顶点及其关联边(0 <= i < n) */ for ( int j = 0; j < this->n; j++ ) /* 所有出边 */ if ( exists ( i, j ) ) { /* 逐条删除 */ delete E[i][j]; V[j].inDegree--; } E.remove ( i ); /* 删除第i行 */ this->n--; Tv vBak = vertex ( i ); V.remove ( i ); /* 删除顶点i */ for ( int j = 0; j < this->n; j++ ) /* 所有入边 */ if ( Edge<Te> * e = E[j].remove ( i ) ) /* 逐条删除 */ { delete e; V[j].outDegree--; } return vBak; /* 返回被删除顶点的信息 */ } /* 边的确认操作 */ virtual bool exists ( int i, int j ) /* 边(i, j)是否存在 */ { return ( 0 <= i ) && ( i < this->n ) && ( 0 <= j ) && ( j < this->n ) && E[i][j] != NULL; } /* 边的基本操作:查询顶点i与j之间的联边(0 <= i, j < n且exists(i, j)) */ virtual EType & type ( int i, int j ) { return E[i][j]->type; } /* 边(i, j)的类型 */ virtual Te& edge ( int i, int j ) { return E[i][j]->data; } /* 边(i, j)的数据 */ virtual int& weight ( int i, int j ) { return E[i][j]->weight; } /* 边(i, j)的权重 */ /* 边的动态操作 */ virtual void insert ( Te const& edge, int w, int i, int j ) { /* 插入权重为w的边e 
= (i, j) */ if ( exists ( i, j ) ) return; /* 确保该边尚不存在 */ E[i][j] = new Edge<Te> ( edge, w ); /* 创建新边 */ this->e++; V[i].outDegree++; V[j].inDegree++; /* 更新边计数与关联顶点的度数 */ } virtual Te remove ( int i, int j ) { /* 删除顶点i和j之间的联边(exists(i, j)) */ Te eBak = edge ( i, j ); /* 备份后删除边记录 */ delete E[i][j]; E[i][j] = NULL; this->e--; /* 更新边计数与关联顶点的度数v */ V[i].outDegree--; V[i].outDegree--; V[j].inDegree--; return eBak; /* 返回被删除边的信息 */ } }; #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ /****************************************************************************************** * Test of queue ******************************************************************************************/ #include<iostream> using namespace std; #include "queue.h" #include "../_share/util.h" #include "../UniPrint/print.h" int testID = 0; //测试编号 /****************************************************************************************** * 测试栈 ******************************************************************************************/ template <typename T> //元素类型 void testQueue(int n) { Queue<T> Q; printf("\n ==== Test %2d. Growing queue\n", testID++); while (Q.size() < n) { (Q.empty() || (30 < dice(100))) ? Q.enqueue(dice((T)2 * n)) : //70%入队 (void)Q.dequeue(); //30%出队 print(Q); } printf("\n ==== Test %2d. Shrinking queue\n", testID++); while (!Q.empty()) { (70 < dice(100)) ? 
Q.enqueue(dice((T)2 * n)) : //30%入队 (void)Q.dequeue(); //70%出队 print(Q); } } /****************************************************************************************** * 测试队列 ******************************************************************************************/ int main() { string strNumber = "5"; srand((unsigned int)time(NULL)); testQueue<int>(atoi(strNumber.c_str())); //元素类型可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef INCREASE_VECTOR_H__ #define INCREASE_VECTOR_H__ /*DSA*/#include "..\vector\vector.h" template <typename T> void increase ( Vector<T> & V ) //统一递增向量中的各元素 { V.traverse ((Increase<T> &) Increase<T>() ); } //以Increase<T>()为基本操作进行遍历 #endif // !INCREASE_VECTOR_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef PRINT_BINTREE_H__ #define PRINT_BINTREE_H__ /****************************************************************************************** * 二叉树输出打印 ******************************************************************************************/ #include "../bitmap/bitmap.h" //使用位图记录分支转向 #define ROOT 0 #define L_CHILD 1 #define R_CHILD -1*L_CHILD /****************************************************************************************** * 基础BinTree ******************************************************************************************/ template <typename T> //元素类型 void UniPrint::p ( BinTree<T> & bt ) { //引用 printf ( "%s[%d]*%d:\n", typeid ( bt ).name(), &bt, bt.size() ); //基本信息 Bitmap* branchType = new Bitmap; //记录当前节点祖先的方向 printBinTree ( bt.root(), -1, ROOT, branchType ); //树状结构 release ( branchType ); printf ( "\n" ); } /****************************************************************************************** * 基于BinTree实现的BST ******************************************************************************************/ template <typename T> //元素类型 void UniPrint::p ( BST<T> & bt ) { //引用 printf ( "%s[%d]*%d:\n", typeid ( bt ).name(), &bt, bt.size() ); //基本信息 Bitmap* branchType = new Bitmap; //记录当前节点祖先的方向 printBinTree ( bt.root(), -1, ROOT, branchType ); //树状结构 release ( branchType ); printf ( "\n" ); } /****************************************************************************************** * 基于BST实现的AVL * 其中调用的BinNode的打印例程,可以显示BF状态 ******************************************************************************************/ template <typename T> //元素类型 void UniPrint::p(AVL<T> & avl) { //引用 printf("%s[%d]*%d:\n", typeid (avl).name(), &avl, avl.size()); //基本信息 Bitmap* branchType = new Bitmap; //记录当前节点祖先的方向 printBinTree(avl.root(), -1, ROOT, branchType); //树状结构 release(branchType); printf("\n"); } 
/****************************************************************************************** * 基于BST实现的RedBlack * 其中调用的BinNode的打印例程,可以显示BF状态 ******************************************************************************************/ template <typename T> //元素类型 void UniPrint::p(RedBlack<T> & rb) { //引用 printf("%s[%d]*%d:\n", typeid (rb).name(), &rb, rb.size()); //基本信息 Bitmap* branchType = new Bitmap; //记录当前节点祖先的方向 printBinTree(rb.root(), -1, ROOT, branchType); //树状结构 release(branchType); printf("\n"); } /****************************************************************************************** * 基于BST实现的Splay * 鉴于Splay不必设置bf之类的附加标识,其打印例程与BST完全一致 ******************************************************************************************/ template <typename T> //元素类型 void UniPrint::p(Splay<T> & bt) { //引用 printf("%s[%d]*%d:\n", typeid (bt).name(), &bt, bt.size()); //基本信息 Bitmap* branchType = new Bitmap; //记录当前节点祖先的方向 printBinTree(bt.root(), -1, ROOT, branchType); //树状结构 release(branchType); printf("\n"); } /****************************************************************************************** * 二叉树各种派生类的统一打印 ******************************************************************************************/ template <typename T> //元素类型 static void printBinTree ( BinNodePosi(T) bt, int depth, int type, Bitmap* bType ) { if ( !bt ) return; if ( -1 < depth ) //设置当前层的拐向标志 R_CHILD == type ? 
bType->set ( depth ) : bType->clear ( depth ); printBinTree ( bt->rc, depth + 1, R_CHILD, bType ); //右子树(在上) print ( bt ); printf ( " *" ); for ( int i = -1; i < depth; i++ ) //根据相邻各层 if ( ( 0 > i ) || bType->test ( i ) == bType->test ( i + 1 ) ) //的拐向是否一致,即可确定 printf ( " " ); //是否应该 else printf ( "│ " ); //打印横线 switch ( type ) { case R_CHILD : printf ( "┌─" ); break; case L_CHILD : printf ( "└─" ); break; default : printf ( "──" ); break; //root } print ( bt ); #if defined(DSA_HUFFMAN) if ( IsLeaf ( *bt ) ) bType->print ( depth + 1 ); //输出Huffman编码 #endif printf ( "\n" ); printBinTree ( bt->lc, depth + 1, L_CHILD, bType ); //左子树(在下) } #endif // !PRINT_BINTREE_H__<file_sep>/****************************************************************************************** * Data Structures in C++ * ISBN: 7-302-33064-6 & 7-302-33065-3 & 7-302-29652-2 & 7-302-26883-3 * <NAME>, <EMAIL> * Computer Science & Technology, Tsinghua University * Copyright (c) 2006-2013. All rights reserved. ******************************************************************************************/ /****************************************************************************************** * Test of Binary Search Tree ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "BST.h" /****************************************************************************************** * Test a BST ******************************************************************************************/ template <typename T> void testBST ( int n ) { BST<T> bst; while ( bst.size() < n ) bst.insert ( dice ( ( T ) n * 3 ) ); print ( bst ); //随机创建 bst.stretchToLPath(); print ( bst ); //伸直成撇 while ( !bst.empty() ) bst.remove ( bst.root()->data ); //清空 while ( bst.size() < n ) bst.insert ( dice ( ( T ) n * 3 ) ); print ( bst ); //随机创建 bst.stretchToRPath(); print ( bst ); //伸直成捺 while ( 
!bst.empty() ) bst.remove ( bst.root()->data ); //清空 while ( bst.size() < n ) { //随机插入、查询、删除 T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e switch ( dice ( 3 ) ) { case 0: { //查找,成功率 <= 33.3% printf ( "Searching for " ); print ( e ); printf ( " ... " ); BinNodePosi(T) & p = bst.search ( e ); p ? (void)printf ( "Found with" ), print ( p->data ), (void)printf ( "\n" ) : (void)printf ( "not found\n" ); break; } case 1: { //删除,成功率 <= 33.3% printf ( "Removing " ); print ( e ); printf ( " ... " ); bst.remove ( e ) ? (void)printf ( "Done\n" ), print ( bst ) :(void)printf ( "not exists\n" ); break; } default: {//插入,成功率 == 100% printf ( "Inserting " ); print ( e ); printf ( " ... " ); printf ( "Done with" ), print ( bst.insert ( e )->data ), printf ( "\n" ), print ( bst ); break; } } } while ( bst.size() > 0 ) { //清空 T e = dice ( ( T ) n * 3 ); //[0, 3n)范围内的e printf ( "Removing " ); print ( e ); printf ( " ... " ); bst.remove ( e ) ? printf ( "Done\n" ), print ( bst ) : (void)printf ( "not exists\n" ); } } /****************************************************************************************** * 测试主入口 ******************************************************************************************/ int main ( int argc, char* argv[] ) { int size = 10; srand ( ( unsigned int ) time ( NULL ) ); testBST<int> (size); //元素类型可以在这里任意选择 return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef BINNODE_H__ #define BINNODE_H__ #include "../stack/[email protected]" /* 引入栈模板类 */ #include "../queue/queue.h" /* 引入队列模板类 */ #define BinNodePosi(T) BinNode<T>* /* 节点位置 */ #define stature(p) ((p) ? 
(p)->height : -1) /* 节点高度(与“空树高度为-1”的约定相统一) */ typedef enum { RB_RED, RB_BLACK } RBColor; /* 节点颜色 */ /****************************************************************************************** * BinNode状态与性质的判断 ******************************************************************************************/ #define IsRoot(x) ( ! ( (x).parent ) ) #define IsLChild(x) ( ! IsRoot(x) && ( & (x) == (x).parent->lc ) ) #define IsRChild(x) ( ! IsRoot(x) && ( & (x) == (x).parent->rc ) ) #define HasParent(x) ( ! IsRoot(x) ) #define HasLChild(x) ( (x).lc ) #define HasRChild(x) ( (x).rc ) #define HasChild(x) ( HasLChild(x) || HasRChild(x) ) /* 至少拥有一个孩子 */ #define HasBothChild(x) ( HasLChild(x) && HasRChild(x) ) /* 同时拥有两个孩子 */ #define IsLeaf(x) ( ! HasChild(x) ) /****************************************************************************************** * 与BinNode具有特定关系的节点及指针 ******************************************************************************************/ /*兄弟*/ #define sibling(p) ( IsLChild( *(p) ) ? (p)->parent->rc : (p)->parent->lc ) /*叔叔*/ #define uncle(x) ( IsLChild( * ( (x)->parent ) ) ? (x)->parent->parent->rc : (x)->parent->parent->lc ) /*来自父亲的引用*/ #define FromParentTo(x) ( IsRoot(x) ? this->_root : ( IsLChild(x) ? 
(x).parent->lc : (x).parent->rc ) ) /****************************************************************************************** * 与AVL树具有特定关系的相关概念 ******************************************************************************************/ /*高度更新常规条件*/ /*DSA*/#define HeightUpdated(x)( (x).height == 1 + max( stature( (x).lc ), stature( (x).rc ) ) ) #define Balanced(x) ( stature( (x).lc ) == stature( (x).rc ) ) /* 理想平衡条件 */ #define BalFac(x) ( stature( (x).lc ) - stature( (x).rc ) ) /* 平衡因子 */ #define AvlBalanced(x) ( ( -2 < BalFac(x) ) && ( BalFac(x) < 2 ) ) /* AVL平衡条件 */ /****************************************************************************************** * 与红黑树具有特定关系的相关概念 ******************************************************************************************/ #define IsBlack(p) ( ! (p) || ( RB_BLACK == (p)->color ) ) /* 外部节点也视作黑节点 */ #define IsRed(p) ( ! IsBlack(p) ) /* 非黑即红 */ #define BlackHeightUpdated(x) ( /*RedBlack高度更新条件*/ \ ( stature( (x).lc ) == stature( (x).rc ) ) && \ ( (x).height == ( IsRed(& x) ? 
stature( (x).lc ) : stature( (x).lc ) + 1 ) ) \ ) /* 二叉树节点模板类 */ template <typename T> struct BinNode { /* 成员(为简化描述起见统一开放,读者可根据需要进一步封装) */ T data; /* 数值 */ BinNodePosi(T) parent; /* 父节点及左、右孩子 */ BinNodePosi(T) lc; BinNodePosi(T) rc; int height; /* 高度(通用) */ int npl; /* Null Path Length(左式堆,也可直接用height代替) */ RBColor color; /* 颜色(红黑树) */ /* 构造函数 */ BinNode() : parent ( NULL ), lc ( NULL ), rc ( NULL ), height ( 0 ), npl ( 1 ), color ( RB_RED ) { } BinNode ( T e, BinNodePosi(T) p = NULL, BinNodePosi(T) lc = NULL, BinNodePosi(T) rc = NULL, int h = 0, int l = 1, RBColor c = RB_RED ) : data ( e ), parent ( p ), lc ( lc ), rc ( rc ), height ( h ), npl ( l ), color ( c ) { } /* 操作接口 */ int size(); /* 统计当前节点后代总数,亦即以其为根的子树的规模 */ BinNodePosi(T) insertAsLC ( T const& e); /* 作为当前节点的左孩子插入新节点 */ BinNodePosi(T) insertAsRC ( T const& e); /* 作为当前节点的右孩子插入新节点 */ BinNodePosi(T) succ(); /* 取当前节点的直接后继 */ template <typename VST> void travPre ( VST& visit); /* 子树先序遍历 */ template <typename VST> void travIn ( VST& visit); /* 子树中序遍历 */ template <typename VST> void travPost ( VST& visit); /* 子树后序遍历 */ template <typename VST> void travLevel(VST& visit); /* 子树层次遍历 */ /* 比较器、判等器(各列其一,其余自行补充) */ bool operator< ( BinNode const& bn ) { return data < bn.data; } /* 小于 */ bool operator== ( BinNode const& bn ) { return data == bn.data; } /* 等于 */ /*DSA*/ /*DSA*/BinNodePosi(T) zig(); /* 顺时针旋转 */ /*DSA*/BinNodePosi(T) zag(); /* 逆时针旋转 */ /*DSA*/void stretchByZig(BinNodePosi(T) & x, int h); /* 通过zig旋转调整,将子树x拉伸成最右侧通路 */ /*DSA*/void stretchByZag(BinNodePosi(T) & x); /* 通过zag旋转调整,将子树x拉伸成最左侧通路 */ }; /* 统计当前节点后代总数,即以其为根的子树规模 */ template <typename T> int BinNode<T>::size() { int s = 1; /* 计入本身 */ if (lc) s += lc->size(); /* 递归计入左子树规模 */ if (rc) s += rc->size(); /* 递归计入右子树规模 */ return s; } /* 将e作为当前节点的左孩子插入二叉树 */ template <typename T> BinNodePosi(T) BinNode<T>::insertAsLC(T const& e) { return lc = new BinNode(e, this); } /* 将e作为当前节点的右孩子插入二叉树 */ template <typename T> BinNodePosi(T) BinNode<T>::insertAsRC(T const& 
e) { return rc = new BinNode(e, this); }

/* Locate the in-order (direct) successor of this node; returns NULL when this is the overall last node. */
template <typename T> BinNodePosi(T) BinNode<T>::succ() {
    BinNodePosi(T) s = this; /* temporary holder for the successor */
    if (rc) { /* with a right subtree, the successor must live inside it, namely */
        s = rc; /* enter the right subtree and descend */
        while (HasLChild(*s)) s = s->lc; /* to its leftmost (minimal) node */
    } else { /* otherwise the successor is "the lowest ancestor holding this node in its LEFT subtree": */
        while (IsRChild(*s)) s = s->parent; /* climb while we are still a right child (moving up-left) */
        s = s->parent; /* one final step up-right reaches the successor (NULL if none exists) */
    }
    return s;
}

/* Unified entry point for pre-order traversal of the subtree rooted at this node. */
template <typename T> template <typename VST> /* element type, visitor */
void BinNode<T>::travPre(VST& visit) {
    switch (rand() % 3) { /* a variant is picked at random purely for testing; all three are equivalent */
        case 1:travPre_I1(this, visit); break; /* iterative #1 */
        case 2:travPre_I2(this, visit); break; /* iterative #2 */
        default:travPre_R(this, visit); break; /* recursive */
    }
}

/* Pre-order traversal (iterative #1): explicit stack, children pushed right-then-left. */
template <typename T, typename VST> /* element type, visitor */
void travPre_I1(BinNodePosi(T) x, VST& visit) {
    Stack<BinNodePosi(T)> S; /* auxiliary stack */
    if (x) S.push(x); /* start with the root */
    while (!S.empty()) { /* loop until the stack drains */
        x = S.pop(); /* pop and visit the current node; non-empty children go in right first, left second */
        visit(x->data);
        if (HasRChild(*x)) S.push(x->rc);
        if (HasLChild(*x)) S.push(x->lc);
    }
}

/* From x, follow left links as deep as possible, visiting each node on the way
 * and stashing every right child for later processing. */
template <typename T, typename VST> /* element type, visitor */
static void visitAlongLeftBranch(BinNodePosi(T) x, VST& visit, Stack<BinNodePosi(T)>& S) {
    while (x) {
        visit(x->data); /* visit the current node */
        S.push(x->rc); /* save the right child (possible optimization: skip NULL children) */
        x = x->lc; /* descend one level along the left branch */
    }
}

/* Pre-order traversal (iterative #2): batches of left-branch visits, resumed from the stack. */
template <typename T, typename VST> /* element type, visitor */
void travPre_I2(BinNodePosi(T) x, VST& visit) {
    Stack<BinNodePosi(T)> S; /* auxiliary stack */
    while (true) {
        visitAlongLeftBranch(x, visit, S); /* visit the whole left branch starting at x */
        if (S.empty()) break; /* done once no right subtrees remain */
        x = S.pop(); /* the next batch starts at the most recently stashed right child */
    }
}

/* Pre-order traversal (recursive): visit, then left subtree, then right subtree. */
template <typename T, typename VST> /* element type, visitor */
void travPre_R(BinNodePosi(T) x, VST& visit) {
    if (!x) return;
    visit( x->data );
    travPre_R( x->lc, visit );
    travPre_R( x->rc, visit );
}

/* Unified entry point for in-order traversal. */
template <typename T> template <typename VST> /*
元素类型、操作器 */ void BinNode<T>::travIn(VST& visit) { switch (rand() % 5) { /* 此处暂随机选择以做测试,共五种选择 */ case 1: travIn_I1(this, visit); break; /* 迭代版#1 */ case 2: travIn_I2(this, visit); break; /* 迭代版#2 */ case 3: travIn_I3(this, visit); break; /* 迭代版#3 */ case 4: travIn_I4(this, visit); break; /* 迭代版#4 很经典巧妙,非常难想出来 */ default: travIn_R(this, visit); break; /* 递归版 */ } } /* 从当前节点出发,沿左分支不断深入,直至没有左分支的节点 */ template <typename T> static void goAlongLeftBranch(BinNodePosi(T) x, Stack<BinNodePosi(T)>& S) { while (x) { /* 当前节点入栈后随即向左侧分支深入,迭代直到无左孩子 */ S.push(x); x = x->lc; } } /* 二叉树中序遍历算法(迭代版#1) */ template <typename T, typename VST> /* 元素类型、操作器 */ void travIn_I1(BinNodePosi(T) x, VST& visit) { Stack<BinNodePosi(T)> S; /* 辅助栈 */ while (true) { goAlongLeftBranch(x, S); /* 从当前节点出发,逐批入栈 */ if (S.empty()) break; /* 直至所有节点处理完毕 */ x = S.pop(); /* 弹出栈顶节点并访问之 */ visit(x->data); x = x->rc; /* 转向右子树 */ } } /* 二叉树中序遍历算法(迭代版#2) */ template <typename T, typename VST> /* 元素类型、操作器 */ void travIn_I2(BinNodePosi(T) x, VST& visit) { /* 二叉树中序遍历算法(迭代版#2) */ Stack<BinNodePosi(T)> S; /* 辅助栈 */ while (true) if (x) { S.push(x); /* 根节点进栈 */ x = x->lc; /* 深入遍历左子树 */ } else if (!S.empty()) { x = S.pop(); /* 尚未访问的最低祖先节点退栈 */ visit(x->data); /* 访问该祖先节点 */ x = x->rc; /* 遍历祖先的右子树 */ } else break; /* 遍历完成 */ } /* 二叉树中序遍历算法(迭代版#3,无需辅助栈) */ template <typename T, typename VST> /* 元素类型、操作器 */ void travIn_I3(BinNodePosi(T) x, VST& visit) { /* 二叉树中序遍历算法(迭代版#3,无需辅助栈) */ bool backtrack = false; /* 前一步是否刚从右子树回溯——省去栈,仅O(1)辅助空间 */ while (true) if (!backtrack && HasLChild(*x)) /* 若有左子树且不是刚刚回溯,则 */ x = x->lc; /* 深入遍历左子树 */ else { /* 否则——无左子树或刚刚回溯(相当于无左子树) */ visit(x->data); /* 访问该节点 */ if (HasRChild(*x)) { /* 若其右子树非空,则 */ x = x->rc; /* 深入右子树继续遍历 */ backtrack = false; /* 并关闭回溯标志 */ } else { /* 若右子树空,则 */ if (!(x = x->succ())) break; /* 回溯(含抵达末节点时的退出返回) */ backtrack = true; /* 并设置回溯标志 */ } } } /* 二叉树中序遍历(迭代版#4,无需栈或标志位) */ template <typename T, typename VST> /* 元素类型、操作器 */ void travIn_I4(BinNodePosi(T) x, VST& visit) { /* 
二叉树中序遍历(迭代版#4,无需栈或标志位) */ while (true) if (HasLChild(*x)) /* 若有左子树,则 */ x = x->lc; /* 深入遍历左子树 */ else { /* 否则 */ visit(x->data); /* 访问当前节点,并 */ while (!HasRChild(*x)) /* 不断地在无右分支处 */ if (!(x = x->succ())) return; /* 回溯至直接后继(在没有后继的末节点处,直接退出) */ else visit(x->data); /* 访问新的当前节点 */ x = x->rc; /* (直至有右分支处)转向非空的右子树 */ } } /* 二叉树中序遍历算法(递归版) */ template <typename T, typename VST> /* 元素类型、操作器 */ void travIn_R(BinNodePosi(T) x, VST& visit) { if (!x) return; travIn_R(x->lc, visit); visit(x->data); travIn_R(x->rc, visit); } /* 二叉树后序遍历算法统一入口 */ template <typename T> template <typename VST> /* 元素类型、操作器 */ void BinNode<T>::travPost(VST& visit) { switch (rand() % 2) { /* 此处暂随机选择以做测试,共两种选择 */ case 1: travPost_I(this, visit); break; /* 迭代版-较难理解 */ default: travPost_R(this, visit); break; /* 递归版 */ } } //在以S栈顶节点为根的子树中,找到最高左侧可见叶节点 template <typename T> static void gotoHLVFL(Stack<BinNodePosi(T)>& S) { /* 沿途所遇节点依次入栈 */ while (BinNodePosi(T) x = S.top()) /* 自顶而下,反复检查当前节点(即栈顶) */ if (HasLChild(*x)) { /* 尽可能向左 */ if (HasRChild(*x)) S.push(x->rc); /* 若有右孩子,优先入栈 */ S.push(x->lc); /* 然后才转至左孩子 */ } else /* 实不得已 */ S.push(x->rc); /* 才向右 */ S.pop(); /* 返回之前,弹出栈顶的空节点 */ } /* 二叉树的后序遍历(迭代版) */ template <typename T, typename VST> void travPost_I(BinNodePosi(T) x, VST& visit) { Stack<BinNodePosi(T)> S; /* 辅助栈 */ if (x) S.push(x); /* 根节点入栈 */ while (!S.empty()) { if (S.top() != x->parent) /* 若栈顶非当前节点之父(则必为其右兄),此时需 */ gotoHLVFL(S); /* 在以其右兄为根之子树中,找到HLVFL(相当于递归深入其中) */ x = S.pop(); /* 弹出栈顶(即前一节点之后继),并访问之 */ visit(x->data); } } /* 二叉树后序遍历算法(递归版) */ template <typename T, typename VST> /* 元素类型、操作器 */ void travPost_R(BinNodePosi(T) x, VST& visit) { if (!x) return; travPost_R(x->lc, visit); travPost_R(x->rc, visit); visit(x->data); } /* 二叉树层次遍历算法 */ template <typename T> template <typename VST> /* 元素类型、操作器 */ void BinNode<T>::travLevel(VST& visit) { Queue<BinNodePosi(T)> Q; /* 辅助队列 */ Q.enqueue(this); /* 根节点入队 */ while (!Q.empty()) { /* 在队列再次变空之前,反复迭代 */ BinNodePosi(T) x = Q.dequeue(); visit(x->data); /* 
取出队首节点并访问之 */ if (HasLChild(*x)) Q.enqueue(x->lc); /* 左孩子入队 */ if (HasRChild(*x)) Q.enqueue(x->rc); /* 右孩子入队 */ } } /* 顺时针旋转 */ template <typename T> BinNodePosi(T) BinNode<T>::zig() { BinNodePosi(T) lChild = lc; lChild->parent = this->parent; if (lChild->parent) ((this == lChild->parent->rc) ? lChild->parent->rc : lChild->parent->lc) = lChild; lc = lChild->rc; if (lc) lc->parent = this; lChild->rc = this; this->parent = lChild; return lChild; } /* 逆时针旋转 */ template <typename T> BinNodePosi(T) BinNode<T>::zag() { BinNodePosi(T) rChild = rc; rChild->parent = this->parent; if (rChild->parent) ((this == rChild->parent->lc) ? rChild->parent->lc : rChild->parent->rc) = rChild; rc = rChild->lc; if (rc) rc->parent = this; rChild->lc = this; this->parent = rChild; return rChild; } /* 通过zig旋转调整,将子树x拉伸成最右侧通路 */ template <typename T> void stretchByZig(BinNodePosi(T) & x, int h) { int c = 0; for (BinNodePosi(T) v = x; v; v = v->rc) { while (v->lc) { v = v->zig(); c++; } v->height = --h; } while (x->parent) x = x->parent; printf("\nh = %d, c = %d\n\n", h, c); } /* 通过zag旋转调整,将子树x拉伸成最左侧通路 */ template <typename T> void stretchByZag(BinNodePosi(T) & x) { int c = 0; /* 记录旋转次数 */ int h = 0; BinNodePosi(T) p = x; while (p->rc) p = p->rc; /* 最大节点,必是子树最终的根 */ while (x->lc) x = x->lc; x->height = h++; /* 转至初始最左侧通路的末端 */ for (; x != p; x = x->parent, x->height = h++) { /* 若x右子树已空,则上升一层 */ while (x->rc) /* 否则,反复地 */ { x->zag(); /* 以x为轴做zag旋转 */ c++; } } /* 直到抵达子树的根 */ printf("\nh = %d, c = %d\n\n", h, c); } #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #ifndef BINTREE_H__ #define BINTREE_H__ #include "BinNode.h" /* 引入二叉树节点类 */ #include "../_share/release.h" /* TODO: Being added by Yugma for updateHeight interface */ #define max(a,b) (((a) > (b)) ? (a) : (b)) #define min(a,b) (((a) < (b)) ? (a) : (b)) template <typename T> class BinTree { /* 二叉树模板类 */ protected: int _size; /* 规模 */ BinNodePosi(T) _root; /* 根节点 */ virtual int updateHeight(BinNodePosi(T) x); /* 更新节点x的高度 */ void updateHeightAbove(BinNodePosi(T) x); /* 更新节点x及其祖先的高度 */ public: BinTree() : _size(0), _root(NULL) { } /* 构造函数 */ ~BinTree() { if (0 < _size) remove(_root); } /* 析构函数 */ int size() const { return _size; } /* 规模 */ bool empty() const { return !_root; } /* 判空 */ BinNodePosi(T) root() const { return _root; } /* 树根 */ BinNodePosi(T) insertAsRoot(T const& e); /* 插入根节点 */ BinNodePosi(T) insertAsLC(BinNodePosi(T) x, T const& e); /* e作为x的左孩子(原无)插入 */ BinNodePosi(T) insertAsRC(BinNodePosi(T) x, T const& e); /* e作为x的右孩子(原无)插入 */ BinNodePosi(T) attachAsLC(BinNodePosi(T) x, BinTree<T>* &T); /* T作为x左子树接入 */ BinNodePosi(T) attachAsRC(BinNodePosi(T) x, BinTree<T>* &T); /* T作为x右子树接入 */ int remove(BinNodePosi(T) x); /* 删除以位置x处节点为根的子树,返回该子树原先的规模 */ BinTree<T>* secede(BinNodePosi(T) x); /* 将子树x从当前树中摘除,并将其转换为一棵独立子树(secede:分裂出去) */ template <typename VST> /* 操作器 */ void travLevel(VST& visit) { if (_root) _root->travLevel(visit); } /* 层次遍历 */ template <typename VST> /* 操作器 */ void travPre(VST& visit) { if (_root) _root->travPre(visit); } /* 先序遍历 */ template <typename VST> /* 操作器 */ void travIn(VST& visit) { if (_root) _root->travIn(visit); } /* 中序遍历 */ template <typename VST> /* 操作器 */ void travPost(VST& visit) { if (_root) _root->travPost(visit); } /* 后序遍历 */ bool operator< (BinTree<T> const& t) /* 比较器(其余自行补充)*/ { return _root && t._root && lt(_root, t._root); } bool operator== (BinTree<T> const& t) /* 判等器 */ { return _root && t._root && (_root == t._root); } /* DSA */ 
/*DSA*/void stretchToLPath() { stretchByZag(_root); } /* 借助zag旋转,转化为左向单链 (stretch:伸展) */ /*DSA*/void stretchToRPath() { stretchByZig(_root, _size); } /* 借助zig旋转,转化为右向单链 */ }; /* 更新节点x高度 */ template <typename T> int BinTree<T>::updateHeight(BinNodePosi(T) x) { return x->height = 1 + max(stature(x->lc), stature(x->rc)); /* 具体规则,因树而异 */ } /* 更新高度 */ template <typename T> void BinTree<T>::updateHeightAbove(BinNodePosi(T) x) { while (x) { /* 从x出发,覆盖历代祖先。可优化 */ updateHeight(x); x = x->parent; } } /* 将e当作根节点插入空的二叉树 */ template <typename T> BinNodePosi(T) BinTree<T>::insertAsRoot(T const& e) { _size = 1; return _root = new BinNode<T>(e); } /* e插入为x的左孩子 */ template <typename T> BinNodePosi(T) BinTree<T>::insertAsLC(BinNodePosi(T) x, T const& e) { _size++; x->insertAsLC(e); updateHeightAbove(x); return x->lc; } /* e插入为x的右孩子 */ template <typename T> BinNodePosi(T) BinTree<T>::insertAsRC(BinNodePosi(T) x, T const& e) { _size++; x->insertAsRC(e); updateHeightAbove(x); return x->rc; } /* 二叉树子树接入算法:将S当作节点x的左子树接入,S本身置空 */ template <typename T> BinNodePosi(T) BinTree<T>::attachAsLC(BinNodePosi(T) x, BinTree<T>* &S) { /* x->lc == NULL */ if (x->lc = S->_root) x->lc->parent = x; /* 接入 */ _size += S->_size; updateHeightAbove(x); /* 更新全树规模与x所有祖先的高度 */ S->_root = NULL; S->_size = 0; /*release操作没有找到*/ release(S); /* 释放原树,返回接入位置 */ S = NULL; return x; } /* 二叉树子树接入算法:将S当作节点x的右子树接入,S本身置空 */ template <typename T> BinNodePosi(T) BinTree<T>::attachAsRC(BinNodePosi(T) x, BinTree<T>* &S) { /* x->rc == NULL */ if (x->rc = S->_root) x->rc->parent = x; /* 接入 */ _size += S->_size; updateHeightAbove(x); /* 更新全树规模与x所有祖先的高度 */ S->_root = NULL; S->_size = 0; /*release操作没有找到*/ release(S); /* 释放原树,返回接入位置 */ S = NULL; return x; } /* 删除二叉树中位置x处的节点及其后代,返回被删除节点的数值 */ template <typename T> int BinTree<T>::remove(BinNodePosi(T) x) { /* assert: x为二叉树中的合法位置 */ FromParentTo(*x) = NULL; /* 切断来自父节点的指针 */ updateHeightAbove(x->parent); /* 更新祖先高度 */ int n = removeAt(x); /* 删除子树x,更新规模,返回删除节点总数 */ _size -= n; return n; } 
template <typename T> /* Release node x and every descendant; returns the number of nodes removed. */
static int removeAt(BinNodePosi(T) x) { /* assert: x is a valid position within the tree */
    if (!x) /* recursion base: empty subtree */
        return 0;
    int n = 1 + removeAt(x->lc) + removeAt(x->rc); /* recursively release both subtrees */
    release(x->data); /* free the payload and the node itself, then report the count */
    release(x);
    return n;
}

/* Detach the subtree rooted at x from this tree and wrap it as an independent tree (secede: split off). */
template <typename T> BinTree<T>* BinTree<T>::secede(BinNodePosi(T) x) { /* assert: x is a valid position within the tree */
    FromParentTo(*x) = NULL; /* sever the pointer coming from the parent */
    updateHeightAbove(x->parent); /* refresh the heights of all former ancestors */
    BinTree<T>* S = new BinTree<T>; /* the new tree is rooted at x */
    S->_root = x; x->parent = NULL;
    S->_size = x->size(); /* update both sizes, then hand back the detached subtree */
    _size -= S->_size;
    return S;
}
#endif // !BINTREE_H__
<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#ifndef INCREASE_ELEM_H__
#define INCREASE_ELEM_H__
template <typename T> struct Increase // function object: increments a T in place
{ virtual void operator() ( T& e ) { e++; } }; // assumes T supports ++ directly or via operator overload
#endif
<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#include "print.h"
/******************************************************************************************
 * Basic types (original comment was GBK text mis-decoded as "»ù±¾ÀàÐÍ")
 ******************************************************************************************/
void UniPrint::p ( int e ) { printf ( " %04d", e ); }
void UniPrint::p ( float e ) { printf ( " %4.1f", e ); }
void UniPrint::p ( double e ) { printf ( " %4.1f", e ); }
/* Printable ASCII (32..127) is echoed as-is; anything else shows as '$'. */
void UniPrint::p ( char e ) { printf ( " %c", ( 31 < e ) && ( e < 128 ) ?
e : '$' ); }
/* Print a vertex traversal-status flag as a single letter (X for unknown). */
void UniPrint::p ( VStatus e ) {
   switch ( e ) {
      case UNDISCOVERED: printf ( "U" ); break;
      case DISCOVERED: printf ( "D" ); break;
      case VISITED: printf ( "V" ); break;
      default: printf ( "X" ); break;
   }
}
/* Print an edge classification flag as a single letter (X for unknown). */
void UniPrint::p ( EType e ) {
   switch ( e ) {
      case UNDETERMINED: printf ( "U" ); break;
      case TREE: printf ( "T" ); break;
      case CROSS: printf ( "C" ); break;
      case BACKWARD: printf ( "B" ); break;
      case FORWARD: printf ( "F" ); break;
      default: printf ( "X" ); break;
   }
}
<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#ifndef PQ_LEFTHEAP_H__
#define PQ_LEFTHEAP_H__
#include "../PQ/PQ.h" // priority-queue ADT
#include "../BinTree/BinTree.h" // binary-tree node template
/* Priority queue implemented as a leftist heap layered on BinTree. */
template <typename T> class PQ_LeftHeap : public PQ<T>, public BinTree<T> {
   /*DSA*/friend class UniPrint; // for demo output only; otherwise unnecessary
public:
   PQ_LeftHeap() { } // default constructor
   PQ_LeftHeap ( T* E, int n ) // batch constructor: could be improved to Floyd's O(n) heapify
   { for ( int i = 0; i < n; i++ ) insert ( E[i] ); }
   void insert ( T ); // insert an element, honoring the comparator-defined priority order
   T getMax(); // peek at the highest-priority element
   T delMax(); // remove and return the highest-priority element
}; //PQ_LeftHeap
#include "../_share/release.h"
#include "../_share/util.h"
/* Insertion implemented via heap merge. */
template <typename T> void PQ_LeftHeap<T>::insert(T e) {
   BinNodePosi(T) v = new BinNode<T>(e); // wrap e in a fresh binary-tree node
   this->_root = merge(this->_root, v); // merging the singleton completes the insertion
   // _root->parent = NULL; // the heap is now non-empty; root's parent link stays NULL
   this->_size++; // update size
}
/* Return the highest-priority entry of a non-empty leftist heap. */
template <typename T> T PQ_LeftHeap<T>::getMax() { return this->_root->data; } // by convention the root holds top priority
/* Deletion implemented via heap merge (precondition: queue is non-empty). */
template <typename T> T PQ_LeftHeap<T>::delMax() {
   BinNodePosi(T) lHeap = this->_root->lc; // left sub-heap
   BinNodePosi(T) rHeap = this->_root->rc; // right sub-heap
   T e = this->_root->data;
   delete this->_root; // discard the root
   this->_size--;
   this->_root =
merge(lHeap, rHeap); //原左右子堆合并 // if ( _root ) _root->parent = NULL; //若堆非空,还需相应设置父子链接 return e; //返回原根节点的数据项 } //根据相对优先级确定适宜的方式,合并以a和b为根节点的两个左式堆 template <typename T> static BinNodePosi(T) merge(BinNodePosi(T) a, BinNodePosi(T) b) { if (!a) return b; //退化情况 if (!b) return a; //退化情况 if (lt(a->data, b->data)) swap(a, b); //一般情况:首先确保b不大 a->rc = merge(a->rc, b); //将a的右子堆,与b合并 a->rc->parent = a; //并更新父子关系 if (!a->lc || a->lc->npl < a->rc->npl) //若有必要 swap(a->lc, a->rc); //交换a的左、右子堆,以确保右子堆的npl不大 a->npl = a->rc ? a->rc->npl + 1 : 1; //更新a的npl return a; //返回合并后的堆顶 } //本算法只实现结构上的合并,堆的规模须由上层调用者负责更新 #endif // PQ_LEFTHEAP_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef SPLAY_H__ #define SPLAY_H__ #include "../BST/BST.h" /* 基于BST实现Splay */ /* 由BST派生的Splay树模板类 */ template <typename T> class Splay : public BST<T> { protected: BinNodePosi(T) splay ( BinNodePosi(T) v ); /* 将节点v伸展至根 */ public: BinNodePosi(T) & search ( const T& e ); /* 查找(重写) */ BinNodePosi(T) insert ( const T& e ); /* 插入(重写) */ bool remove ( const T& e ); /* 删除(重写) */ }; /* 在伸展树中查找e */ template <typename T> BinNodePosi(T) & Splay<T>::search(const T& e) { BinNodePosi(T) p = searchIn(this->_root, e, this->_hot = NULL); this->_root = splay(p ? 
p : this->_hot); /* 将最后一个被访问的节点伸展至根 */ return this->_root; } /* 与其它BST不同,无论查找成功与否,_root都指向最后被访问的节点 */ /* 将关键码e插入伸展树中 */ template <typename T> BinNodePosi(T) Splay<T>::insert(const T& e) { if (!this->_root) { /* 处理原树为空的退化情况 */ this->_size++; return this->_root = new BinNode<T>(e); } if (e == search(e)->data) return this->_root; /* 确认目标节点不存在 */ this->_size++; BinNodePosi(T) t = this->_root; /* 创建新节点。以下调整<=7个指针以完成局部重构 */ if (this->_root->data < e) { /* 插入新根,以t和t->rc为左、右孩子 */ t->parent = this->_root = new BinNode<T>(e, NULL, t, t->rc); /* 2 + 3个 */ if (HasRChild(*t)) { /* <= 2个 */ t->rc->parent = this->_root; t->rc = NULL; } } else { /* 插入新根,以t->lc和t为左、右孩子 */ t->parent = this->_root = new BinNode<T>(e, NULL, t->lc, t); /* 2 + 3个 */ if (HasLChild(*t)) { /* <= 2个 */ t->lc->parent = this->_root; t->lc = NULL; } } this->updateHeightAbove(t); /* 更新t及其祖先(实际上只有_root一个)的高度 */ return this->_root; /* 新节点必然置于树根,返回之 */ } /* 无论e是否存在于原树中,返回时总有_root->data == e */ /* 从伸展树中删除关键码e */ template <typename T> bool Splay<T>::remove(const T& e) { if (!this->_root || (e != search(e)->data)) return false; /* 若树空或目标不存在,则无法删除 */ BinNodePosi(T) w = this->_root; /* assert: 经search()后节点e已被伸展至树根 */ if (!HasLChild(*(this->_root))) { /* 若无左子树,则直接删除 */ this->_root = this->_root->rc; if (this->_root) this->_root->parent = NULL; } else if (!HasRChild(*(this->_root))) { /* 若无右子树,也直接删除 */ this->_root = this->_root->lc; if (this->_root) this->_root->parent = NULL; } else { /* 若左右子树同时存在,则 */ BinNodePosi(T) lTree = this->_root->lc; lTree->parent = NULL; this->_root->lc = NULL; /* 暂时将左子树切除 */ this->_root = this->_root->rc; this->_root->parent = NULL; /* 只保留右子树 */ this->search(w->data); /* 以原树根为目标,做一次(必定失败的)查找 */ /* assert: 至此,右子树中最小节点必伸展至根,且(因无雷同节点)其左子树必空,于是 */ this->_root->lc = lTree; /* 只需将原左子树接回原位即可 */ lTree->parent = this->_root; } release(w->data); /* 释放节点,更新规模 */ release(w); this->_size--; if (this->_root) this->updateHeight(this->_root); /* 此后,若树非空,则树根的高度需要更新 */ return true; /* 返回成功标志 */ } /* 
若目标节点存在且被删除,返回true;否则返回false */ /* 在节点*p与*lc(可能为空)之间建立父(左)子关系 */ template <typename NodePosi> inline void attachAsLChild(NodePosi p, NodePosi lc) { p->lc = lc; if (lc) lc->parent = p; } /* 在节点*p与*rc(可能为空)之间建立父(右)子关系 */ template <typename NodePosi> inline void attachAsRChild(NodePosi p, NodePosi rc) { p->rc = rc; if (rc) rc->parent = p; } /* Splay树伸展算法:从节点v出发逐层伸展 */ template <typename T> BinNodePosi(T) Splay<T>::splay(BinNodePosi(T) v) { /* v为因最近访问而需伸展的节点位置 */ if (!v) return NULL; BinNodePosi(T) p; /* *v的父亲与祖父 */ BinNodePosi(T) g; while ((p = v->parent) && (g = p->parent)) { /* 自下而上,反复对*v做双层伸展 */ BinNodePosi(T) gg = g->parent; /* 每轮之后*v都以原曾祖父(great-grand parent)为父 */ if (IsLChild(*v)) if (IsLChild(*p)) { /* zig-zig */ /*DSA*/printf("\tzIg-zIg :"); print(g); print(p); print(v); printf("\n"); attachAsLChild(g, p->rc); attachAsLChild(p, v->rc); attachAsRChild(p, g); attachAsRChild(v, p); } else { /* zig-zag */ /*DSA*/printf("\tzIg-zAg :"); print(g); print(p); print(v); printf("\n"); attachAsLChild(p, v->rc); attachAsRChild(g, v->lc); attachAsLChild(v, g); attachAsRChild(v, p); } else if (IsRChild(*p)) { /* zag-zag */ /*DSA*/printf("\tzAg-zAg :"); print(g); print(p); print(v); printf("\n"); attachAsRChild(g, p->lc); attachAsRChild(p, v->lc); attachAsLChild(p, g); attachAsLChild(v, p); } else { /* zag-zig */ /*DSA*/printf("\tzAg-zIg :"); print(g); print(p); print(v); printf("\n"); attachAsRChild(p, v->lc); attachAsLChild(g, v->rc); attachAsRChild(v, g); attachAsLChild(v, p); } if (!gg) v->parent = NULL; /* 若*v原先的曾祖父*gg不存在,则*v现在应为树根 */ else /* 否则,*gg此后应该以*v作为左或右孩子 */ (g == gg->lc) ? 
attachAsLChild(gg, v) : attachAsRChild(gg, v); this->updateHeight(g); this->updateHeight(p); this->updateHeight(v); } /* 双层伸展结束时,必有g == NULL,但p可能非空 */ if (p = v->parent) { /* 若p果真非空,则额外再做一次单旋 */ /*DSA*/if (IsLChild(*v)) { printf("\tzIg :"); print(p); print(v); printf("\n"); } /*DSA*/else { printf("\tzAg :"); print(p); print(v); printf("\n"); } if (IsLChild(*v)) { attachAsLChild(p, v->rc); attachAsRChild(v, p); } else{ attachAsRChild(p, v->lc); attachAsLChild(v, p); } this->updateHeight(p); this->updateHeight(v); } v->parent = NULL; return v; } /* 调整之后新树根应为被伸展的节点,故返回该节点的位置以便上层函数更新树根 */ #endif // !SPLAY_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef PRINT_GRAPHMATRIX_H__ #define PRINT_GRAPHMATRIX_H__ /****************************************************************************************** * 图Graph ******************************************************************************************/ template <typename Tv, typename Te> //顶点类型、边类型 void UniPrint::p ( GraphMatrix<Tv, Te>& s ) { //引用 int inD = 0; for ( int i = 0; i < s.n; i++ ) inD += s.inDegree ( i ); int outD = 0; for ( int i = 0; i < s.n; i++ ) outD += s.outDegree ( i ); printf ( "%s[%d]*(%d, %d):\n", typeid ( s ).name(), &s, s.n, s.e ); //基本信息 // 标题行 print ( s.n ); printf ( " " ); print ( inD ); printf ( "|" ); for ( int i = 0; i < s.n; i++ ) { print ( s.vertex ( i ) ); printf ( "[" ); print ( s.status ( i ) ); printf ( "] " ); } printf ( "\n" ); // 标题行(续) print ( outD ); printf ( " " ); print ( s.e ); printf ( "|" ); for ( int i = 0; i < s.n; i++ ) { print ( s.inDegree ( i ) ); printf ( " " ); } printf ( "| dTime fTime Parent Weight\n" ); // 水平分隔线 printf ( "-----------+" ); for ( int i = 0; i < s.n; i++ ) printf ( "------" ); 
printf ( "+----------------------------\n" ); // 逐行输出各顶点 for ( int i = 0; i < s.n; i++ ) { print ( s.vertex ( i ) ); printf ( "[" ); print ( s.status ( i ) ); printf ( "] " ); print ( s.outDegree ( i ) ); printf ( "|" ); for ( int j = 0; j < s.n; j++ ) if ( s.exists ( i, j ) ) { print ( s.edge ( i, j ) ); print ( s.type ( i, j ) ); } else printf ( " ." ); printf ( "| " ); print ( s.dTime ( i ) ); printf ( " " ); print ( s.fTime ( i ) ); printf ( " " ); if ( 0 > s.parent ( i ) ) print ( "^" ); else print ( s.vertex ( s.parent ( i ) ) ); printf ( " " ); if ( INT_MAX > s.priority ( i ) ) print ( s.priority ( i ) ); else print ( " INF" ); printf ( "\n" ); } printf ( "\n" ); } #endif // !PRINT_GRAPHMATRIX_H__<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * Author: <NAME> * All rights reserved. ******************************************************************************************/ #include <chrono> #include "../../_share/util.h" #include "LRUCache.h" #define EXPAND_MULTIPLE (20) //测试数据扩大倍数 #define CAHCE_SIZE_MUTIPLE (EXPAND_MULTIPLE/4) //Cache容量扩大倍数 int testID = 0; //测试编号 /****************************************************************************************** * 测试两种不同解决方案的性能 ******************************************************************************************/ template <typename T_KEY, typename T_VALUE> //元素类型 void testLRUCachePerformance(int CacheSize) { printf("\n ==== Test %2d. 
testLRUCachePerformance() ,which size = %d \n", testID++, CacheSize); LRUCacheUseMap<int, int> oLRUcache; LRUCacheUseHashtable<int, int> oLRUcacheUseHashtable; std::chrono::steady_clock::time_point timeForLRUCacheUseMapBegin = std::chrono::steady_clock::now(); for (int i = 0; i < EXPAND_MULTIPLE * CacheSize; i++) { oLRUcache.setCacheSize(CAHCE_SIZE_MUTIPLE * CacheSize); oLRUcache.set(i, i); oLRUcache.get(i); } auto timeForLRUCacheUseMapEnd = std::chrono::steady_clock::now(); std::chrono::duration<double> time_span = std::chrono::duration_cast<std::chrono::duration<double>>(timeForLRUCacheUseMapEnd - timeForLRUCacheUseMapBegin); std::cout << "set and get " << EXPAND_MULTIPLE * CacheSize << " times using LRUCacheUseMap takes " << time_span.count() << " seconds." << endl; timeForLRUCacheUseMapBegin = std::chrono::steady_clock::now(); for (int i = 0; i < EXPAND_MULTIPLE * CacheSize; i++) { oLRUcacheUseHashtable.setCacheSize(CAHCE_SIZE_MUTIPLE * CacheSize); oLRUcacheUseHashtable.set(i, i); oLRUcacheUseHashtable.get(i); } timeForLRUCacheUseMapEnd = std::chrono::steady_clock::now(); time_span = std::chrono::duration_cast<std::chrono::duration<double>>(timeForLRUCacheUseMapEnd - timeForLRUCacheUseMapBegin); std::cout << "set and get " << EXPAND_MULTIPLE * CacheSize << " times using LRUcacheUseHashtable takes " << time_span.count() << " seconds." << endl; } /****************************************************************************************** * 测试LRUCache ******************************************************************************************/ template <typename T_KEY, typename T_VALUE> //元素类型 void testLRUCache(int CacheSize) { printf("\n ==== Test %2d. 
Generate one LRUCacheUseMap ,which size %d by random insertions\n", testID++, CacheSize); LRUCacheUseMap<int, int> oLRUcache; oLRUcache.setCacheSize((unsigned int)CacheSize); int imaxElement = 10 * CacheSize; int ivalue = 0; for (int i = 0; i < imaxElement; i++) { Rank k = dice(imaxElement); if (dice(2)) { //printf("set(%d) ", k); //set(k,k) oLRUcache.set(k, k); //printf(" = %d \n ", k); //set(k,k) } else { //printf("get(%d) ", k); //get(k) ivalue = oLRUcache.get(k); //printf(" = %d \n", ivalue ); } } //oLRUcache.printLRUCacheList(); printf("\n ==== Test %2d. Generate one LRUCacheUseHashtable ,which size %d by random insertions\n", testID++, CacheSize); LRUCacheUseHashtable<int, int> oLRUcacheUseHashtable; oLRUcacheUseHashtable.setCacheSize((unsigned int)CacheSize); for (int i = 0; i < imaxElement; i++) { Rank k = dice(imaxElement); if (dice(2)) { //printf("set(%d) ", k); //set(k,k) oLRUcacheUseHashtable.set(k, k); //printf(" = %d \n ", k); //set(k,k) } else { //printf("get(%d) ", k); //get(k) ivalue = oLRUcacheUseHashtable.get(k); //printf(" = %d \n", ivalue); } } //oLRUcacheUseHashtable.printLRUCacheList(); //性能对比 testLRUCachePerformance<int, int>(CacheSize); return; } /****************************************************************************************** * 测试LRUCache ******************************************************************************************/ int main() { int iTestSize = 2000; srand((unsigned int)time(NULL)); testLRUCache<int, int>(iTestSize); //元素类型可以在这里任意选择 return 0; } <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
/* (end of file header) */

#ifndef PRINT_BINNODE_H__
#define PRINT_BINNODE_H__

/******************************************************************************************
 * BinTree node printer: one line per node — the key plus structural health flags.
 * Which annotations appear is decided at compile time by the DSA_* macros.
 ******************************************************************************************/
template <typename T> void UniPrint::p ( BinNode<T>& node ) {
   p ( node.data ); /* the key itself */
/******************************************************************************************
 * height & NPL annotation (structure-dependent)
 ******************************************************************************************/
#if defined(DSA_LEFTHEAP)
   printf ( "(%-2d)", node.npl ); /* null path length */
#elif defined(DSA_BST)
   printf ( "(%-2d)", node.height ); /* height */
#elif defined(DSA_AVL)
   printf ( "(%-2d)", node.height ); /* height */
#elif defined(DSA_REDBLACK)
   printf ( "(%-2d)", node.height ); /* height */
#elif defined(DSA_SPLAY)
   printf ( "(%-2d)", node.height ); /* height */
#endif
/******************************************************************************************
 * parent/child links: '@' flags a child whose parent pointer does not point back here
 ******************************************************************************************/
   printf ( ( ( node.lc && &node != node.lc->parent ) || ( node.rc && &node != node.rc->parent ) ) ? "@" : " " );
/******************************************************************************************
 * node color
 ******************************************************************************************/
#if defined(DSA_REDBLACK)
   printf ( node.color == RB_BLACK ? "B" : " " ); /* black nodes only (red left blank) */
#endif
/******************************************************************************************
 * cached height / black height / NPL consistency: '!' or '%' flags a stale value
 ******************************************************************************************/
#if defined(DSA_PQ_COMPLHEAP)
   /* heights need not match in a complete heap */
#elif defined(DSA_PQ_LEFTHEAP)
   printf ( /* NPL invariant check */
      ( node.rc && node.npl != 1 + node.rc->npl ) ||
      ( node.lc && node.npl > 1 + node.lc->npl ) ? "%%" : " " );
#elif defined(DSA_REDBLACK)
   printf ( BlackHeightUpdated ( node ) ? " " : "!" ); /* black height */
#else
   printf ( HeightUpdated ( node ) ? " " : "!" ); /* (ordinary) height */
#endif
/******************************************************************************************
 * balance: 'X' unbalanced, '\' left-leaning, '/' right-leaning, '-' even
 ******************************************************************************************/
#if defined(DSA_AVL)
   if ( !AvlBalanced ( node ) ) printf ( "X" ); /* AVL balance violated */
   else if ( 0 < BalFac ( node ) ) printf ( "\\" ); /* leaning left */
   else if ( BalFac ( node ) < 0 ) printf ( "/" ); /* leaning right */
   else printf ( "-" ); /* even */
#elif defined(DSA_REDBLACK)
   if ( !Balanced ( node ) ) printf ( "X" ); /* RB balance violated */
   else if ( 0 < BalFac ( node ) ) printf ( "\\" ); /* leaning left */
   else if ( BalFac ( node ) < 0 ) printf ( "/" ); /* leaning right */
   else printf ( "-" ); /* even */
#else
   /* balance not applicable for other structures */
#endif
}

#endif // !PRINT_BINNODE_H__<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/
#ifndef CEHCKORDER_LIST_H__   // NOTE(review): guard is misspelled ("CEHCK..."); harmless, but worth renaming someday
#define CEHCKORDER_LIST_H__

#include "..\list\list.h"

/* Report whether list L is globally sorted: count adjacent inversions via a
   CheckOrder traversal functor seeded with the first element's data, then print
   the verdict together with the list's own disordered() count. */
template <typename T> void checkOrder ( List<T> & L ) {
   int unsorted = 0; /* inversion counter, incremented by the functor during traversal */
   L.traverse ( (CheckOrder<T> &) CheckOrder<T> ( unsorted, L.first()->data ) ); /* full traversal */
   if ( 0 < unsorted )
      printf ( "Unsorted with %d adjacent disordered pair(s)\n", unsorted );
   else
      printf ( "Sorted\n" );
   printf ( "DISORDERED = %d\n", L.disordered() );
}

#endif<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
******************************************************************************************/ #ifndef VECTOR_H__ #define VECTOR_H__ typedef int Rank; /* 秩 */ #define DEFAULT_CAPACITY 3 /* 默认的初始容量(实际应用中可设置为更大) */ /* 向量模板类 */ template <typename T> class Vector { protected: Rank _size; /* 规模、容量、数据区 */ int _capacity; T* _elem; void copyFrom(T const* A, Rank lo, Rank hi); /* 复制数组区间A[lo, hi) */ void expand(); /* 空间不足时扩容 */ void shrink(); /* 装填因子过小时压缩 */ bool bubble(Rank lo, Rank hi); /* 扫描交换 */ void bubbleSort(Rank lo, Rank hi); /* 起泡排序算法 */ Rank max(Rank lo, Rank hi); /* 选取最大元素 */ void selectionSort(Rank lo, Rank hi); /* 选择排序算法 */ void merge(Rank lo, Rank mi, Rank hi); /* 归并算法 */ void mergeSort(Rank lo, Rank hi); /* 归并排序算法 */ Rank partition(Rank lo, Rank hi); /* 轴点构造算法 */ void quickSort(Rank lo, Rank hi); /* 快速排序算法 */ void heapSort(Rank lo, Rank hi); /* 堆排序 */ public: /* 构造函数 */ Vector(int c = DEFAULT_CAPACITY, int s = 0, T v = 0) /* 容量为c、规模为s、所有元素初始为v */ { _elem = new T[_capacity = c]; for (_size = 0; _size < s; _elem[_size++] = v); } /* s<=c */ Vector(T const* A, Rank n) /* 数组整体复制 */ { copyFrom(A, 0, n); } Vector(T const* A, Rank lo, Rank hi) /* 区间 */ { copyFrom(A, lo, hi); } Vector(Vector<T> const& V) /* 向量整体复制 */ { copyFrom(V._elem, 0, V._size); } Vector(Vector<T> const& V, Rank lo, Rank hi) /* 区间 */ { copyFrom(V._elem, lo, hi); } /* 析构函数 */ ~Vector() { delete[] _elem; } /* 释放内部空间 */ /* 只读访问接口 */ Rank size() const { return _size; } /* 规模 */ bool empty() const { return !_size; } /* 判空 */ int disordered() const; /* 判断向量是否已排序 */ Rank find(T const& e) const /* 无序向量整体查找 */ { return find(e, 0, _size); } Rank find(T const& e, Rank lo, Rank hi) const; /* 无序向量区间查找 */ Rank search(T const& e) const /* 有序向量整体查找 */ { return (0 >= _size) ? 
-1 : search(e, 0, _size); } /* 暂未整理,后期待学习 */ Rank search(T const& e, Rank lo, Rank hi) const; /* 有序向量区间查找 */ /* 可写访问接口 */ T& operator[] (Rank r) const; /* 重载下标操作符,可以类似于数组形式引用各元素 */ Vector<T> & operator= (Vector<T> const&); /* 重载赋值操作符,以便直接克隆向量 */ T remove(Rank r); /* 删除秩为r的元素 */ int remove(Rank lo, Rank hi); /* 删除秩在区间[lo, hi)之内的元素 */ Rank insert(Rank r, T const& e); /* 插入元素 */ Rank insert(T const& e) /* 默认作为末元素插入 */ { return insert(_size, e); } void sort(Rank lo, Rank hi); /* 对[lo, hi)排序 */ void sort() { sort(0, _size); } /* 整体排序 */ void unsort(Rank lo, Rank hi); /* 对[lo, hi)置乱 */ void unsort() { unsort(0, _size); } /* 整体置乱 */ int deduplicate(); /* 无序去重 */ int uniquify(); /* 有序去重 */ /* 遍历 */ void traverse(void(*visit) (T&)); /* 遍历(使用函数指针,只读或局部性修改) */ template <typename VST> void traverse(VST& visit); /* 遍历(使用函数对象,可全局性修改) */ }; /* 以数组区间A[lo, hi)为蓝本复制向量 */ template <typename T> /* 元素类型 */ void Vector<T>::copyFrom(T const* A, Rank lo, Rank hi) { _elem = new T[_capacity = 2 * (hi - lo)]; /* 分配空间,规模清零 */ _size = 0; while (lo < hi) /* A[lo, hi)内的元素逐一 */ _elem[_size++] = A[lo++]; /* 复制至_elem[0, hi - lo) */ } /* 向量空间不足时扩容 */ template <typename T> void Vector<T>::expand() { if (_size < _capacity) return; /* 尚未满员时,不必扩容 */ if (_capacity < DEFAULT_CAPACITY) /* 不低于最小容量 */ _capacity = DEFAULT_CAPACITY; T* oldElem = _elem; _elem = new T[_capacity <<= 1]; /* 容量加倍 */ for (int i = 0; i < _size; i++) _elem[i] = oldElem[i]; /* 复制原向量内容(T为基本类型,或已重载赋值操作符'=') */ /*DSA*/ //printf("\n_ELEM [%x]*%d/%d expanded and shift to [%x]*%d/%d\n", oldElem, _size, _capacity/2, _elem, _size, _capacity); delete[] oldElem; /* 释放原空间 */ } /* 装填因子过小时压缩向量所占空间 */ template <typename T> void Vector<T>::shrink() { if (_capacity < DEFAULT_CAPACITY << 1) return; /* 不致收缩到DEFAULT_CAPACITY以下 */ if (_size << 2 > _capacity) return; /* 以25%为界 */ T* oldElem = _elem; _elem = new T[_capacity >>= 1]; /* 容量减半 */ for (int i = 0; i < _size; i++) _elem[i] = oldElem[i]; /* 复制原向量内容 */ delete[] oldElem; /* 释放原空间 */ } /* 一趟扫描交换 */ 
template <typename T> bool Vector<T>::bubble(Rank lo, Rank hi) { bool sorted = true; /* 整体有序标志 */ while (++lo < hi) /* 自左向右,逐一检查各对相邻元素 */ if (_elem[lo - 1] > _elem[lo]) { /* 若逆序,则 */ sorted = false; /* 意味着尚未整体有序,并需要 */ /* swap操作并没有实现*/ swap(_elem[lo - 1], _elem[lo]); /* 通过交换使局部有序 */ } return sorted; /* 返回有序标志 */ } /* 向量的起泡排序 */ template <typename T> void Vector<T>::bubbleSort(Rank lo, Rank hi) /* assert: 0 <= lo < hi <= size */ { while (!bubble(lo, hi--)); /* 逐趟做扫描交换,直至全序 */ } /* 在[lo, hi]内找出最大者*/ template <typename T> Rank Vector<T>::max(Rank lo, Rank hi) { Rank mx = hi; while (lo < hi--) /* 逆向扫描 */ if (_elem[hi] > _elem[mx]) /* 且严格比较 */ mx = hi; /* 故能在max有多个时保证后者优先,进而保证selectionSort稳定 */ return mx; } /* 向量选择排序 */ template <typename T> void Vector<T>::selectionSort(Rank lo, Rank hi) { /* assert: 0 < lo <= hi <= size */ /*DSA*/printf("\tSELECTIONsort [%3d, %3d)\n", lo, hi); while (lo < --hi) swap(_elem[max(lo, hi)], _elem[hi]); /* 将[hi]与[lo, hi]中的最大者交换 */ } /* 有序向量的归并 */ template <typename T> void Vector<T>::merge(Rank lo, Rank mi, Rank hi) { /* 各自有序的子向量[lo, mi)和[mi, hi) */ T* A = _elem + lo; /* 合并后的向量A[0, hi - lo) = _elem[lo, hi) */ int lb = mi - lo; T* B = new T[lb]; /* 前子向量B[0, lb) = _elem[lo, mi) */ for (Rank i = 0; i < lb; B[i] = A[i++]); /* 复制前子向量 */ int lc = hi - mi; T* C = _elem + mi; /* 后子向量C[0, lc) = _elem[mi, hi) */ for (Rank i = 0, j = 0, k = 0; (j < lb) || (k < lc); ) /* B[j]和C[k]中的小者续至A末尾 */ { if ((j < lb) && (!(k < lc) || (B[j] <= C[k]))) A[i++] = B[j++]; if ((k < lc) && (!(j < lb) || (C[k] < B[j]))) A[i++] = C[k++]; } delete[] B; /* 释放临时空间B */ } /* 归并后得到完整的有序向量[lo, hi) */ /* 向量归并排序 */ template <typename T> void Vector<T>::mergeSort(Rank lo, Rank hi) { /* 0 <= lo < hi <= size */ /*DSA*/printf("\tMERGEsort [%3d, %3d)\n", lo, hi); if (hi - lo < 2) return; /* 单元素区间自然有序,否则... 
*/ int mi = (lo + hi) / 2; /* 以中点为界 */ mergeSort(lo, mi); /* 分别排序 */ mergeSort(mi, hi); merge(lo, mi, hi); /* 归并 */ } /* 轴点构造算法:通过调整元素位置构造区间[lo, hi]的轴点,并返回其秩 */ template <typename T> Rank Vector<T>::partition(Rank lo, Rank hi) { /* 版本B:可优化处理多个关键码雷同的退化情况 */ swap(_elem[lo], _elem[lo + rand() % (hi - lo + 1)]); /* 任选一个元素与首元素交换 */ T pivot = _elem[lo]; /* 以首元素为候选轴点——经以上交换,等效于随机选取 */ while (lo < hi) { /* 从向量的两端交替地向中间扫描 */ while (lo < hi) if (pivot < _elem[hi]) /* 在大于pivot的前提下 */ hi--; /* 向左拓展右端子向量 */ else /* 直至遇到不大于pivot者 */ { _elem[lo++] = _elem[hi]; break; } /* 将其归入左端子向量 */ while (lo < hi) if (_elem[lo] < pivot) /* 在小于pivot的前提下 */ lo++; /* 向右拓展左端子向量 */ else /* 直至遇到不小于pivot者 */ { _elem[hi--] = _elem[lo]; break; } /* 将其归入右端子向量 */ } /* assert: lo == hi */ _elem[lo] = pivot; /* 将备份的轴点记录置于前、后子向量之间 */ return lo; /* 返回轴点的秩 */ } /* 向量快速排序 */ template <typename T> void Vector<T>::quickSort(Rank lo, Rank hi) { /* 0 <= lo < hi <= size */ /*DSA*/printf("\tQUICKsort [%3d, %3d)\n", lo, hi); if (hi - lo < 2) return; /* 单元素区间自然有序,否则... 
*/ Rank mi = partition(lo, hi - 1); /* 在[lo, hi - 1]内构造轴点 */ quickSort(lo, mi); /* 对前缀递归排序 */ quickSort(mi + 1, hi); /* 对后缀递归排序 */ } /* 向量堆排序 */ template <typename T> void Vector<T>::heapSort(Rank lo, Rank hi) { //0 <= lo < hi <= size /*DSA*/ printf("\tHEAPsort [%3d, %3d)\n", lo, hi); PQ_ComplHeap<T> H(_elem + lo, hi - lo); //将待排序区间建成一个完全二叉堆,O(n) while (!H.empty()) //反复地摘除最大元并归入已排序的后缀,直至堆空 /*DSA*/ { _elem[--hi] = H.delMax(); //等效于堆顶与末元素对换后下滤 /*DSA*/ for (int i = lo; i < hi; i++) print(H[i]); print(_elem[hi]); printf("\n"); /*DSA*/ } } /* 返回向量中逆序相邻元素对的总数 */ template <typename T> int Vector<T>::disordered() const { int n = 0; /* 计数器 */ for (int i = 1; i < _size; i++) /* 逐一检查_size - 1对相邻元素 */ if (_elem[i - 1] > _elem[i]) n++; /* 逆序则计数 */ return n; /* 向量有序当且仅当n = 0 */ } /* 无序向量的顺序查找:返回最后一个元素e的位置;失败时,返回lo - 1 */ template <typename T> Rank Vector<T>::find(T const& e, Rank lo, Rank hi) const { /* assert: 0 <= lo < hi <= _size */ while ((lo < hi--) && (e != _elem[hi])); /* 从后向前,顺序查找 */ return hi; /* 若hi < lo,则意味着失败;否则hi即命中元素的秩 */ } /* 二分查找算法(版本C):在有序向量的区间[lo, hi)内查找元素e,0 <= lo <= hi <= _size */ template <typename T> static Rank binSearch(T* A, T const& e, Rank lo, Rank hi) { /*DSA*/printf("BIN search (C)\n"); while (lo < hi) { /* 每步迭代仅需做一次比较判断,有两个分支 */ /*DSA*/ for (int i = 0; i < lo; i++) printf(" "); if (lo >= 0) for (int i = lo; i < hi; i++) printf("....^"); printf("\n"); Rank mi = (lo + hi) >> 1; /* 以中点为轴点 */ (e < A[mi]) ? 
hi = mi : lo = mi + 1; /* 经比较后确定深入[lo, mi)或(mi, hi) */ } /* 成功查找不能提前终止 */ /*DSA*/ for (int i = 0; i < lo - 1; i++) printf(" "); if (lo > 0) printf("....|\n"); else printf("<<<<|\n"); return --lo; /* 循环结束时,lo为大于e的元素的最小秩,故lo - 1即不大于e的元素的最大秩 */ } /* 有多个命中元素时,总能保证返回秩最大者;查找失败时,能够返回失败的位置 */ /* 引入Fib数列类 */ #include "..\fibonacci\Fib.h" /* Fibonacci查找算法(版本B):在有序向量的区间[lo, hi)内查找元素e,0 <= lo <= hi <= _size */ template <typename T> static Rank fibSearch(T* A, T const& e, Rank lo, Rank hi) { /*DSA*/printf("FIB search (B)\n"); Fib fib(hi - lo); /* 用O(log_phi(n = hi - lo)时间创建Fib数列 */ while (lo < hi) { /* 每步迭代仅仅做一次比较判断,有两个分支 */ /*DSA*/ for (int i = 0; i < lo; i++) printf(" "); if (lo >= 0) for (int i = lo; i < hi; i++) printf("....^"); else printf("<<<<|"); printf("\n"); while (hi - lo < fib.get()) fib.prev(); /* 通过向前顺序查找(分摊O(1))——至多迭代几次? */ Rank mi = lo + fib.get() - 1; /* 确定形如Fib(k) - 1的轴点 */ (e < A[mi]) ? hi = mi : lo = mi + 1; /* 比较后确定深入前半段[lo, mi)或后半段(mi, hi) */ } /* 成功查找不能提前终止 */ /*DSA*/ for (int i = 0; i < lo - 1; i++) printf(" "); if (lo > 0) printf("....|\n"); else printf("<<<<|\n"); return --lo; /* 循环结束时,lo为大于e的元素的最小秩,故lo - 1即不大于e的元素的最大秩 */ } /* 有多个命中元素时,总能保证返回最秩最大者;查找失败时,能够返回失败的位置 */ /* 在有序向量的区间[lo, hi)内,确定不大于e的最后一个节点的秩 */ template <typename T> Rank Vector<T>::search(T const& e, Rank lo, Rank hi) const {/* assert: 0 <= lo < hi <= _size */ /* 按各50%的概率随机使用二分查找或Fibonacci查找 */ return (rand() % 2) ? 
binSearch(_elem, e, lo, hi) : fibSearch(_elem, e, lo, hi); } /* 重载下标操作符 */ template <typename T> T& Vector<T>::operator[] (Rank r) const { return _elem[r]; } /* assert: 0 <= r < _size */ /* 重载赋值操作符 */ template <typename T> Vector<T>& Vector<T>::operator= (Vector<T> const& V) { if (_elem) delete[] _elem; /* 释放原有内容 */ copyFrom(V._elem, 0, V.size()); /* 整体复制 */ return *this; /* 返回当前对象的引用,以便链式赋值 */ } /* 删除向量中秩为r的元素,0 <= r < size */ template <typename T> T Vector<T>::remove(Rank r) { T e = _elem[r]; /* 备份被删除元素 */ remove(r, r + 1); /* 调用区间删除算法,等效于对区间[r, r + 1)的删除 */ return e; /* 返回被删除元素 */ } /* 删除区间[lo, hi) */ template <typename T> int Vector<T>::remove(Rank lo, Rank hi) { if (lo == hi) return 0; /* 出于效率考虑,单独处理退化情况,比如remove(0, 0) */ while (hi < _size) _elem[lo++] = _elem[hi++]; /* [hi, _size)顺次前移hi - lo个单元 */ _size = lo; /* 更新规模,直接丢弃尾部[lo, _size = hi)区间 */ shrink(); /* 若有必要,则缩容 */ return hi - lo; /* 返回被删除元素的数目 */ } /* 将e作为秩为r元素插入 */ template <typename T> Rank Vector<T>::insert(Rank r, T const& e) { /* assert: 0 <= r <= size */ expand(); /* 若有必要,扩容 */ for (int i = _size; i > r; i--) /* 自后向前,后继元素顺次后移一个单元 */ _elem[i] = _elem[i - 1]; _elem[r] = e; /* 置入新元素并更新容量 */ _size++; return r; /* 返回秩 */ } /* 向量区间[lo, hi)排序 */ template <typename T> void Vector<T>::sort(Rank lo, Rank hi) { switch (rand() % 5) { /* 随机选取排序算法。可根据具体问题的特点灵活选取或扩充 */ case 1: bubbleSort(lo, hi); break; /* 起泡排序 */ case 2: selectionSort(lo, hi); break; /* 选择排序(习题) */ case 3: mergeSort(lo, hi); break; /* 归并排序 */ case 4: heapSort(lo, hi); break; /* 堆排序 */ default: quickSort(lo, hi); break; /* 快速排序(稍后介绍) */ } /*DSA*/ /* selectionSort(lo, hi); */ } /* 等概率随机置乱区间[lo, hi) */ template <typename T> void Vector<T>::unsort(Rank lo, Rank hi) { T* V = _elem + lo; /* 将子向量_elem[lo, hi)视作另一向量V[0, hi - lo) */ for (Rank i = hi - lo; i > 0; i--) /* 自后向前 */ swap(V[i - 1], V[rand() % i]); /* 将V[i - 1]与V[0, i)中某一元素随机交换 */ } /* 该方案有问题,待调试 */ /* 删除无序向量中重复元素(高效版) */ template <typename T> int Vector<T>::deduplicate() { int oldSize = _size; 
/* 记录原规模 */ Rank i = 1; /* 从_elem[1]开始 */ while (i < _size) /* 自前向后逐一考查各元素_elem[i] */ (find(_elem[i], 0, i) < 0) ? /* 在其前缀中寻找与之雷同者(至多一个) */ i++ : remove(i); /* 若无雷同则继续考查其后继,否则删除雷同者 */ return oldSize - _size; /* 向量规模变化量,即被删除元素总数 */ } /* 有序向量重复元素剔除算法(高效版) */ template <typename T> int Vector<T>::uniquify() { Rank i = 0, j = 0; /* 各对互异“相邻”元素的秩 */ while (++j < _size) /* 逐一扫描,直至末元素 */ if (_elem[i] != _elem[j]) /* 跳过雷同者 */ _elem[++i] = _elem[j]; /* 发现不同元素时,向前移至紧邻于前者右侧 */ _size = ++i; /* 直接截除尾部多余元素 */ shrink(); return j - i; /* 向量规模变化量,即被删除元素总数 */ } /* 遍历向量 */ template <typename T> void Vector<T>::traverse(void(*visit) (T&)) /* 借助函数指针机制 */ { for (int i = 0; i < _size; i++) visit(_elem[i]); } /* 遍历向量 */ template <typename T> template <typename VST> /* 元素类型、操作器 */ void Vector<T>::traverse(VST& visit) /* 借助函数对象机制 */ { for (int i = 0; i < _size; i++) visit(_elem[i]); } #endif<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #pragma warning(disable : 4996 4800) #include "../_share/util.h" #include "bitmap.h" //#include "bitmap_O1_init.h" /* 以上两个头文件只需包含任意其中一个即可; bitmap.h: 空间复杂度最低 bitmap_O1_init.h: 以空间作为补偿,节省初始化时间 */ /****************************************************************************************** * 测试位图 ******************************************************************************************/ int testBitmap ( int n ) { bool* B = new bool[n]; memset ( B, 0, n * sizeof ( bool ) ); //常规位图,创建后随即O(n)时间初始化 Bitmap M ( n ); for ( int i = 0; i < 9 * n; i++ ) { Rank k = dice ( n ); if ( dice ( 2 ) ) { printf ( "set(%d) ...", k ); //set(k) B[k] = true; M.set ( k ); } else { printf ( "clear(%d) ...", k ); //clear(k) B[k] = false; M.clear ( k ); } printf ( "done\n CRC: " ); for ( int j = 0; j < n; j++ ) printf ( "%6c", B[j] == M.test ( j ) ? ' ' : '!' ); printf ( "\n B[]: " ); for ( int j = 0; j < n; j++ ) printf ( "%6c", B[j] ? 'x' : '.' ); printf ( "\n M[]: " ); for ( int j = 0; j < n; j++ ) printf ( "%6c", M.test ( j ) ? 'x' : '.' ); printf ( "\n\n\n" ); } delete [] B; return 0; } /****************************************************************************************** * 测试位图 ******************************************************************************************/ int main ( int argc, char* argv[] ) { int iNumber = 10; srand ( ( unsigned int ) time ( NULL ) ); //设置随机种子 return testBitmap (iNumber); //启动测试 }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ #pragma warning(disable : 4996 4800) #include "../_share/util.h" #include "bitmap_O1_init.h" /****************************************************************************************** * 测试位图 ******************************************************************************************/ int testBitmap ( int n ) { bool* B = new bool[n]; memset ( B, 0, n * sizeof ( bool ) ); //常规位图,创建后随即O(n)时间初始化 Bitmap M ( n ); for ( int i = 0; i < 9 * n; i++ ) { Rank k = dice ( n ); printf ( "set(%d) ...", k ); //set(k) B[k] = true; M.set ( k ); printf ( "done\n CRC: " ); for ( int j = 0; j < n; j++ ) printf ( "%6c", B[j] == M.test ( j ) ? ' ' : '!' ); printf ( "\n B[]: " ); for ( int j = 0; j < n; j++ ) printf ( "%6c", B[j] ? 'x' : '.' ); printf ( "\n M[]: " ); for ( int j = 0; j < n; j++ ) printf ( "%6c", M.test ( j ) ? 'x' : '.' ); printf ( "\n\n\n" ); } delete [] B; return 0; } /****************************************************************************************** * 测试位图 ******************************************************************************************/ int main ( int argc, char* argv[] ) { int iNumber = 10; srand((unsigned int)time(NULL)); //设置随机种子 return testBitmap (iNumber); //启动测试 }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. 
******************************************************************************************/ /*DSA*/#include "../bitmap/bitmap.h" int primeNLT ( int c, int n, char* file ) { //根据file文件中的记录,在[c, n)内取最小的素数 Bitmap B ( file, n ); //file已经按位图格式,记录了n以内的所有素数,因此只要 while ( c < n ) //从c开始,逐位地 if ( B.test ( c ) ) c++; //测试,即可 else return c; //返回首个发现的素数 return c; //若没有这样的素数,返回n(实用中不能如此简化处理) } <file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ /****************************************************************************************** * Test of Graph ******************************************************************************************/ #include<iostream> using namespace std; #include "../_share/util.h" #include "../UniPrint/print.h" #include "../GraphMatrix/GraphMatrix.h" /****************************************************************************************** * 生成由v个顶点、e条边构成的随机图 ******************************************************************************************/ template <typename Tv, typename Te> //顶点类型、边类型 void randomGraph(GraphMatrix<Tv, Te> & g, int n, int e) { //assert: 0 < e(e-1) <= v while ((g.n < n) || (g.e < e)) { //随机测试 if (g.n < n) { //顶点 if (dice(100) < 65) { //65%概率插入顶点 Tv vertex = (Tv)('A' + dice(26)); /*DSA*/printf("Inserting vertex"); print(vertex); printf(" ..."); g.insert(vertex); /*DSA*/printf("done\n"); } else { //35%概率删除顶点 if (1 > g.n) continue; int i = dice(g.n); /*DSA*/printf("Removing vertex %d ...", i); Tv v = g.remove(i); /*DSA*/printf("done with"); print(v); printf("\n"); } /*DSA*/print(g); } if ((1 < g.n) && (g.e < e)) { //边 if (dice(100) < 65) { //65%概率插入边 int i = dice(g.n), j = dice(g.n); Te e = dice((Te)3 * n); /*DSA*/printf("Inserting edge (%d, %d) = ", i, j); 
print(e); printf(" ..."); if (g.exists(i, j)) { /*DSA*/printf("already exists\n"); } else { g.insert(e, e, i, j); /*DSA*/printf("done\n"); } } else { //35%概率删除边 int i = dice(g.n), j = dice(g.n); /*DSA*/printf("Removing edge (%d, %d) ...", i, j); if (g.exists(i, j)) { Te e = g.remove(i, j); /*DSA*/printf("done with"); print(e); printf("\n"); } else { /*DSA*/printf("not exists\n"); } } /*DSA*/print(g); } } for (int i = 0; i < n; i++) g.vertex(i) = 'A' + i; /*DSA*/print(g); } /****************************************************************************************** * 从命令行(文件重定向)中导入图 ******************************************************************************************/ void importGraph(GraphMatrix<char, int> & g) { int n; scanf("%d\n", &n); /*DSA*/printf("%d vertices\n", n); for (int i = 0; i < n; i++) { //插入v个顶点 char vertex; scanf("%c", &vertex); /*DSA*/printf("Inserting vertex"); print(vertex); printf(" ..."); g.insert(vertex); /*DSA*/printf("done\n"); print(g); } for (int i = 0; i < n; i++) for (int j = 0; j < n; j++) { //插入边 int edge; scanf("%d", &edge); if (0 > edge) continue; /*DSA*/printf("Inserting edge (%d, %d) = ", i, j); print(edge); printf(" ..."); g.insert(edge, edge, i, j); /*DSA*/printf("done\n"); } } /****************************************************************************************** * 图结构的统一测试 ******************************************************************************************/ int main(int argc, char* argv[]) { GraphMatrix<char, int> g; int VertexNum = 5; int EdgeNum = 10; randomGraph<char, int>(g, VertexNum, EdgeNum); //顶点以字符编号,边为整数权重 /*DSA*/printf("\n"); print(g); /*DSA*/printf("=== BFS\n"); g.bfs(0); /*DSA*/print(g); /*DSA*/printf("=== BFS (PFS)\n"); g.pfs(0, BfsPU<char, int>()); /*DSA*/print(g); /*DSA*/printf("=== DFS\n"); g.dfs(0); /*DSA*/print(g); /*DSA*/printf("=== DFS (PFS)\n"); g.pfs(0, DfsPU<char, int>()); /*DSA*/print(g); /*DSA*/printf("=== TopoSort\n"); Stack<char>* ts = g.tSort(0); /*DSA*/ print(ts); print(g); 
release(ts); /*DSA*/printf("=== BCC\n"); g.bcc(0); /*DSA*/print(g); /*DSA*/printf("=== Prim\n"); g.prim(0); /*DSA*/print(g); /*DSA*/printf("=== Prim (PFS)\n"); g.pfs(0, PrimPU<char, int>()); /*DSA*/print(g); /*DSA*/printf("=== Dijkstra\n"); g.dijkstra(0); /*DSA*/print(g); /*DSA*/printf("=== Dijkstra (PFS)\n"); g.pfs(0, DijkstraPU<char, int>()); /*DSA*/print(g); return 0; }<file_sep>/****************************************************************************************** * Data Structures and Algorithm in C++ . * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it . * All rights reserved. ******************************************************************************************/ #ifndef GRAPH_H__ #define GRAPH_H__ typedef enum { UNDISCOVERED, DISCOVERED, VISITED } VStatus; /* 顶点状态 */ typedef enum { UNDETERMINED, TREE, CROSS, FORWARD, BACKWARD } EType; /* 边在遍历树中所属的类型 */ /* 图Graph模板类 */ template <typename Tv, typename Te> /* 顶点类型、边类型 */ class Graph { private: void reset() { /* 所有顶点、边的辅助信息复位 */ for (int i = 0; i < n; i++) { /* 所有顶点的 */ status(i) = UNDISCOVERED; /* 状态,时间标签 */ dTime(i) = fTime(i) = -1; parent(i) = -1; /*(在遍历树中的)父节点,优先级数 */ priority(i) = INT_MAX; for (int j = 0; j < n; j++) /* 所有边的 */ if (exists(i, j)) type(i, j) = UNDETERMINED; /* 类型 */ } } void BFS(int, int&); /*(连通域)广度优先搜索算法 */ void DFS(int, int&); /*(连通域)深度优先搜索算法 */ void BCC(int, int&, Stack<int>&); /*(连通域)基于DFS的双连通分量分解算法 */ bool TSort(int, int&, Stack<Tv>*); /*(连通域)基于DFS的拓扑排序算法 */ template <typename PU> void PFS(int, PU); /*(连通域)优先级搜索框架 */ public: /* 顶点 */ int n; /* 顶点总数 */ virtual int insert(Tv const&) = 0; /* 插入顶点,返回编号 */ virtual Tv remove(int) = 0; /* 删除顶点及其关联边,返回该顶点信息 */ virtual Tv& vertex(int) = 0; /* 顶点v的数据(该顶点的确存在) */ virtual int inDegree(int) = 0; /* 顶点v的入度(该顶点的确存在) */ virtual int outDegree(int) = 0; /* 顶点v的出度(该顶点的确存在) */ virtual int firstNbr(int) = 0; /* 顶点v的首个邻接顶点 */ virtual int nextNbr(int, int) = 0; /* 顶点v的(相对于顶点j的)下一邻接顶点 */ virtual VStatus& status(int) = 0; /* 顶点v的状态 */ 
virtual int& dTime(int) = 0; /* 顶点v的时间标签dTime */ virtual int& fTime(int) = 0; /* 顶点v的时间标签fTime */ virtual int& parent(int) = 0; /* 顶点v在遍历树中的父亲 */ virtual int& priority(int) = 0; /* 顶点v在遍历树中的优先级数 */ /* 边:这里约定,无向边均统一转化为方向互逆的一对有向边,从而将无向图视作有向图的特例 */ int e; /* 边总数 */ virtual bool exists(int, int) = 0; /* 边(v, u)是否存在 */ virtual void insert(Te const&, int, int, int) = 0; /* 在顶点v和u之间插入权重为w的边e */ virtual Te remove(int, int) = 0; /* 删除顶点v和u之间的边e,返回该边信息 */ virtual EType & type(int, int) = 0; /* 边(v, u)的类型 */ virtual Te& edge(int, int) = 0; /* 边(v, u)的数据(该边的确存在) */ virtual int& weight(int, int) = 0; /* 边(v, u)的权重 */ /* 算法 */ void bfs(int); /* 广度优先搜索算法 */ void dfs(int); /* 深度优先搜索算法 */ void bcc(int); /* 基于DFS的双连通分量分解算法 */ Stack<Tv>* tSort(int); /* 基于DFS的拓扑排序算法 */ void prim(int); /* 最小支撑树Prim算法 */ void dijkstra(int); /* 最短路径Dijkstra算法 */ template <typename PU> void pfs(int, PU); /* 优先级搜索框架 */ }; #include "../_share/release.h" #include "../_share/util.h" /* 广度优先搜索BFS算法(全图) */ template <typename Tv, typename Te> void Graph<Tv, Te>::bfs(int s) { /* assert: 0 <= s < n */ reset(); /* 初始化 */ int clock = 0; int v = s; do /* 逐一检查所有顶点 */ if (UNDISCOVERED == status(v)) /* 一旦遇到尚未发现的顶点 */ BFS(v, clock); /* 即从该顶点出发启动一次BFS */ while (s != (v = (++v % n))); /* 按序号检查,故不漏不重 */ } /* 广度优先搜索BFS算法(单个连通域) */ template <typename Tv, typename Te> void Graph<Tv, Te>::BFS(int v, int& clock) { /* assert: 0 <= v < n */ Queue<int> Q; /* 引入辅助队列 */ status(v) = DISCOVERED; /* 初始化起点 */ Q.enqueue(v); while (!Q.empty()) { /* 在Q变空之前,不断 */ int v = Q.dequeue(); /* 取出队首顶点v */ dTime(v) = ++clock; for (int u = firstNbr(v); -1 < u; u = nextNbr(v, u)) /* 枚举v的所有邻居u */ if (UNDISCOVERED == status(u)) { /* 若u尚未被发现,则 */ status(u) = DISCOVERED; /* 发现该顶点 */ Q.enqueue(u); type(v, u) = TREE; /* 引入树边拓展支撑树 */ parent(u) = v; } else { /* 若u已被发现,或者甚至已访问完毕,则 */ type(v, u) = CROSS; /* 将(v, u)归类于跨边 */ } status(v) = VISITED; /* 至此,当前顶点访问完毕 */ } } /* 深度优先搜索DFS算法(全图) */ template <typename Tv, typename Te> void Graph<Tv, Te>::dfs(int s) { /* 
assert: 0 <= s < n */ reset(); /* 初始化 */ int clock = 0; int v = s; do /* 逐一检查所有顶点 */ if (UNDISCOVERED == status(v)) /* 一旦遇到尚未发现的顶点 */ DFS(v, clock); /* 即从该顶点出发启动一次DFS */ while (s != (v = (++v % n))); /* 按序号检查,故不漏不重 */ } /* 深度优先搜索DFS算法(单个连通域) */ template <typename Tv, typename Te> void Graph<Tv, Te>::DFS(int v, int& clock) { /* assert: 0 <= v < n */ dTime(v) = ++clock; /* 发现当前顶点v */ status(v) = DISCOVERED; for (int u = firstNbr(v); -1 < u; u = nextNbr(v, u)) /* 枚举v的所有邻居u */ switch (status(u)) { /* 并视其状态分别处理 */ case UNDISCOVERED: /* u尚未发现,意味着支撑树可在此拓展 */ type(v, u) = TREE; parent(u) = v; DFS(u, clock); break; case DISCOVERED: /* u已被发现但尚未访问完毕,应属被后代指向的祖先 */ type(v, u) = BACKWARD; break; default: /* u已访问完毕(VISITED,有向图),则视承袭关系分为前向边或跨边 */ type(v, u) = (dTime(v) < dTime(u)) ? FORWARD : CROSS; break; } status(v) = VISITED; fTime(v) = ++clock; /* 至此,当前顶点v方告访问完毕 */ } /* 基于DFS的BCC分解算法 */ template <typename Tv, typename Te> void Graph<Tv, Te>::bcc(int s) { reset(); int clock = 0; int v = s; Stack<int> S; /* 栈S用以记录已访问的顶点 */ do if (UNDISCOVERED == status(v)) { /* 一旦发现未发现的顶点(新连通分量)*/ BCC(v, clock, S); /* 即从该顶点出发启动一次BCC */ S.pop(); /* 遍历返回后,弹出栈中最后一个顶点——当前连通域的起点 */ } while (s != (v = (++v % n))); } #define hca(x) (fTime(x)) /* 利用此处闲置的fTime[]充当hca[] */ template <typename Tv, typename Te> /* 顶点类型、边类型 */ void Graph<Tv, Te>::BCC(int v, int& clock, Stack<int>& S) { /* assert: 0 <= v < n */ hca(v) = dTime(v) = ++clock; status(v) = DISCOVERED; S.push(v); /* v被发现并入栈 */ for (int u = firstNbr(v); -1 < u; u = nextNbr(v, u)) /* 枚举v的所有邻居u */ switch (status(u)) { /* 并视u的状态分别处理 */ case UNDISCOVERED: parent(u) = v; type(v, u) = TREE; BCC(u, clock, S); /* 从顶点u处深入 */ if (hca(u) < dTime(v)) /* 遍历返回后,若发现u(通过后向边)可指向v的真祖先 */ hca(v) = min(hca(v), hca(u)); /* 则v亦必如此 */ else { /* 否则,以v为关节点(u以下即是一个BCC,且其中顶点此时正集中于栈S的顶部) */ /*DSA*/printf("BCC rooted at %c:", vertex(v)); /*DSA*/Stack<int> temp; do { temp.push(S.pop()); print(vertex(temp.top())); } while (v != temp.top()); while (!temp.empty()) 
S.push(temp.pop()); while (v != S.pop()); /* 依次弹出当前BCC中的节点,亦可根据实际需求转存至其它结构 */ S.push(v); /* 最后一个顶点(关节点)重新入栈——分摊不足一次 */ /*DSA*/printf("\n"); } break; case DISCOVERED: type(v, u) = BACKWARD; /* 标记(v, u),并按照“越小越高”的准则 */ if (u != parent(v)) hca(v) = min(hca(v), dTime(u)); //更新hca[v] */ break; default: /* VISITED (digraphs only) */ type(v, u) = (dTime(v) < dTime(u)) ? FORWARD : CROSS; break; } status(v) = VISITED; /* 对v的访问结束 */ } #undef hca /* 基于DFS的拓扑排序算法 */ template <typename Tv, typename Te> Stack<Tv>* Graph<Tv, Te>::tSort(int s) { /* assert: 0 <= s < n */ reset(); int clock = 0; int v = s; Stack<Tv>* S = new Stack<Tv>; /* 用栈记录排序顶点 */ do { if (UNDISCOVERED == status(v)) if (!TSort(v, clock, S)) { /* clock并非必需 */ /*DSA*/print(S); while (!S->empty()) /* 任一连通域(亦即整图)非DAG */ S->pop(); break; /* 则不必继续计算,故直接返回 */ } } while (s != (v = (++v % n))); return S; /* 若输入为DAG,则S内各顶点自顶向底排序;否则(不存在拓扑排序),S空 */ } /* 基于DFS的拓扑排序算法(单趟) */ template <typename Tv, typename Te> bool Graph<Tv, Te>::TSort(int v, int& clock, Stack<Tv>* S) { /* assert: 0 <= v < n */ dTime(v) = ++clock; status(v) = DISCOVERED; /* 发现顶点v */ for (int u = firstNbr(v); -1 < u; u = nextNbr(v, u)) /* 枚举v的所有邻居u */ switch (status(u)) { /* 并视u的状态分别处理 */ case UNDISCOVERED: parent(u) = v; type(v, u) = TREE; if (!TSort(u, clock, S)) /* 从顶点u处出发深入搜索 */ return false; /* 若u及其后代不能拓扑排序(则全图亦必如此),故返回并报告 */ break; case DISCOVERED: type(v, u) = BACKWARD; /* 一旦发现后向边(非DAG),则 */ return false; /* 不必深入,故返回并报告 */ default: /* VISITED (digraphs only) */ type(v, u) = (dTime(v) < dTime(u)) ? 
FORWARD : CROSS; break; } status(v) = VISITED; S->push(vertex(v)); /* 顶点被标记为VISITED时,随即入栈 */ return true; /* v及其后代可以拓扑排序 */ } /* Prim算法:无向连通图,各边表示为方向互逆、权重相等的一对边 */ template <typename Tv, typename Te> void Graph<Tv, Te>::prim(int s) { /* assert: 0 <= s < n */ reset(); priority(s) = 0; for (int i = 0; i < n; i++) { /* 共需引入n个顶点和n-1条边 */ status(s) = VISITED; if (-1 != parent(s)) type(parent(s), s) = TREE; /* 引入当前的s */ for (int j = firstNbr(s); -1 < j; j = nextNbr(s, j)) /* 枚举s的所有邻居j */ if ((status(j) == UNDISCOVERED) && (priority(j) > weight(s, j))) /* 对邻接顶点j做松弛 */ { priority(j) = weight(s, j); parent(j) = s; } /* 与Dijkstra算法唯一的不同之处 */ for (int shortest = INT_MAX, j = 0; j < n; j++) /* 选出下一极短跨边 */ if ((status(j) == UNDISCOVERED) && (shortest > priority(j))) { shortest = priority(j); s = j; } } } /* 最短路径Dijkstra算法:适用于一般的有向图 */ template <typename Tv, typename Te> void Graph<Tv, Te>::dijkstra(int s) { /* assert: 0 <= s < n */ reset(); priority(s) = 0; for (int i = 0; i < n; i++) { /* 共需引入n个顶点和n-1条边 */ status(s) = VISITED; if (-1 != parent(s)) type(parent(s), s) = TREE; /* 引入当前的s */ for (int j = firstNbr(s); -1 < j; j = nextNbr(s, j)) /* 枚举s的所有邻居j */ if ((status(j) == UNDISCOVERED) && (priority(j) > priority(s) + weight(s, j))) /* 对邻接顶点j做松弛 */ { priority(j) = priority(s) + weight(s, j); parent(j) = s; } /* 与Prim算法唯一的不同之处 */ for (int shortest = INT_MAX, j = 0; j < n; j++) /* 选出下一最近顶点 */ if ((status(j) == UNDISCOVERED) && (shortest > priority(j))) { shortest = priority(j); s = j; } } } /* 对于无向连通图,假设每一条边表示为方向互逆、权重相等的一对边 */ /* 优先级搜索(全图) */ template <typename Tv, typename Te> template <typename PU> void Graph<Tv, Te>::pfs(int s, PU prioUpdater) { /* assert: 0 <= s < n */ reset(); int v = s; /* 初始化 */ do /* 逐一检查所有顶点 */ if (UNDISCOVERED == status(v)) /* 一旦遇到尚未发现的顶点 */ PFS(v, prioUpdater); /* 即从该顶点出发启动一次PFS */ while (s != (v = (++v % n))); /* 按序号检查,故不漏不重 */ } /* 顶点类型、边类型、优先级更新器 */ template <typename Tv, typename Te> template <typename PU> void Graph<Tv, Te>::PFS(int s, PU 
prioUpdater) { /* priority-first search (single connected component) */
   priority(s) = 0; status(s) = VISITED; parent(s) = -1; /* initialize: the start vertex s joins the PFS tree */
   while (1) { /* keep adding the next vertex and edge to the PFS tree */
      for (int w = firstNbr(s); -1 < w; w = nextNbr(s, w)) /* enumerate every neighbor w of s */
         prioUpdater(this, s, w); /* update w's priority and its parent */
      for (int shortest = INT_MAX, w = 0; w < n; w++)
         if (UNDISCOVERED == status(w)) /* among the vertices not yet in the traversal tree */
            if (shortest > priority(w)) /* choose the next one: */
               { shortest = priority(w); s = w; } /* the vertex s with the best (smallest) priority number */
      if (VISITED == status(s)) break; /* stop once every vertex has been added */
      status(s) = VISITED; /* add s, and the edge to its parent, to the traversal tree */
      type(parent(s), s) = TREE;
   }
} /* supplying a concrete priority-update policy prioUpdater yields the different algorithms below */

template <typename Tv, typename Te> struct BfsPU { // vertex priority updater for BFS
   virtual void operator() (Graph<Tv, Te>* g, int uk, int v) {
      if (g->status(v) == UNDISCOVERED) // for each not-yet-discovered neighbor v of uk
         if (g->priority(v) > g->priority(uk) + 1) { // use the distance to the start vertex as the priority number
            g->priority(v) = g->priority(uk) + 1; // update the priority (number)
            g->parent(v) = uk; // update the parent
         } // net effect: vertices discovered earlier take precedence
   }
};

template <typename Tv, typename Te> struct DfsPU { // vertex priority updater for DFS
   virtual void operator() (Graph<Tv, Te>* g, int uk, int v) {
      if (g->status(v) == UNDISCOVERED) // for each not-yet-discovered neighbor v of uk
         if (g->priority(v) > g->priority(uk) - 1) { // use the negated distance to the start vertex as the priority number
            g->priority(v) = g->priority(uk) - 1; // update the priority (number)
            g->parent(v) = uk; // update the parent
            return; // NOTE: unlike BfsPU(), return as soon as one neighbor can be updated
         } // net effect: vertices discovered later take precedence
   }
};

template <typename Tv, typename Te> struct PrimPU { // vertex priority updater for Prim's algorithm
   virtual void operator() (Graph<Tv, Te>* g, int uk, int v) {
      if (UNDISCOVERED == g->status(v)) // for each not-yet-discovered neighbor v of uk
         if (g->priority(v) > g->weight(uk, v)) { // relax following Prim's strategy
            g->priority(v) = g->weight(uk, v); // update the priority (number)
            g->parent(v) = uk; // update the parent
         }
   }
};

template <typename Tv, typename Te> struct DijkstraPU { // vertex priority updater for Dijkstra's algorithm
   virtual void operator() ( Graph<Tv, Te>* g, int uk, int v ) {
      if ( UNDISCOVERED == g->status ( v ) ) // for each not-yet-discovered neighbor v of uk, relax
         if ( g->priority ( v ) > g->priority ( uk ) + g->weight ( uk, v ) ) { // following Dijkstra's strategy
            g->priority ( v ) = g->priority ( uk ) + g->weight ( uk, v ); // update the priority (number)
            g->parent ( v ) = uk; // and update the parent at the same time
         }
   }
};
#endif
<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/

/******************************************************************************************
 * Test of PQ_ComplHeap & PQ_LeftHeap
 ******************************************************************************************/
//#define DSA_PQ_COMPLHEAP
//#define DSA_PQ_LEFTHEAP
#define DSA_PQ_LIST

#include<iostream>
using namespace std;
#include "../_share/util.h"
#include "../UniPrint/print.h"

#if defined(DSA_PQ_LEFTHEAP)
#include "../PQ_LeftHeap/PQ_LeftHeap.h"
#elif defined(DSA_PQ_COMPLHEAP)
#include "../PQ_ComplHeap/PQ_ComplHeap.h"
#elif defined(DSA_PQ_LIST)
#include "../PQ_List/PQ_List.h"
#endif

#include <windows.h>

/******************************************************************************************
 * Uniform test procedure for priority queues backed by a list, a vector, or a leftist heap
 ******************************************************************************************/
template <typename PQ, typename T> // heap type, entry type
void testHeap ( int n ) {
   T* A = new T[2*n/3]; // create an array of capacity 2*n/3, and
   for ( int i = 0; i < 2 * n / 3; i++ ) // fill it with 2*n/3 random entries
      A[i] = dice ( ( T ) 3 * n );
   /*DSA*/printf ( "%d random keys created:\n", 2 * n / 3 );
   /*DSA*/for ( int i = 0; i < 2 * n / 3; i++ ) print ( A[i] ); printf ( "\n" );
   PQ heap ( A + n / 6, n / 3 ); // batch heap construction (PQ_ComplHeap implements Robert Floyd's algorithm)
   delete [] A;
   /*DSA*/system("cls"); print ( heap ); Sleep(100);
   while ( heap.size() < n ) { // randomized test
      if ( dice ( 100 ) < 70 ) { // with 70% probability: insert a new entry
         T e = dice ( ( T ) 3 * n );
         /*DSA*/printf ( "Inserting" ); print ( e ); printf ( " ...\n" );
         heap.insert ( e );
         /*DSA*/printf ( "Insertion done\n" );
      } else { // with 30% probability: remove the maximum entry
         if ( !heap.empty() ) {
            /*DSA*/printf (
"Deleting max ...\n" );
            T e = heap.delMax();
            /*DSA*/printf ( "Deletion done with" ); print ( e ); printf ( "\n" );
         }
      }
      /*DSA*/system("cls"); print ( heap ); Sleep(100);
   }
   while ( !heap.empty() ) { // drain the heap
      T e = heap.delMax();
      /*DSA*/printf ( "Deletion done with" ); print ( e ); printf ( "\n" );
      /*DSA*/system("cls"); print ( heap ); Sleep(100);
   }
}

/******************************************************************************************
 * Priority queue test driver
 ******************************************************************************************/
int main ( int argc, char* argv[] ) {
   int n = 10;
   srand ( ( unsigned int ) time ( NULL ) );
#if defined(DSA_PQ_LEFTHEAP)
   testHeap<PQ_LeftHeap<int>, int> (n); // the entry type can be chosen freely here
#elif defined(DSA_PQ_COMPLHEAP)
   testHeap<PQ_ComplHeap<int>, int> (n); // the entry type can be chosen freely here
#elif defined(DSA_PQ_LIST)
   testHeap<PQ_List<int>, int> (n); // the entry type can be chosen freely here
#else
   printf ( "PQ type not defined yet\n" );
#endif
   return 0;
}
<file_sep>/******************************************************************************************
 * Data Structures and Algorithm in C++ .
 * <NAME>(<EMAIL>) is first author, <NAME> learn、add and modify it .
 * All rights reserved.
 ******************************************************************************************/

#ifndef PRINT_HASHTABLE_H__
#define PRINT_HASHTABLE_H__

/******************************************************************************************
 * Hashtable printer: dumps bucket numbers, keys, and values in three aligned rows
 ******************************************************************************************/
template <typename K, typename V> // key, value
void UniPrint::p ( Hashtable<K, V>& ht ) { // by reference
   printf ( "%s[%d]*%d/%d:\n", typeid ( ht ).name(), &ht, ht.N, ht.M ); // basic info: type, address, N entries / M buckets
   for ( int i = 0; i < ht.M; i++ ) // print the bucket numbers
      printf ( " %4d ", i );
   printf ( "\n" );
   for ( int i = 0; i < ht.M; i++ ) // print all keys
      if ( ht.ht[i] ) printf ( "-<%04d>-", ht.ht[i]->key ); // for demos only: assumes int keys
      else if ( ht.lazyRemoval->test ( i ) ) printf ( "-<xxxx>-" ); // lazily removed slot
      else printf ( "--------" ); // empty slot
   printf ( "\n" );
   for ( int i = 0; i < ht.M; i++ ) // print all values
      if ( ht.ht[i] ) printf ( " %c ", ht.ht[i]->value ); // for demos only: assumes char values
      // if (ht.ht[i]) printf("%8s", ht.ht[i]->value); // for the hashtable used in Huffman coding
      else if ( ht.lazyRemoval->test ( i ) ) printf ( " <xxxx> " );
      else printf ( " " );
   printf ( "\n" );
}

#endif // !PRINT_HASHTABLE_H__
bc787a7a76cdc7ad4f574fede976a5dd067f2302
[ "Markdown", "C", "C++" ]
66
C++
bulingma/DataStructAndAlgorithm
76da9f14dc2c5818590dd3ca007723967575a310
fca7693cf52c0be7393bd4200aeae9e9efb31ab1
refs/heads/master
<repo_name>sydorbogdan/lab_3<file_sep>/t2/t2.py
import pprint
import json
import copy


def help_doc(hlp: bool) -> None:
    '''
    Print the interactive command reference when ``hlp`` is truthy.
    '''
    if hlp:
        print('------------------------------------------')
        print("cd - enter in key \n.. - go back \n"
              "ls - show sons \np - show all sons and their values \n")
        print('!!!!!!')
        print("If >>>cd 'key' don't work try: \n>>> ls\n>>> cd (copy key from ls here)")
        print('!!!!!!')
        print()
        print("(keys in dict can be only string)")
        print('------------------------------------------')
    return None


def open_json(path: str):
    '''
    Load the JSON file at ``path`` and return the parsed object
    (typically a dict or list, depending on the file contents).
    '''
    with open(path) as f:
        data = json.load(f)
    return data


def ls_(data) -> None:
    '''
    Print every direct child of ``data``: list indices or dict keys,
    each followed by the child's type. Prints 'Empty' for scalars.
    '''
    # NOTE(review): type(x) == list rejects subclasses; isinstance() would be
    # the idiomatic check — confirm no caller relies on exact-type matching.
    if type(data) == list:
        for i in range(len(data)):
            print(str(i) + ' - ' + str(type(data[i])))
    elif type(data) == dict:
        for i in data.keys():
            print(str(i) + ' - ' + str(type(data[i])))
    else:
        print('Empty')


def cd_(data, key, pth):
    '''
    Descend from ``data`` into the child named ``key``.

    Returns a tuple (child, new_path) where new_path extends ``pth`` with
    (key, 'list') or (key, 'dict'); returns None (implicitly, after
    printing an error) when the key/index is invalid or ``data`` is a
    scalar.
    '''
    new_path = copy.deepcopy(pth)
    if type(data) == list:
        try:
            new_path.append((key, 'list'))
            rez = (data[int(key)], new_path)
            return rez
        # NOTE(review): bare except also hides programming errors; catching
        # (ValueError, IndexError) would be safer.
        except:
            print('Incorrect index')
    elif type(data) == dict:
        try:
            new_path.append((key, 'dict'))
            rez = (data[key], new_path)
            return rez
        except:
            print('Incorrect key')
    else:
        return None


def get_back(data, pth):
    '''
    Re-walk ``pth`` from the root ``data``, stopping one step short, and
    return (parent_node, shortened_path).
    '''
    new_path = []
    rez_data = copy.deepcopy(data)
    check = 0
    for i in range(len(pth) - 1):
        check += 1
        if pth[i][1] == 'dict':
            new_path.append(pth[i])
            rez_data = rez_data[pth[i][0]]
        elif pth[i][1] == 'list':
            new_path.append(pth[i])
            rez_data = rez_data[int(pth[i][0])]
    # NOTE(review): when len(pth) <= 1 the loop never runs, so this returns
    # the ROOT data paired with the UNCHANGED path — data and path disagree
    # by one level; returning ([], root) semantics should be confirmed.
    if check == 0:
        return (rez_data, pth)
    return (rez_data, new_path)


def cmd_live(data):
    '''
    Interactive REPL over the loaded JSON tree.

    Commands: ls (list children), cd <key> (descend), .. (go up),
    p (pretty-print current node), path (show current path).
    Loops forever; terminate with Ctrl-C / EOF.
    '''
    live_data = copy.deepcopy(data)
    path = []
    help_doc(True)
    while True:
        comnd = input('>>> ')
        if comnd == 'ls':
            ls_(live_data)
        elif comnd.split()[0] == 'cd':
            rez_cd = cd_(live_data, ' '.join(comnd.split()[1:]), path)
            if rez_cd == None:
                print('No way')
            else:
                live_data = rez_cd[0]
                path = rez_cd[1]
        elif comnd == '..':
            # Re-walks from the original root each time; see get_back().
            rez_ = get_back(data, path)
            live_data = rez_[0]
            path = rez_[1]
        elif comnd == 'p':
            pp = pprint.PrettyPrinter(indent=4, width=41, compact=True)
            pp.pprint(live_data)
        elif comnd == 'path':
            print(path)
        else:
            print('Input error')


if __name__ == '__main__':
    # NOTE(review): absolute, machine-specific path — breaks on any other
    # machine; consider a command-line argument.
    data = open_json(
        '/home/bogdan/Documents/1yr_2sm_lab/lab_3/t2/json (2).json')
    cmd_live(data)
<file_sep>/t3/t3.py
from flask import Flask, render_template, url_for, request
import tweepy
import geocoder
import folium
from random import randint

app = Flask(__name__)
app.config['SECRET_KEY'] = '1234567890'

post = []


def get_users(nic: str) -> dict:
    '''
    Fetch the Twitter accounts followed by ``nic`` and return a mapping
    of screen_name -> self-reported location (accounts with an empty
    location are skipped).
    '''
    rez = {}
    # NOTE(review): credentials hardcoded in source (partially redacted
    # here) — these should be read from environment variables or a config
    # file kept out of version control.
    CONSUMER_KEY = 'IfFqvMUJgQAyBOhYM6YX2cADQ'
    CONSUMER_SECRET = '<KEY>'
    ACCESS_TOKEN = '<KEY>'
    ACCESS_TOKEN_SECRET = '<KEY>'
    auth = tweepy.auth.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
    api = tweepy.API(auth, wait_on_rate_limit=True)
    users = tweepy.Cursor(api.friends, screen_name=nic).items()
    for user in users:
        if user.location != '':
            rez[user.screen_name] = user.location
    return rez


def get_cord_(dct: dict) -> dict:
    '''
    Geocode each location string in ``dct`` (screen_name -> location)
    and return screen_name -> OSM geocoding result; entries that fail to
    geocode are silently dropped.
    '''
    rez = {}
    for i in dct:
        try:
            # NOTE(review): "rez[i] = point = ..." — the extra 'point'
            # binding is an unused leftover.
            rez[i] = point = geocoder.osm(dct[i]).osm
        except:
            continue
    return rez


def get_map(dct: dict):
    '''
    Build and return a folium.Map with one marker per geocoded user.
    A small random jitter is added so co-located markers do not overlap.
    '''
    map_ = folium.Map()
    for mrk in dct:
        try:
            folium.Marker((dct[mrk]['y'] + randint(0,100)*0.0001, dct[mrk]['x'] + randint(0,100)*0.0001),popup='<i>' + str(mrk)+'</i>').add_to(map_)
        except:
            continue
    return map_


@app.route("/")
def home():
    '''
    Render the landing page with the input form.
    '''
    return render_template('home.html', post = post)


@app.route('/', methods=['POST'])
def my_form_post():
    '''
    Handle the form submission: look up the user's friends, geocode
    their locations, and return the rendered folium map as HTML.
    '''
    # NOTE(review): the bare except hides auth failures, rate limits and
    # network errors behind a generic 'Input error' message.
    try:
        text = request.form['text']
        dt = get_cord_(get_users(text))
        mp = get_map(dt)
        return mp._repr_html_()
    except:
        return 'Input error'


if __name__ == '__main__':
    app.run(debug=True)
093ef9d7ef0f60a477a641e30b7a51331f3cc80c
[ "Python" ]
2
Python
sydorbogdan/lab_3
00ae05c2b7a9db795c87362d4f657a7a9000621a
99298c54c68d8a092cd0c18d460d6218110493f4
refs/heads/master
<file_sep>$(document).ready(function () { var dataBaseURL = 'https://github.com/wotv-resource/wotv-resource.github.io/tree/master/data'; //console.log(dataBaseURL); $.ajax({ url: dataBaseURL + '/Units.json', method: 'GET', success: function(data) { console.log(data); }, error: function(error) { console.log('error with api: ' + error); } }); /* readJSON('data/Units.json') function readJSON(path) { var xhr = new XMLHttpRequest(); xhr.open('GET', path, true); xhr.responseType = 'blob'; xhr.onload = function(e) { if (this.status == 200) { var file = new File([this.response], 'temp'); var fileReader = new FileReader(); fileReader.addEventListener('load', function(){ //do stuff with fileReader.result console.log(fileReader.result); }); fileReader.readAsText(file); } } xhr.send(); } */ /* fetch(url, {mode: 'no-cors'}) // Call the fetch function passing the url of the API as a parameter .then(function(data) { console.log(data.response); }) .catch(function() { // This is where you run code if the server returns any errors }); */ })
6dad38ce4e239e98a0ec4c424a04d0446fa2aff3
[ "JavaScript" ]
1
JavaScript
wotv-resource/wotv-resource.github.io
4c104d96cca6bdd9b6168b6591dc25ecf21a85e1
727e4f4511d24830384ea00b561d6d929bcaceb8
refs/heads/master
<repo_name>muhrrynn/FeatherWeightPress<file_sep>/splinks.php <?php /* Created on: 11/02/2008 */ ?> <!-- For Future Reference... When changing your links in this menu, there are two other things you should do: 1) You MUST change the image paths in the splink.css file, or you'll be stuck in 2008 2) You will want to add the oldest link to the arch.php file I've left you notes like this in the other files as well. In this menu, all you have to do is change the portion after href to point the link at the appropriate page. General Label Reference new = the current calendar year ym1 = current year minus 1 ym2 = current year minus 2 --> <ul id="splinks"> <li><a href="resume.php" class="res"></a></li> <li><a href="biog.php" class="bio"></a></li> <li><a href="exhi.php" class="exh"></a></li> <li><a href="2009.php" class="new"></a></li> <li><a href="2008.php" class="ym1"></a></li> <li><a href="2007.php" class="ym2"></a></li> <li><a href="arch.php" class="arc"></a></li> <li><a href="cont.php" class="con"></a></li> <li><a href="index.php" class="hom"></a></li> </ul> <file_sep>/2007.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <div id="content"> <h1>2007 Work</h1> <div id="gallery"> <a href="images/2007/mis1.jpg" rel="lightbox[2007]" title="<b><i>Mistake Series (Blue Stripe)</i></b><br/>gouache<br/>9 x 19 in"><img src="images/2007/tmis1.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2007/mis4.jpg" rel="lightbox[2007]" title="<b><i>Facade</i></b><br/>ink on panel<br/>each 10 x 12 in"><img src="images/2007/tmis4.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2007/mis5.jpg" rel="lightbox[2007]" title="<b><i>Mistake Series (Johns)</i></b><br/>ink on paper<br/>15 x 18 in"><img src="images/2007/tmis5.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp;<br /><br /> <a href="images/2007/ss01.jpg" rel="lightbox[2007]" title="<b><i>Mistake Series (Black Target)</i></b><br/>ink on paper<br/>9 x 19 in"><img src="images/2007/tss01.jpg" alt="" /></a> 
&nbsp;&nbsp;&nbsp; <a href="images/2007/mscheck.jpg" rel="lightbox[2007]" title="<b><i>Mistake Series (Check)</i></b><br/>ink on paper<br/>15 x 20 in"><img src="images/2007/tmscheck.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2007/curcal.jpg" rel="lightbox[2007]" title="<b><i>Untitled</i></b><br/>ink on paper<br/>each 9 x 19 in"><img src="images/2007/tcurcal.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <br /><br /> </div> </div> <?php include("stubby.php"); ?> <file_sep>/arch.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <!-- Easy Peasy this part. Just copy one of the existing links and adjust the urls to point to the proper place. I presume you'll eventually want to do sets of three images per row, like the other pages, so the additional code you'll need is as follows: Between the first and second link, add: &nbsp;&nbsp;&nbsp; Between the second and third link, add: &nbsp;&nbsp;&nbsp; Follow the third link with: <br /><br /> It'll probably be eaisest to always add the newest link to the top of the code and adjust the spacing with the above as you add more. Then your top row will always be full. Well... starting in 2011, it will be. Happy coding! 
--> <div id="content"> <h1>Archive of Works</h1> <a href="cran.php"><img src="images/cranbrook/tshrine.jpg" alt="" /><br /> Cranbrook Installations</a> <br /><br /> </div> <?php include("stubby.php"); ?> <file_sep>/cran.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <div id="content"> <h1>Cranbrook Installations</h1> <a href="images/cranbrook/shrine.jpg" rel="lightbox[cranbrook]" title="<b><i>Shrine</i></b><br/>Hand bound book with illustrations on hand decorated pedestal<br/>book 4 x 6 in"><img src="images/cranbrook/tshrine.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/cranbrook/4.jpg" rel="lightbox[cranbrook]" title="<b><i>Watching Over Me</i></b><br/>screen print<br/>each 11 x 15 in"><img src="images/cranbrook/t4.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/cranbrook/4detail.jpg" rel="lightbox[cranbrook]" title="<b><i>detail- Watching Over Me</i></b><br/>screen print<br/>each 11 x 15 in"><img src="images/cranbrook/t4detail.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp;<br /><br /> <a href="images/cranbrook/pro1.jpg" rel="lightbox[cranbrook]" title="<b><i>Procession / Recession</i></b><br/>screen print<br/>each 53 x 55 in"><img src="images/cranbrook/tpro1.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/cranbrook/pro2.jpg" rel="lightbox[cranbrook]" title="<b><i>Procession / Recession</i></b><br/>screen print<br/>each 53 x 55 in"><img src="images/cranbrook/tpro2.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/cranbrook/pro3.jpg" rel="lightbox[cranbrook]" title="<b><i>Procession / Recession</i></b><br/>screen print<br/>each 53 x 55 in"><img src="images/cranbrook/tpro3.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp;<br /><br /> </div> <?php include("stubby.php"); ?> <file_sep>/exhi.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <div id="content"> <h1>Exhibitions</h1> <h3 style="color: #639ec8;">Solo</h3> <b>2009</b><ul> <li><b>Order Up; blah blah blah blah blah</b>, blah blah blah blah blah, IL</li></ul> 
<b>2008</b><ul> <li><b>New Pop</b>, blah blah blah blah blah, blah blah blah blah blah, NY</li></ul> <h3 style="color: #639ec8;">Group</h3> <b>2009</b><ul> <li><b>Untitled, blah blah blah blah blah</b>, blah blah blah blah blah, Chicago, IL</li> <li><b>Spring Collection</b>, blah blah blah blah blah, blah blah blah blah blah, IN</li> <li><b>Petra, Sasha and Stephanie</b>, blah blah blah blah blah, blah blah blah blah blah, IN</li></ul> <b>2008</b><ul> <li><b>Winter Collection</b>, blah blah blah blah blah, blah blah blah blah blah, IN</li> </blockquote> </div> <?php include("stubby.php"); ?> <file_sep>/biog.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <div id="content"> <h1>Biography</h1> <p>Stephanie attended the blah blah blah blah blah (20xx-20xx) obtaining a BFA. While in undergraduate she had the opportunity to intern at two different print shops; blah blah blah blah blah in blah, IL and blah blah blah blah blah in blah, NJ. </p> <p>In 20xx she started graduate school at Cranbrook Academy of Art completing the two year program and earning her MFA in blah blah blah blah blah in 20xx. Upon graduation she moved to blah blah blah blah blah, IN to be with her husband.</p> <p>Stephanie is now the blah blah blah blah blah blah blah blah blah blah. 
She keeps regular hours in her home studio and considers herself primarily an Artist / Printmaker secondarily a blah blah blah blah blah.</p> </div> <?php include("stubby.php"); ?> <file_sep>/2009.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <div id="content"> <h1>2009 Work</h1> <a href="images/2008/break.jpg" rel="lightbox[2008]" title="<b><i>On Break</i></b><br/>ink and watercolor on paper<br/>20 x 40 in"><img src="images/2008/tbreak.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2008/quickfix.jpg" rel="lightbox[2008]" title="<b><i>Quick Fix</i></b><br/>ink and gouache on paper<br/>each 22 x 30 in"><img src="images/2008/tquickfix.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2008/assembly.jpg" rel="lightbox[2008]" title="<b><i>Some Assembly Required</i></b><br/>ink and gouache on paper<br/>each 15 x 20 in"><img src="images/2008/tassembly.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp;<br /><br /> </div> <?php include("stubby.php"); ?> <file_sep>/topnot.php <?php /* Created on: 11/01/2008 */ ?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en"> <head> <title>Feather Weight Press</title> <meta http-equiv="Content-Type" content="text/html;charset=utf-8" /> <link rel="stylesheet" type="text/css" href="flat.css" /> <link rel="stylesheet" type="text/css" href="splink.css" /> <link rel="stylesheet" type="text/css" href="lightbox/lightbox.css" media="screen" /> <!--[if IE]> <link rel="stylesheet" type="text/css" href="iesucks.css" /> <![endif]--> <script type="text/javascript" src="lightbox/prototype.js"></script> <script type="text/javascript" src="lightbox/scriptaculous.js?load=effects,builder"></script> <script type="text/javascript" src="lightbox/lightbox.js"></script> </head> <body> <div id="framit" class="centered"> <?php include("splinks.php"); ?> <file_sep>/README.md # FeatherWeightPress Php website circa 
2008<br/> Bits are archived here to have my own scratch built site to look at and say "hey, I could code back then."<br/> Commented instructions for site owner left in situ for personal amuseument. <file_sep>/cont.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <div id="content"> <h1>Contact</h1> For information regarding the purchase of works, please email <a href="mailto: &lt;script type='text/javascript'> &lt;!-- var prefix = 'ma' + 'il' + 'to'; var path = 'hr' + 'ef' + '='; var addy24417 = 'sa' + 'les' + '@'; addy24417 = addy24417 + '---' + '---' + '---' + '.' + 'com'; document.write( '&lt;a ' + path + '\'' + prefix + ':' + addy24417 + '\'>' ); document.write( addy24417 ); document.write( '&lt;\/a>' ); //-->\n &lt;/script>&lt;script type='text/javascript'> &lt;!-- document.write( '&lt;span style=\'display: none;\'>' ); //--> &lt;/script>This e-mail address is being protected from spam bots, JavaScript must be enabled to view it &lt;script type='text/javascript'> &lt;!-- document.write( '&lt;/' ); document.write( 'span>' ); //--> &lt;/script>" title="email to:"> <script type='text/javascript'> <!-- var prefix = 'ma' + 'il' + 'to'; var path = 'hr' + 'ef' + '='; var addy24417 = 'sa' + 'les' + '@'; addy24417 = addy24417 + '---' + '---' + '---' + '.' 
+ 'com'; document.write( '<a ' + path + '\'' + prefix + ':' + addy24417 + '\'>' ); document.write( addy24417 ); document.write( '<\/a>' ); //-->\n </script><script type='text/javascript'> <!-- document.write( '<span style=\'display: none;\'>' ); //--> </script>This e-mail address is being protected from spam bots, JavaScript must be enabled to view it <script type='text/javascript'> <!-- document.write( '</' ); document.write( 'span>' ); //--> </script></a><br/><br/> To make comments or ask questions about Stephanie's work, please email <a href="mailto: &lt;script type='text/javascript'> &lt;!-- var prefix = 'ma' + 'il' + 'to'; var path = 'hr' + 'ef' + '='; var addy24417 = 'inq' + 'uire' + '@'; addy24417 = addy24417 + '---' + '---' + '---' + '.' + 'com'; document.write( '&lt;a ' + path + '\'' + prefix + ':' + addy24417 + '\'>' ); document.write( addy24417 ); document.write( '&lt;\/a>' ); //-->\n &lt;/script>&lt;script type='text/javascript'> &lt;!-- document.write( '&lt;span style=\'display: none;\'>' ); //--> &lt;/script>This e-mail address is being protected from spam bots, JavaScript must be enabled to view it &lt;script type='text/javascript'> &lt;!-- document.write( '&lt;/' ); document.write( 'span>' ); //--> &lt;/script>" title="email to:"> <script type='text/javascript'> <!-- var prefix = 'ma' + 'il' + 'to'; var path = 'hr' + 'ef' + '='; var addy24417 = 'inq' + 'uire' + '@'; addy24417 = addy24417 + '---' + '---' + '---' + '.' 
+ 'com'; document.write( '<a ' + path + '\'' + prefix + ':' + addy24417 + '\'>' ); document.write( addy24417 ); document.write( '<\/a>' ); //-->\n </script><script type='text/javascript'> <!-- document.write( '<span style=\'display: none;\'>' ); //--> </script>This e-mail address is being protected from spam bots, JavaScript must be enabled to view it <script type='text/javascript'> <!-- document.write( '</' ); document.write( 'span>' ); //--> </script></a><br /><br /> To contact the site administrator, please email <a href="mailto: &lt;script type='text/javascript'> &lt;!-- var prefix = 'ma' + 'il' + 'to'; var path = 'hr' + 'ef' + '='; var addy24417 = 'bit' + 'ca' + '@'; addy24417 = addy24417 + '---' + '---' + '---' + '.' + 'com'; document.write( '&lt;a ' + path + '\'' + prefix + ':' + addy24417 + '\'>' ); document.write( addy24417 ); document.write( '&lt;\/a>' ); //-->\n &lt;/script>&lt;script type='text/javascript'> &lt;!-- document.write( '&lt;span style=\'display: none;\'>' ); //--> &lt;/script>This e-mail address is being protected from spam bots, JavaScript must be enabled to view it &lt;script type='text/javascript'> &lt;!-- document.write( '&lt;/' ); document.write( 'span>' ); //--> &lt;/script>" title="email to:"> <script type='text/javascript'> <!-- var prefix = 'ma' + 'il' + 'to'; var path = 'hr' + 'ef' + '='; var addy24417 = 'bit' + 'ca' + '@'; addy24417 = addy24417 + '---' + '---' + '---' + '.' 
+ 'com'; document.write( '<a ' + path + '\'' + prefix + ':' + addy24417 + '\'>' ); document.write( addy24417 ); document.write( '<\/a>' ); //-->\n </script><script type='text/javascript'> <!-- document.write( '<span style=\'display: none;\'>' ); //--> </script>This e-mail address is being protected from spam bots, JavaScript must be enabled to view it <script type='text/javascript'> <!-- document.write( '</' ); document.write( 'span>' ); //--> </script></a><br /><br /> Stephanie may be reached by post at: <blockquote> Feather Weight Press<br /> 2020 Vision Street<br /> Somewhere, IN 10110</blockquote> </div> <?php include("stubby.php"); ?> <file_sep>/stubby.php <?php /* Created on: 11/01/2008 */ ?> <div id="footie" class="centered"> <img src="images/b1.gif" width="50" height="33" alt="" />&nbsp;&nbsp; <a href="mailto: &lt;script type='text/javascript'> &lt;!-- var prefix = 'ma' + 'il' + 'to'; var path = 'hr' + 'ef' + '='; var addy24417 = 'sa' + 'les' + '@'; addy24417 = addy24417 + '---' + '---' + '---' + '.' + 'com'; document.write( '&lt;a ' + path + '\'' + prefix + ':' + addy24417 + '\'>' ); document.write( addy24417 ); document.write( '&lt;\/a>' ); //-->\n &lt;/script>&lt;script type='text/javascript'> &lt;!-- document.write( '&lt;span style=\'display: none;\'>' ); //--> &lt;/script>This e-mail address is being protected from spam bots, JavaScript must be enabled to view it &lt;script type='text/javascript'> &lt;!-- document.write( '&lt;/' ); document.write( 'span>' ); //--> &lt;/script>" title="email to:"> <script type='text/javascript'> <!-- var prefix = 'ma' + 'il' + 'to'; var path = 'hr' + 'ef' + '='; var addy24417 = 'sa' + 'les' + '@'; addy24417 = addy24417 + '---' + '---' + '---' + '.' 
+ 'com'; document.write( '<a ' + path + '\'' + prefix + ':' + addy24417 + '\'>' ); document.write( addy24417 ); document.write( '<\/a>' ); //-->\n </script><script type='text/javascript'> <!-- document.write( '<span style=\'display: none;\'>' ); //--> </script>This e-mail address is being protected from spam bots, JavaScript must be enabled to view it <script type='text/javascript'> <!-- document.write( '</' ); document.write( 'span>' ); //--> </script></a>&nbsp;&nbsp; <img src="images/b2.gif" width="50" height="33" alt="" /> </div> </div> </body> </html> <file_sep>/2008.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <div id="content"> <h1>2008 Work</h1> <b>The Mistake Series</b><BR/><br/> When a facade breaks down, a new or true reality is revealed. Sometimes this new reality is mind altering. Sometimes it creates strife and change. But what happens when the degradation reveals noting, or reveals nothing new? Can it still matter? Does it become humorous or is the break down the true revelation? <BR/><br/> The Mistake Series and most of the work produced in 2008, is a lighthearted approach to my disillusion with modern life. Using humor to neutralize an uncomfortable situation. I question the value of objects, symbols and language from our every day lives; isolating and forcing them to not only stand alone but show their weaknesses. Each object is, in one way or another, deteriorating and revealing it's flaws, structure and suggesting ideas greater than itself. 
<BR/><br/> <a href="images/2008/break.jpg" rel="lightbox[2008]" title="<b><i>On Break</i></b><br/>ink and watercolor on paper<br/>20 x 40 in"><img src="images/2008/tbreak.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2008/quickfix.jpg" rel="lightbox[2008]" title="<b><i>Quick Fix</i></b><br/>ink and gouache on paper<br/>each 22 x 30 in"><img src="images/2008/tquickfix.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2008/assembly.jpg" rel="lightbox[2008]" title="<b><i>Some Assembly Required</i></b><br/>ink and gouache on paper<br/>each 15 x 20 in"><img src="images/2008/tassembly.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp;<br /><br /> <a href="images/2008/love.jpg" rel="lightbox[2008]" title="<b><i>Love</i></b><br/>ink on paper<br/>15 x 20 in"><img src="images/2008/tlove.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2008/connection.jpg" rel="lightbox[2008]" title="<b><i>Connection</i></b><br/>ink and gouache on paper<br/>each 15 x 20 in"><img src="images/2008/tconnection.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp; <a href="images/2008/missing.jpg" rel="lightbox[2008]" title="<b><i>Missing</i></b><br/>ink on paper<br/>each 15 x 20 in"><img src="images/2008/tmissing.jpg" alt="" /></a> &nbsp;&nbsp;&nbsp;<br /><br /> </div> <?php include("stubby.php"); ?> <file_sep>/index.php <?php /* Created on: 11/01/2008 */ ?> <?php include("topnot.php"); ?> <div id="content"> <h1>Stephanie, Artist</h1> <p>My work uses blah blah blah blah blah to examine what happens when a method of communication breaks down. When visual aids blah blah blah blah blah and the messages blah blah blah blah blah subverted by blah blah blah blah blah. </p> <p> This subversion is blah blah blah blah blah, its blah blah blah blah blah. Sometimes blah blah blah blah blah in the decay, blah blah blah blah blah the culture that made it. Asking the viewer to reexamine blah blah blah blah blah.</p> </div> <?php include("stubby.php"); ?>
41843af61f7d52a3a99b2062c2589c32888fa3f4
[ "Markdown", "PHP" ]
13
PHP
muhrrynn/FeatherWeightPress
a7afbec5a0b497f81f383983374cde69a4d28afe
81295bffebb4050545da4436a0acf032e02edf96
refs/heads/main
<repo_name>eslavich/stcal<file_sep>/src/stcal/jump/jump.py import time import logging import numpy as np from . import twopoint_difference as twopt from . import constants import multiprocessing # 05/18/21: TODO- commented out now; reinstate later log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) def detect_jumps(frames_per_group, data, gdq, pdq, err, gain_2d, readnoise_2d, rejection_thresh, three_grp_thresh, four_grp_thresh, max_jump_to_flag_neighbors, min_jump_to_flag_neighbors, flag_4_neighbors, dqflags): """ This is the high-level controlling routine for the jump detection process. It loads and sets the various input data and parameters needed by each of the individual detection methods and then calls the detection methods in turn. Note that the detection methods are currently set up on the assumption that the input science and error data arrays will be in units of electrons, hence this routine scales those input arrays by the detector gain. The methods assume that the read noise values will be in units of DN. The gain is applied to the science data and error arrays using the appropriate instrument- and detector-dependent values for each pixel of an image. Also, a 2-dimensional read noise array with appropriate values for each pixel is passed to the detection methods. Parameters ---------- frames_per_group : int number of frames per group data : float, 4D array science array gdq : int, 4D array group dq array pdq : int, 2D array pixelg dq array err : float, 4D array error array gain_2d : float, 2D array gain for all pixels readnoise_2d : float, 2D array readnoise for all pixels rejection_thresh : float cosmic ray sigma rejection threshold three_grp_thresh : float cosmic ray sigma rejection threshold for ramps having 3 groups four_grp_thresh : float cosmic ray sigma rejection threshold for ramps having 4 groups max_jump_to_flag_neighbors : float value in units of sigma that sets the upper limit for flagging of neighbors. 
Any jump above this cutoff will not have its neighbors flagged. min_jump_to_flag_neighbors : float value in units of sigma that sets the lower limit for flagging of neighbors (marginal detections). Any primary jump below this value will not have its neighbors flagged. flag_4_neighbors): bool if set to True (default is True), it will cause the four perpendicular neighbors of all detected jumps to also be flagged as a jump. dqflags: dict A dictionary with at least the following keywords: DO_NOT_USE, SATURATED, JUMP_DET, NO_GAIN_VALUE, GOOD Returns ------- gdq : int, 4D array updated group dq array pdq : int, 2D array updated pixel dq array """ constants.update_dqflags(dqflags) # populate dq flags # Flag the pixeldq where the gain is <=0 or NaN so they will be ignored wh_g = np.where(gain_2d <= 0.) if len(wh_g[0] > 0): pdq[wh_g] = np.bitwise_or(pdq[wh_g], dqflags["NO_GAIN_VALUE"]) pdq[wh_g] = np.bitwise_or(pdq[wh_g], dqflags["DO_NOT_USE"]) wh_g = np.where(np.isnan(gain_2d)) if len(wh_g[0] > 0): pdq[wh_g] = np.bitwise_or(pdq[wh_g], dqflags["NO_GAIN_VALUE"]) pdq[wh_g] = np.bitwise_or(pdq[wh_g], dqflags["DO_NOT_USE"]) # Apply gain to the SCI, ERR, and readnoise arrays so they're in units # of electrons data *= gain_2d err *= gain_2d readnoise_2d *= gain_2d # Apply the 2-point difference method as a first pass log.info('Executing two-point difference method') start = time.time() # Set parameters of input data shape n_rows = data.shape[-2] n_cols = data.shape[-1] n_groups = data.shape[1] n_ints = data.shape[0] row_above_gdq = np.zeros((n_ints, n_groups, n_cols), dtype=np.uint8) previous_row_above_gdq = np.zeros((n_ints, n_groups, n_cols), dtype=np.uint8) row_below_gdq = np.zeros((n_ints, n_groups, n_cols), dtype=np.uint8) # 05/18/21 - When multiprocessing is enabled, the input data cube is split # into a number of row slices, based on the number or avalable cores. # Multiprocessing has been disabled for now, so the nunber of slices # is here set to 1. 
I'm leaving the related code in to ease the eventual # re-enablement of this code. n_slices = 1 yinc = int(n_rows / n_slices) slices = [] # Slice up data, gdq, readnoise_2d into slices # Each element of slices is a tuple of # (data, gdq, readnoise_2d, rejection_thresh, three_grp_thresh, # four_grp_thresh, nframes) for i in range(n_slices - 1): slices.insert(i, (data[:, :, i * yinc:(i + 1) * yinc, :], gdq[:, :, i * yinc:(i + 1) * yinc, :], readnoise_2d[i * yinc:(i + 1) * yinc, :], rejection_thresh, three_grp_thresh, four_grp_thresh, frames_per_group, flag_4_neighbors, max_jump_to_flag_neighbors, min_jump_to_flag_neighbors)) # last slice get the rest slices.insert(n_slices - 1, (data[:, :, (n_slices - 1) * yinc:n_rows, :], gdq[:, :, (n_slices - 1) * yinc:n_rows, :], readnoise_2d[(n_slices - 1) * yinc:n_rows, :], rejection_thresh, three_grp_thresh, four_grp_thresh, frames_per_group, flag_4_neighbors, max_jump_to_flag_neighbors, min_jump_to_flag_neighbors)) if n_slices == 1: gdq, row_below_dq, row_above_dq = \ twopt.find_crs(data, gdq, readnoise_2d, rejection_thresh, three_grp_thresh, four_grp_thresh, frames_per_group, flag_4_neighbors, max_jump_to_flag_neighbors, min_jump_to_flag_neighbors, dqflags) elapsed = time.time() - start else: log.info("Creating %d processes for jump detection " % n_slices) pool = multiprocessing.Pool(processes=n_slices) # Starts each slice in its own process. Starmap allows more than one # parameter to be passed. 
real_result = pool.starmap(twopt.find_crs, slices) pool.close() pool.join() k = 0 # Reconstruct gdq, the row_above_gdq, and the row_below_gdq from the # slice result for resultslice in real_result: if len(real_result) == k + 1: # last result gdq[:, :, k * yinc:n_rows, :] = resultslice[0] else: gdq[:, :, k * yinc:(k + 1) * yinc, :] = resultslice[0] row_below_gdq[:, :, :] = resultslice[1] row_above_gdq[:, :, :] = resultslice[2] if k != 0: # For all but the first slice, flag any CR neighbors in the top # row of the previous slice and flag any neighbors in the # bottom row of this slice saved from the top of the previous # slice gdq[:, :, k * yinc - 1, :] = \ np.bitwise_or(gdq[:, :, k * yinc - 1, :], row_below_gdq[:, :, :]) gdq[:, :, k * yinc, :] = \ np.bitwise_or(gdq[:, :, k * yinc, :], previous_row_above_gdq[:, :, :]) # save the neighbors to be flagged that will be in the next slice previous_row_above_gdq = row_above_gdq.copy() k += 1 elapsed = time.time() - start elapsed = time.time() - start log.info('Total elapsed time = %g sec' % elapsed) # Return the updated data quality arrays return gdq, pdq <file_sep>/src/stcal/jump/tests/test_twopoint_difference.py import pytest import numpy as np from stcal.jump.twopoint_difference import find_crs DQFLAGS = {'JUMP_DET': 4, 'SATURATED': 2, 'DO_NOT_USE': 1} @pytest.fixture(scope='function') def setup_cube(): def _cube(ngroups, readnoise=10): nints = 1 nrows = 204 ncols = 204 rej_threshold = 3 nframes = 1 data = np.zeros(shape=(nints, ngroups, nrows, ncols), dtype=np.float32) read_noise = np.full((nrows, ncols), readnoise, dtype=np.float32) gdq = np.zeros(shape=(nints, ngroups, nrows, ncols), dtype=np.uint32) return data, gdq, nframes, read_noise, rej_threshold return _cube def test_nocrs_noflux(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, 
DQFLAGS) assert(0 == np.max(out_gdq)) # no CR found def test_5grps_cr3_noflux(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) data[0, 0:2, 100, 100] = 10.0 data[0, 2:5, 100, 100] = 1000 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(2 == np.argmax(out_gdq[0, :, 100, 100])) # find the CR in the expected group def test_5grps_cr2_noflux(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) data[0, 0, 100, 100] = 10.0 data[0, 1:6, 100, 100] = 1000 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(1 == np.argmax(out_gdq[0, :, 100, 100])) # find the CR in the expected group def test_6grps_negative_differences_zeromedian(setup_cube): ngroups = 6 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) data[0, 0, 100, 100] = 100 data[0, 1, 100, 100] = 90 data[0, 2, 100, 100] = 95 data[0, 3, 100, 100] = 105 data[0, 4, 100, 100] = 100 data[0, 5, 100, 100] = 100 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(0 == np.max(out_gdq)) # no CR was found def test_5grps_cr2_negjumpflux(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) data[0, 0, 100, 100] = 1000.0 data[0, 1:6, 100, 100] = 10 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(1 == np.argmax(out_gdq[0, :, 100, 100])) # find the CR in the expected group def test_3grps_cr2_noflux(setup_cube): ngroups = 3 data, gdq, 
nframes, read_noise, rej_threshold = setup_cube(ngroups) data[0, 0, 100, 100] = 10.0 data[0, 1:4, 100, 100] = 1000 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found # assert(1,np.argmax(out_gdq[0, :, 100, 100])) # find the CR in the expected group assert(np.array_equal([0, 4, 0], out_gdq[0, :, 100, 100])) def test_4grps_cr2_noflux(setup_cube): ngroups = 4 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) data[0, 0, 100, 100] = 10.0 data[0, 1:4, 100, 100] = 1000 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(1 == np.argmax(out_gdq[0, :, 100, 100])) # find the CR in the expected group def test_5grps_cr2_nframe2(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) nframes = 2 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 500 data[0, 2, 100, 100] = 1002 data[0, 3, 100, 100] = 1001 data[0, 4, 100, 100] = 1005 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 4, 0, 0], out_gdq[0, :, 100, 100])) @pytest.mark.xfail def test_4grps_twocrs_2nd_4th(setup_cube): ngroups = 4 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) nframes = 1 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 60 data[0, 2, 100, 100] = 60 data[0, 3, 100, 100] = 115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(np.max(out_gdq) == 4) # a CR was found def test_5grps_twocrs_2nd_5th(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, 
rej_threshold = setup_cube(ngroups) nframes = 1 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 60 data[0, 2, 100, 100] = 60 data[0, 3, 100, 100] = 60 data[0, 4, 100, 100] = 115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 4], out_gdq[0, :, 100, 100])) def test_5grps_twocrs_2nd_5thbig(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) nframes = 1 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 60 data[0, 2, 100, 100] = 60 data[0, 3, 100, 100] = 60 data[0, 4, 100, 100] = 2115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 4], out_gdq[0, :, 100, 100])) def test_10grps_twocrs_2nd_8th_big(setup_cube): ngroups = 10 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) nframes = 1 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 60 data[0, 2, 100, 100] = 60 data[0, 3, 100, 100] = 60 data[0, 4, 100, 100] = 60 data[0, 5, 100, 100] = 60 data[0, 6, 100, 100] = 60 data[0, 7, 100, 100] = 2115 data[0, 8, 100, 100] = 2115 data[0, 9, 100, 100] = 2115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 0, 0, 0, 4, 0, 0], out_gdq[0, :, 100, 100])) def test_10grps_twocrs_10percenthit(setup_cube): ngroups = 10 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) nframes = 2 data[0:200, 0, 100, 100] = 10.0 data[0:200, 1, 100, 100] = 60 data[0:200, 2, 100, 100] = 60 data[0:200, 3, 100, 100] = 60 data[0:200, 4, 100, 100] = 60 data[0:200, 5, 100, 100] = 60 data[0:200, 
6, 100, 100] = 60 data[0:200, 7, 100, 100] = 2115 data[0:200, 8, 100, 100] = 2115 data[0:200, 9, 100, 100] = 2115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 0, 0, 0, 4, 0, 0], out_gdq[0, :, 100, 100])) def test_5grps_twocrs_2nd_5thbig_nframes2(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10 * np.sqrt(2)) nframes = 2 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 60 data[0, 2, 100, 100] = 60 data[0, 3, 100, 100] = 60 data[0, 4, 100, 100] = 2115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 4], out_gdq[0, :, 100, 100])) def test_6grps_twocrs_2nd_5th(setup_cube): ngroups = 6 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) nframes = 1 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 60 data[0, 2, 100, 100] = 60 data[0, 3, 100, 100] = 60 data[0, 4, 100, 100] = 115 data[0, 5, 100, 100] = 115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert np.array_equal([0, 4, 0, 0, 4, 0], out_gdq[0, :, 100, 100]) def test_6grps_twocrs_2nd_5th_nframes2(setup_cube): ngroups = 6 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10 * np.sqrt(2)) nframes = 2 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 60 data[0, 2, 100, 100] = 60 data[0, 3, 100, 100] = 60 data[0, 4, 100, 100] = 115 data[0, 5, 100, 100] = 115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, 
DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 4, 0], out_gdq[0, :, 100, 100])) def test_6grps_twocrs_twopixels_nframes2(setup_cube): ngroups = 6 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10 * np.sqrt(2)) nframes = 2 data[0, 0, 100, 100] = 10.0 data[0, 1, 100, 100] = 60 data[0, 2, 100, 100] = 60 data[0, 3, 100, 100] = 60 data[0, 4, 100, 100] = 115 data[0, 5, 100, 100] = 115 data[0, 0, 200, 100] = 10.0 data[0, 1, 200, 100] = 10.0 data[0, 2, 200, 100] = 60 data[0, 3, 200, 100] = 60 data[0, 4, 200, 100] = 115 data[0, 5, 200, 100] = 115 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 4, 0], out_gdq[0, :, 100, 100])) assert(np.array_equal([0, 0, 4, 0, 4, 0], out_gdq[0, :, 200, 100])) def test_5grps_cr2_negslope(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups) nframes = 1 data[0, 0, 100, 100] = 100.0 data[0, 1, 100, 100] = 0 data[0, 2, 100, 100] = -200 data[0, 3, 100, 100] = -260 data[0, 4, 100, 100] = -360 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 0, 4, 0, 0], out_gdq[0, :, 100, 100])) def test_6grps_1cr(setup_cube): ngroups = 6 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 10 data[0, 2, 100, 100] = 21 data[0, 3, 100, 100] = 33 data[0, 4, 100, 100] = 46 data[0, 5, 100, 100] = 1146 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert (4 == out_gdq[0, 5, 100, 100]) def test_7grps_1cr(setup_cube): 
ngroups = 7 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 10 data[0, 2, 100, 100] = 21 data[0, 3, 100, 100] = 33 data[0, 4, 100, 100] = 46 data[0, 5, 100, 100] = 60 data[0, 6, 100, 100] = 1160 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == out_gdq[0, 6, 100, 100]) def test_8grps_1cr(setup_cube): ngroups = 8 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 10 data[0, 2, 100, 100] = 21 data[0, 3, 100, 100] = 33 data[0, 4, 100, 100] = 46 data[0, 5, 100, 100] = 60 data[0, 6, 100, 100] = 1160 data[0, 7, 100, 100] = 1175 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == out_gdq[0, 6, 100, 100]) def test_9grps_1cr_1sat(setup_cube): ngroups = 9 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 10 data[0, 2, 100, 100] = 21 data[0, 3, 100, 100] = 33 data[0, 4, 100, 100] = 46 data[0, 5, 100, 100] = 60 data[0, 6, 100, 100] = 1160 data[0, 7, 100, 100] = 1175 data[0, 8, 100, 100] = 6175 gdq[0, 8, 100, 100] = DQFLAGS['SATURATED'] out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == out_gdq[0, 6, 100, 100]) def test_10grps_1cr_2sat(setup_cube): ngroups = 10 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 10 data[0, 2, 100, 100] = 21 data[0, 3, 100, 100] = 33 data[0, 4, 100, 100] = 46 data[0, 5, 100, 100] = 60 data[0, 6, 100, 100] = 1160 data[0, 7, 100, 100] = 1175 data[0, 8, 100, 
100] = 6175 data[0, 9, 100, 100] = 6175 gdq[0, 8, 100, 100] = DQFLAGS['SATURATED'] gdq[0, 9, 100, 100] = DQFLAGS['SATURATED'] out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == out_gdq[0, 6, 100, 100]) def test_11grps_1cr_3sat(setup_cube): ngroups = 11 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 20 data[0, 2, 100, 100] = 39 data[0, 3, 100, 100] = 57 data[0, 4, 100, 100] = 74 data[0, 5, 100, 100] = 90 data[0, 6, 100, 100] = 1160 data[0, 7, 100, 100] = 1175 data[0, 8, 100, 100] = 6175 data[0, 9, 100, 100] = 6175 data[0, 10, 100, 100] = 6175 gdq[0, 8, 100, 100] = DQFLAGS['SATURATED'] gdq[0, 9, 100, 100] = DQFLAGS['SATURATED'] gdq[0, 10, 100, 100] = DQFLAGS['SATURATED'] out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == out_gdq[0, 6, 100, 100]) def test_11grps_0cr_3donotuse(setup_cube): ngroups = 11 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 18 data[0, 2, 100, 100] = 39 data[0, 3, 100, 100] = 57 data[0, 4, 100, 100] = 74 data[0, 5, 100, 100] = 90 data[0, 6, 100, 100] = 115 data[0, 7, 100, 100] = 131 data[0, 8, 100, 100] = 150 data[0, 9, 100, 100] = 6175 data[0, 10, 100, 100] = 6175 gdq[0, 0, 100, 100] = DQFLAGS['DO_NOT_USE'] gdq[0, 9, 100, 100] = DQFLAGS['DO_NOT_USE'] gdq[0, 10, 100, 100] = DQFLAGS['DO_NOT_USE'] out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert (np.array_equal([0, 0, 0, 0, 0, 0, 0, 0], out_gdq[0, 1:-2, 100, 100])) def test_5grps_nocr(setup_cube): ngroups = 6 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, 
readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 10 data[0, 2, 100, 100] = 21 data[0, 3, 100, 100] = 33 data[0, 4, 100, 100] = 46 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) def test_6grps_nocr(setup_cube): ngroups = 6 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=10) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 10 data[0, 2, 100, 100] = 21 data[0, 3, 100, 100] = 33 data[0, 4, 100, 100] = 46 data[0, 5, 100, 100] = 60 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) def test_10grps_cr2_gt3sigma(setup_cube): ngroups = 10 crmag = 16 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1:11, 100, 100] = crmag out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 0, 0, 0, 0, 0, 0], out_gdq[0, :, 100, 100])) def test_10grps_cr2_3sigma_nocr(setup_cube): ngroups = 10 crmag = 15 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1:11, 100, 100] = crmag out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(0 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 0, 0, 0, 0, 0, 0, 0, 0, 0], out_gdq[0, :, 100, 100])) def test_10grps_cr2_gt3sigma_2frames(setup_cube): ngroups = 10 crmag = 16 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 2 data[0, 0, 100, 100] = 0 data[0, 1:11, 100, 100] = crmag out_gdq, 
row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 0, 0, 0, 0, 0, 0], out_gdq[0, :, 100, 100])) def test_10grps_cr2_gt3sigma_2frames_offdiag(setup_cube): ngroups = 10 crmag = 16 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 2 data[0, 0, 100, 110] = 0 data[0, 1:11, 100, 110] = crmag out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(4 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 4, 0, 0, 0, 0, 0, 0, 0, 0], out_gdq[0, :, 100, 110])) def test_10grps_cr2_3sigma_2frames_nocr(setup_cube): ngroups = 10 crmag = 15 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 2 data[0, 0, 100, 100] = 0 data[0, 1:11, 100, 100] = crmag out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(0 == np.max(out_gdq)) # a CR was found assert(np.array_equal([0, 0, 0, 0, 0, 0, 0, 0, 0, 0], out_gdq[0, :, 100, 100])) def test_10grps_nocr_2pixels_sigma0(setup_cube): ngroups = 10 crmag = 15 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 1 data[0, 0, 100, 100] = crmag data[0, 1:11, 100, 100] = crmag read_noise[50, 50] = 0.0 read_noise[60, 60] = 0.0 out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert(0 == np.max(out_gdq)) # no CR was found def test_5grps_satat4_crat3(setup_cube): ngroups = 5 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 1 data[0, 0, 100, 100] = 10000 data[0, 1, 100, 
100] = 30000 data[0, 2, 100, 100] = 60000 data[0, 3, 100, 100] = 61000 data[0, 4, 100, 100] = 61000 gdq[0, 3, 100, 100] = DQFLAGS['SATURATED'] gdq[0, 4, 100, 100] = DQFLAGS['SATURATED'] out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) # assert(4 == np.max(out_gdq)) # no CR was found assert np.array_equal( [0, 0, DQFLAGS['JUMP_DET'], DQFLAGS['SATURATED'], DQFLAGS['SATURATED']], out_gdq[0, :, 100, 100]) def test_6grps_satat6_crat1(setup_cube): ngroups = 6 # crmag = 1000 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 1 data[0, 0, 100, 100] = 10000 data[0, 1, 100, 100] = 35000 # CR data[0, 2, 100, 100] = 40005 data[0, 3, 100, 100] = 45029 data[0, 4, 100, 100] = 50014 data[0, 5, 100, 101] = 61000 data[0, 0, 100, 101] = 10000 data[0, 1, 100, 101] = 15001 data[0, 2, 100, 101] = 20003 data[0, 3, 100, 101] = 25006 data[0, 4, 100, 101] = 30010 data[0, 5, 100, 101] = 35015 gdq[0, 5, 100, 100] = DQFLAGS['SATURATED'] out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) # assert(4 == np.max(out_gdq)) # no CR was found assert (np.array_equal([0, DQFLAGS['JUMP_DET'], 0, 0, 0, DQFLAGS['SATURATED']], out_gdq[0, :, 100, 100])) @pytest.mark.xfail def test_6grps_satat6_crat1_flagadjpixels(setup_cube): ngroups = 6 # crmag = 1000 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 1 data[0, 0, 100, 100] = 10000 data[0, 1, 100, 100] = 35000 # CR data[0, 2, 100, 100] = 40005 data[0, 3, 100, 100] = 45029 data[0, 4, 100, 100] = 50014 data[0, 5, 100, 101] = 61000 data[0, 0, 100, 101] = 10000 data[0, 1, 100, 101] = 15001 data[0, 2, 100, 101] = 20003 data[0, 3, 100, 101] = 25006 data[0, 4, 100, 101] = 30010 data[0, 5, 100, 101] = 35015 gdq[0, 5, 100, 100] = DQFLAGS['SATURATED'] out_gdq, 
row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) # assert(4 == np.max(out_gdq)) # no CR was found assert (np.array_equal([0, DQFLAGS['JUMP_DET'], 0, 0, 0, DQFLAGS['SATURATED']], out_gdq[0, :, 100, 100])) assert (np.array_equal([0, DQFLAGS['JUMP_DET'], 0, 0, 0, DQFLAGS['SATURATED']], out_gdq[0, :, 99, 100])) def test_10grps_satat8_crsat3and6(setup_cube): ngroups = 10 # crmag = 1000 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 5000 data[0, 2, 100, 100] = 15000 # CR data[0, 3, 100, 100] = 20000 data[0, 4, 100, 100] = 25000 data[0, 5, 100, 100] = 40000 # CR data[0, 6, 100, 100] = 45000 data[0, 7:11, 100, 100] = 61000 gdq[0, 7:11, 100, 100] = DQFLAGS['SATURATED'] out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) # assert(4 == np.max(out_gdq)) # no CR was found assert np.array_equal( [0, 0, DQFLAGS['JUMP_DET'], 0, 0, DQFLAGS['JUMP_DET'], 0, DQFLAGS['SATURATED'], DQFLAGS['SATURATED'], DQFLAGS['SATURATED']], out_gdq[0, :, 100, 100]) def test_median_with_saturation(setup_cube): ngroups = 10 # crmag = 1000 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 4500 data[0, 2, 100, 100] = 9100 data[0, 3, 100, 100] = 13800 data[0, 4, 100, 100] = 18600 data[0, 5, 100, 100] = 40000 # CR data[0, 6, 100, 100] = 44850 data[0, 7, 100, 100] = 49900 data[0, 8:10, 100, 100] = 60000 gdq[0, 7:10, 100, 100] = DQFLAGS['SATURATED'] print(np.diff(data[0, :, 100, 100])) out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert (np.array_equal([0, 0, 0, 0, 0, 4, 0, 2, 2, 2], out_gdq[0, :, 100, 100])) 
def test_median_with_saturation_even_num_sat_frames(setup_cube): ngroups = 10 # crmag = 1000 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 4500 data[0, 2, 100, 100] = 9100 data[0, 3, 100, 100] = 13800 data[0, 4, 100, 100] = 18600 data[0, 5, 100, 100] = 40000 # CR data[0, 6, 100, 100] = 44850 data[0, 7, 100, 100] = 49900 data[0, 8:10, 100, 100] = 60000 gdq[0, 6:10, 100, 100] = DQFLAGS['SATURATED'] print(np.diff(data[0, :, 100, 100])) out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert (np.array_equal([0, 0, 0, 0, 0, 4, 2, 2, 2, 2], out_gdq[0, :, 100, 100])) def test_median_with_saturation_odd_number_final_difference(setup_cube): ngroups = 9 # crmag = 1000 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=5 * np.sqrt(2)) nframes = 1 data[0, 0, 100, 100] = 0 data[0, 1, 100, 100] = 4500 data[0, 2, 100, 100] = 9100 data[0, 3, 100, 100] = 13800 data[0, 4, 100, 100] = 18600 data[0, 5, 100, 100] = 40000 # CR data[0, 6, 100, 100] = 44850 data[0, 7, 100, 100] = 49900 data[0, 8:9, 100, 100] = 60000 gdq[0, 6:9, 100, 100] = DQFLAGS['SATURATED'] print(np.diff(data[0, :, 100, 100])) out_gdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert (np.array_equal([0, 0, 0, 0, 0, 4, 2, 2, 2], out_gdq[0, :, 100, 100])) def test_first_last_group(setup_cube): ngroups = 7 nframes = 1 data, gdq, nframes, read_noise, rej_threshold = setup_cube(ngroups, readnoise=25.0) # set up the data so that if the first and last group are used in jump # detection it would cause a jump to be detected between group 1-2 # and group 6-7. 
Add a jump between 3 and 4 just to make sure jump detection is working # set group 1 to be 10,000 data[0, 0, 100, 100] = 10000.0 # set groups 1,2 - to be around 30,000 data[0, 1, 100, 100] = 30000.0 data[0, 2, 100, 100] = 30020.0 # set up a jump to make sure it is detected data[0, 3, 100, 100] = 40000.0 data[0, 4, 100, 100] = 40020.0 data[0, 5, 100, 100] = 40040.0 # set group 6 to be 50,000 data[0, 6, 100, 100] = 50000.0 gdq[0, 0, 100, 100] = DQFLAGS['DO_NOT_USE'] gdq[0, 6, 100, 100] = DQFLAGS['DO_NOT_USE'] outgdq, row_below_gdq, row_above_gdq = find_crs(data, gdq, read_noise, rej_threshold, rej_threshold, rej_threshold, nframes, False, 200, 10, DQFLAGS) assert outgdq[0, 0, 100, 100] == DQFLAGS['DO_NOT_USE'] assert outgdq[0, 6, 100, 100] == DQFLAGS['DO_NOT_USE'] assert outgdq[0, 3, 100, 100] == DQFLAGS['JUMP_DET'] <file_sep>/src/stcal/ramp_fitting/utils.py #! /usr/bin/env python # # utils.py: utility functions import logging import multiprocessing import numpy as np import warnings from . import constants log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) # Replace zero or negative variances with this: LARGE_VARIANCE = 1.e8 class OptRes: """ Object to hold optional results for all good pixels for y-intercept, slope, uncertainty for y-intercept, uncertainty for slope, inverse variance, first frame (for pedestal image), and cosmic ray magnitude. """ def __init__(self, n_int, imshape, max_seg, nreads, save_opt): """ Initialize the optional attributes. These are 4D arrays for the segment-specific values of the y-intercept, the slope, the uncertainty associated with both, the weights, the approximate cosmic ray magnitudes, and the inverse variance. These are 3D arrays for the integration-specific first frame and pedestal values. 
Parameters ---------- n_int : int number of integrations in data set imshape : tuple shape of 2D image max_seg : int maximum number of segments fit nreads : int number of reads in an integration save_opt : bool save optional fitting results """ self.slope_seg = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.float32) if save_opt: self.yint_seg = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.float32) self.sigyint_seg = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.float32) self.sigslope_seg = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.float32) self.inv_var_seg = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.float32) self.firstf_int = np.zeros((n_int,) + imshape, dtype=np.float32) self.ped_int = np.zeros((n_int,) + imshape, dtype=np.float32) self.cr_mag_seg = np.zeros((n_int,) + (nreads,) + imshape, dtype=np.float32) self.var_p_seg = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.float32) self.var_r_seg = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.float32) def init_2d(self, npix, max_seg, save_opt): """ Initialize the 2D segment-specific attributes for the current data section. 
Parameters ---------- npix : integer number of pixels in section of 2D array max_seg : integer maximum number of segments that will be fit within an integration, calculated over all pixels and all integrations save_opt : bool save optional fitting results Returns ------- None """ self.slope_2d = np.zeros((max_seg, npix), dtype=np.float32) if save_opt: self.interc_2d = np.zeros((max_seg, npix), dtype=np.float32) self.siginterc_2d = np.zeros((max_seg, npix), dtype=np.float32) self.sigslope_2d = np.zeros((max_seg, npix), dtype=np.float32) self.inv_var_2d = np.zeros((max_seg, npix), dtype=np.float32) self.firstf_2d = np.zeros((max_seg, npix), dtype=np.float32) self.var_s_2d = np.zeros((max_seg, npix), dtype=np.float32) self.var_r_2d = np.zeros((max_seg, npix), dtype=np.float32) def reshape_res(self, num_int, rlo, rhi, sect_shape, ff_sect, save_opt): """ Loop over the segments and copy the reshaped 2D segment-specific results for the current data section to the 4D output arrays. Parameters ---------- num_int : int integration number rlo : int first column of section rhi : int last column of section sect_sect : tuple shape of section image ff_sect : ndarray first frame data, 2-D float save_opt : bool save optional fitting results Returns ------- """ for ii_seg in range(0, self.slope_seg.shape[1]): self.slope_seg[num_int, ii_seg, rlo:rhi, :] = \ self.slope_2d[ii_seg, :].reshape(sect_shape) if save_opt: self.yint_seg[num_int, ii_seg, rlo:rhi, :] = \ self.interc_2d[ii_seg, :].reshape(sect_shape) self.slope_seg[num_int, ii_seg, rlo:rhi, :] = \ self.slope_2d[ii_seg, :].reshape(sect_shape) self.sigyint_seg[num_int, ii_seg, rlo:rhi, :] = \ self.siginterc_2d[ii_seg, :].reshape(sect_shape) self.sigslope_seg[num_int, ii_seg, rlo:rhi, :] = \ self.sigslope_2d[ii_seg, :].reshape(sect_shape) self.inv_var_seg[num_int, ii_seg, rlo:rhi, :] = \ self.inv_var_2d[ii_seg, :].reshape(sect_shape) self.firstf_int[num_int, rlo:rhi, :] = ff_sect def append_arr(self, num_seg, g_pix, intercept, 
slope, sig_intercept, sig_slope, inv_var, save_opt): """ Add the fitting results for the current segment to the 2d arrays. Parameters ---------- num_seg : ndarray counter for segment number within the section, 1-D int g_pix : ndarray pixels having fitting results in current section, 1-D int intercept : ndarray intercepts for pixels in current segment and section, 1-D float slope : ndarray slopes for pixels in current segment and section, 1-D float sig_intercept : ndarray uncertainties of intercepts for pixels in current segment and section, 1-D float sig_slope : ndarray uncertainties of slopes for pixels in current segment and section, 1-D float inv_var : ndarray reciprocals of variances for fits of pixels in current segment and section, 1-D float save_opt : bool save optional fitting results Returns ------- None """ self.slope_2d[num_seg[g_pix], g_pix] = slope[g_pix] if save_opt: self.interc_2d[num_seg[g_pix], g_pix] = intercept[g_pix] self.siginterc_2d[num_seg[g_pix], g_pix] = sig_intercept[g_pix] self.sigslope_2d[num_seg[g_pix], g_pix] = sig_slope[g_pix] self.inv_var_2d[num_seg[g_pix], g_pix] = inv_var[g_pix] def shrink_crmag(self, n_int, dq_cube, imshape, nreads): """ Compress the 4D cosmic ray magnitude array for the current integration, removing all groups whose cr magnitude is 0 for pixels having at least one group with a non-zero magnitude. For every integration, the depth of the array is equal to the maximum number of cosmic rays flagged in all pixels in all integrations. This routine currently involves a loop over all pixels having at least 1 group flagged; if this algorithm takes too long for datasets having an overabundance of cosmic rays, this routine will require further optimization. 
Parameters ---------- n_int : int number of integrations in dataset dq_cube : ndarray input data quality array, 4-D flag imshape : tuple shape of a single input image nreads : int number of reads in an integration Returns ---------- None """ # Loop over data integrations to find max num of crs flagged per pixel # (this could exceed the maximum number of segments fit) max_cr = 0 for ii_int in range(0, n_int): dq_int = dq_cube[ii_int, :, :, :] dq_cr = np.bitwise_and(constants.dqflags["JUMP_DET"], dq_int) max_cr_int = (dq_cr > 0.).sum(axis=0).max() max_cr = max(max_cr, max_cr_int) # Allocate compressed array based on max number of crs cr_com = np.zeros((n_int,) + (max_cr,) + imshape, dtype=np.float32) # Loop over integrations and groups: for those pix having a cr, add # the magnitude to the compressed array for ii_int in range(0, n_int): cr_mag_int = self.cr_mag_seg[ii_int, :, :, :] cr_int_has_cr = np.where(cr_mag_int.sum(axis=0) != 0) # Initialize number of crs for each image pixel for this integration end_cr = np.zeros(imshape, dtype=np.int16) for k_rd in range(nreads): # loop over pixels having a CR for nn in range(len(cr_int_has_cr[0])): y, x = cr_int_has_cr[0][nn], cr_int_has_cr[1][nn] if (cr_mag_int[k_rd, y, x] > 0.): cr_com[ii_int, end_cr[y, x], y, x] = cr_mag_int[k_rd, y, x] end_cr[y, x] += 1 max_num_crs = end_cr.max() if max_num_crs == 0: max_num_crs = 1 self.cr_mag_seg = np.zeros(shape=(n_int, 1, imshape[0], imshape[1])) else: self.cr_mag_seg = cr_com[:, :max_num_crs, :, :] def output_optional(self, effintim): """ These results are the cosmic ray magnitudes in the segment-specific results for the count rates, y-intercept, uncertainty in the slope, uncertainty in the y-intercept, pedestal image, fitting weights, and the uncertainties in the slope due to poisson noise only and read noise only, and the integration-specific results for the pedestal image. The slopes are divided by the effective integration time here to yield the count rates. 
Any variance values that are a large fraction of the default value LARGE_VARIANCE correspond to non-existent segments, so will be set to 0 here before output. Parameters ---------- effintim : float effective integration time for a single group Returns ------- opt_info : tuple The tuple of computed optional results arrays for fitting. """ self.var_p_seg[self.var_p_seg > 0.4 * LARGE_VARIANCE] = 0. self.var_r_seg[self.var_r_seg > 0.4 * LARGE_VARIANCE] = 0. # Suppress, then re-enable, arithmetic warnings warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) # Tiny 'weights' values correspond to non-existent segments, so set to 0. self.weights[1. / self.weights > 0.4 * LARGE_VARIANCE] = 0. warnings.resetwarnings() self.slope_seg /= effintim opt_info = (self.slope_seg, self.sigslope_seg, self.var_p_seg, self.var_r_seg, self.yint_seg, self.sigyint_seg, self.ped_int, self.weights, self.cr_mag_seg) return opt_info def print_full(self): # pragma: no cover """ Diagnostic function for printing optional output arrays; most useful for tiny datasets Parameters ---------- None Returns ------- None """ print('Will now print all optional output arrays - ') print(' yint_seg: ') print((self.yint_seg)) print(' ') print(' slope_seg: ') print(self.slope_seg) print(' ') print(' sigyint_seg: ') print(self.sigyint_seg) print(' ') print(' sigslope_seg: ') print(self.sigslope_seg) print(' ') print(' inv_var_2d: ') print((self.inv_var_2d)) print(' ') print(' firstf_int: ') print((self.firstf_int)) print(' ') print(' ped_int: ') print((self.ped_int)) print(' ') print(' cr_mag_seg: ') print((self.cr_mag_seg)) def alloc_arrays_1(n_int, imshape): """ Allocate arrays for integration-specific results and segment-specific results and variances. 
Parameters ---------- n_int : int number of integrations imshape : tuple shape of a single image Returns ------- dq_int : ndarray Cube of integration-specific group data quality values, 3-D flag median_diffs_2d : ndarray Estimated median slopes, 2-D float num_seg_per_int : ndarray Cube of numbers of segments for all integrations and pixels, 3-D int sat_0th_group_int : ndarray Integration-specific slice whose value for a pixel is 1 if the initial group of the ramp is saturated, 3-D uint8 """ dq_int = np.zeros((n_int,) + imshape, dtype=np.uint32) num_seg_per_int = np.zeros((n_int,) + imshape, dtype=np.uint8) # for estimated median slopes median_diffs_2d = np.zeros(imshape, dtype=np.float32) sat_0th_group_int = np.zeros((n_int,) + imshape, dtype=np.uint8) return (dq_int, median_diffs_2d, num_seg_per_int, sat_0th_group_int) def alloc_arrays_2(n_int, imshape, max_seg): """ Allocate arrays for integration-specific results and segment-specific results and variances. Parameters ---------- n_int : int number of integrations imshape : tuple shape of a single image max_seg : int maximum number of segments fit Returns ------- var_p3 : ndarray Cube of integration-specific values for the slope variance due to Poisson noise only, 3-D float var_r3 : ndarray Cube of integration-specific values for the slope variance due to readnoise only, 3-D float var_p4 : ndarray Hypercube of segment- and integration-specific values for the slope variance due to Poisson noise only, 4-D float var_r4 : ndarray Hypercube of segment- and integration-specific values for the slope variance due to read noise only, 4-D float var_both4 : ndarray Hypercube of segment- and integration-specific values for the slope variance due to combined Poisson noise and read noise, 4-D float var_both3 : ndarray Cube of segment- and integration-specific values for the slope variance due to combined Poisson noise and read noise, 3-D float inv_var_both4 : ndarray Hypercube of reciprocals of segment- and integration-specific 
values for the slope variance due to combined Poisson noise and read noise, 4-D float s_inv_var_p3 : ndarray Cube of reciprocals of segment- and integration-specific values for the slope variance due to Poisson noise only, summed over segments, 3-D float s_inv_var_r3 : ndarray Cube of reciprocals of segment- and integration-specific values for the slope variance due to read noise only, summed over segments, 3-D float s_inv_var_both3 : ndarray Cube of reciprocals of segment- and integration-specific values for the slope variance due to combined Poisson noise and read noise, summed over segments, 3-D float segs_4 : ndarray Hypercube of lengths of segments for all integrations and pixels, 4-D int """ # Initialize variances so that non-existing ramps and segments will have # negligible contributions # Integration-specific: var_p3 = np.zeros((n_int,) + imshape, dtype=np.float32) + LARGE_VARIANCE var_r3 = var_p3.copy() var_both3 = var_p3.copy() s_inv_var_p3 = np.zeros_like(var_p3) s_inv_var_r3 = np.zeros_like(var_p3) s_inv_var_both3 = np.zeros_like(var_p3) # Segment-specific: var_p4 = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.float32) + LARGE_VARIANCE var_r4 = var_p4.copy() var_both4 = var_p4.copy() inv_var_both4 = np.zeros_like(var_p4) # number of segments segs_4 = np.zeros((n_int,) + (max_seg,) + imshape, dtype=np.uint8) return (var_p3, var_r3, var_p4, var_r4, var_both4, var_both3, inv_var_both4, s_inv_var_p3, s_inv_var_r3, s_inv_var_both3, segs_4) def calc_slope_vars(rn_sect, gain_sect, gdq_sect, group_time, max_seg): """ Calculate the segment-specific variance arrays for the given integration. Parameters ---------- rn_sect : ndarray read noise values for all pixels in data section, 2-D float gain_sect : ndarray gain values for all pixels in data section, 2-D float gdq_sect : ndarray data quality flags for pixels in section, 3-D int group_time : float Time increment between groups, in seconds. 
max_seg : int maximum number of segments fit Returns ------- den_r3 : ndarray for a given integration, the reciprocal of the denominator of the segment-specific variance of the segment's slope due to read noise, 3-D float den_p3 : ndarray for a given integration, the reciprocal of the denominator of the segment-specific variance of the segment's slope due to Poisson noise, 3-D float num_r3 : ndarray numerator of the segment-specific variance of the segment's slope due to read noise, 3-D float segs_beg_3 : ndarray lengths of segments for all pixels in the given data section and integration, 3-D int """ (nreads, asize2, asize1) = gdq_sect.shape npix = asize1 * asize2 imshape = (asize2, asize1) # Create integration-specific sections of input arrays for determination # of the variances. gdq_2d = gdq_sect[:, :, :].reshape((nreads, npix)) gain_1d = gain_sect.reshape(npix) gdq_2d_nan = gdq_2d.copy() # group dq with SATS will be replaced by nans gdq_2d_nan = gdq_2d_nan.astype(np.float32) wh_sat = np.where(np.bitwise_and(gdq_2d, constants.dqflags["SATURATED"])) if len(wh_sat[0]) > 0: gdq_2d_nan[wh_sat] = np.nan # set all SAT groups to nan del wh_sat # Get lengths of semiramps for all pix [number_of_semiramps, number_of_pix] segs = np.zeros_like(gdq_2d) # Counter of semiramp for each pixel sr_index = np.zeros(npix, dtype=np.uint8) pix_not_done = np.ones(npix, dtype=bool) # initialize to True i_read = 0 # Loop over reads for all pixels to get segments (segments per pixel) while (i_read < nreads and np.any(pix_not_done)): gdq_1d = gdq_2d_nan[i_read, :] wh_good = np.where(gdq_1d == 0) # good groups # if this group is good, increment those pixels' segments' lengths if len(wh_good[0]) > 0: segs[sr_index[wh_good], wh_good] += 1 del wh_good # Locate any CRs that appear before the first SAT group... wh_cr = np.where( gdq_2d_nan[i_read, :].astype(np.int32) & constants.dqflags["JUMP_DET"] > 0) # ... 
but not on final read: if (len(wh_cr[0]) > 0 and (i_read < nreads - 1)): sr_index[wh_cr[0]] += 1 segs[sr_index[wh_cr], wh_cr] += 1 del wh_cr # If current group is a NaN, this pixel is done (pix_not_done is False) wh_nan = np.where(np.isnan(gdq_2d_nan[i_read, :])) if len(wh_nan[0]) > 0: pix_not_done[wh_nan[0]] = False del wh_nan i_read += 1 segs = segs.astype(np.uint8) segs_beg = segs[:max_seg, :] # the leading nonzero lengths # Create reshaped version [ segs, y, x ] to simplify computation segs_beg_3 = segs_beg.reshape(max_seg, imshape[0], imshape[1]) segs_beg_3 = remove_bad_singles(segs_beg_3) # Create a version 1 less for later calculations for the variance due to # Poisson, with a floor=1 to handle single-group segments wh_pos_3 = np.where(segs_beg_3 > 1) segs_beg_3_m1 = segs_beg_3.copy() segs_beg_3_m1[wh_pos_3] -= 1 segs_beg_3_m1[segs_beg_3_m1 < 1] = 1 # For a segment, the variance due to Poisson noise # = slope/(tgroup * gain * (ngroups-1)), # where slope is the estimated median slope, tgroup is the group time, # and ngroups is the number of groups in the segment. # Here the denominator of this quantity will be computed, which will be # later multiplied by the estimated median slope. # Suppress, then re-enable, harmless arithmetic warnings, as NaN will be # checked for and handled later warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) den_p3 = 1. / (group_time * gain_1d.reshape(imshape) * segs_beg_3_m1) warnings.resetwarnings() # For a segment, the variance due to readnoise noise # = 12 * readnoise**2 /(ngroups_seg**3. - ngroups_seg)/( tgroup **2.) num_r3 = 12. * (rn_sect / group_time)**2. # always >0 # Reshape for every group, every pixel in section num_r3 = np.dstack([num_r3] * max_seg) num_r3 = np.transpose(num_r3, (2, 0, 1)) # Denominator den_r3 = 1./(segs_beg_3 **3.-segs_beg_3). 
The minimum number # of allowed groups is 2, which will apply if there is actually only 1 # group; in this case den_r3 = 1/6. This covers the case in which there is # only one good group at the beginning of the integration, so it will be # be compared to the plane of (near) zeros resulting from the reset. For # longer segments, this value is overwritten below. den_r3 = num_r3.copy() * 0. + 1. / 6 wh_seg_pos = np.where(segs_beg_3 > 1) # Suppress, then, re-enable harmless arithmetic warnings, as NaN will be # checked for and handled later warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) den_r3[wh_seg_pos] = 1. / (segs_beg_3[wh_seg_pos] ** 3. - segs_beg_3[wh_seg_pos]) # overwrite where segs>1 warnings.resetwarnings() return (den_r3, den_p3, num_r3, segs_beg_3) def calc_pedestal(num_int, slope_int, firstf_int, dq_first, nframes, groupgap, dropframes1): """ The pedestal is calculated by extrapolating the final slope for each pixel from its value at the first sample in the integration to an exposure time of zero; this calculation accounts for the values of nframes and groupgap. Any pixel that is saturated on the 1st group is given a pedestal value of 0. Parameters ---------- num_int : int integration number slope_int : ndarray cube of integration-specific slopes, 3-D float firstf_int : ndarray integration-specific first frame array, 3-D float dq_first : ndarray DQ of the initial group for all ramps in the given integration, 2-D flag nframes : int number of frames averaged per group; from the NFRAMES keyword. Does not contain the groupgap. groupgap : int number of frames dropped between groups, from the GROUPGAP keyword. dropframes1 : int number of frames dropped at the beginning of every integration, from the DRPFRMS1 keyword. 
Returns ------- ped : ndarray pedestal image, 2-D float """ ff_all = firstf_int[num_int, :, :].astype(np.float32) ped = ff_all - slope_int[num_int, ::] * \ (((nframes + 1.) / 2. + dropframes1) / (nframes + groupgap)) sat_flag = constants.dqflags["SATURATED"] ped[np.bitwise_and(dq_first, sat_flag) == sat_flag] = 0 ped[np.isnan(ped)] = 0. return ped def output_integ(slope_int, dq_int, effintim, var_p3, var_r3, var_both3, int_times): """ For the OLS algorithm, construct the output integration-specific results. Any variance values that are a large fraction of the default value LARGE_VARIANCE correspond to non-existent segments, so will be set to 0 here before output. Parameters ---------- model : instance of Data Model DM object for input slope_int : ndarray Data cube of weighted slopes for each integration, 3-D float dq_int : ndarray Data cube of DQ arrays for each integration, 3-D int effintim : float Effective integration time per integration var_p3 : ndarray Cube of integration-specific values for the slope variance due to Poisson noise only, 3-D float var_r3 : ndarray Cube of integration-specific values for the slope variance due to read noise only, 3-D float var_both3 : ndarray Cube of integration-specific values for the slope variance due to read noise and Poisson noise, 3-D float int_times : bintable, or None The INT_TIMES table, if it exists in the input, else None Returns ------- integ_info : tuple The tuple of computed integration ramp fitting arrays. """ # Suppress harmless arithmetic warnings for now warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) var_p3[var_p3 > 0.4 * LARGE_VARIANCE] = 0. var_r3[var_r3 > 0.4 * LARGE_VARIANCE] = 0. var_both3[var_both3 > 0.4 * LARGE_VARIANCE] = 0. 
data = slope_int / effintim err = np.sqrt(var_both3) dq = dq_int var_poisson = var_p3 var_rnoise = var_r3 int_times = int_times integ_info = (data, dq, var_poisson, var_rnoise, int_times, err) # Reset the warnings filter to its original state warnings.resetwarnings() return integ_info ''' # BEGIN remove GLS def gls_output_integ(model, slope_int, slope_err_int, dq_int): """ For the GLS algorithm, construct the output integration-specific results. Parameters ---------- model : instance of Data Model DM object for input slope_int : ndarray Data cube of weighted slopes for each integration, 3-D float slope_err_int : ndarray Data cube of slope errors for each integration, 3-D float dq_int : ndarray Data cube of DQ arrays for each integration, 3-D flag Returns ------- cubemod : Data Model object """ # Suppress harmless arithmetic warnings for now warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) cubemod = datamodels.CubeModel() cubemod.data = slope_int cubemod.err = slope_err_int cubemod.dq = dq_int # Reset the warnings filter to its original state warnings.resetwarnings() cubemod.update(model) # keys from input needed for photom step return cubemod def gls_output_optional(model, intercept_int, intercept_err_int, pedestal_int, ampl_int, ampl_err_int): # pragma: no cover """Construct the optional results for the GLS algorithm. Extended Summary ---------------- Construct the GLS-specific optional output data. These results are the Y-intercepts, uncertainties in the intercepts, pedestal (first group extrapolated back to zero time), cosmic ray magnitudes, and uncertainties in the CR magnitudes. Parameters ---------- model : instance of Data Model Data model object for input; this is used only for the file name. intercept_int : 3-D ndarray, float32, shape (n_int, ny, nx) Y-intercept for each integration, at each pixel. 
intercept_err_int : 3-D ndarray, float32, shape (n_int, ny, nx) Uncertainties for Y-intercept for each integration, at each pixel. pedestal_int : 3-D ndarray, float32, shape (n_int, ny, nx) The pedestal, for each integration and each pixel. ampl_int : 4-D ndarray, float32, shape (n_int, ny, nx, max_num_cr) Cosmic-ray amplitudes for each integration, at each pixel, and for each CR hit in the ramp. max_num_cr will be the maximum number of CRs within the ramp for any pixel, or it will be one if there were no CRs at all. ampl_err_int : 4-D ndarray, float32, shape (n_int, ny, nx, max_num_cr) Uncertainties for cosmic-ray amplitudes for each integration, at each pixel, and for each CR in the ramp. Returns ------- gls_ramp_model : GLS_RampFitModel object GLS-specific ramp fit data for the exposure. """ gls_ramp_model = datamodels.GLS_RampFitModel() gls_ramp_model.yint = intercept_int gls_ramp_model.sigyint = intercept_err_int gls_ramp_model.pedestal = pedestal_int gls_ramp_model.crmag = ampl_int gls_ramp_model.sigcrmag = ampl_err_int return gls_ramp_model def gls_pedestal(first_group, slope_int, s_mask, frame_time, nframes_used): # pragma: no cover """Calculate the pedestal for the GLS case. The pedestal is the first group, but extrapolated back to zero time using the slope obtained by the fit to the whole ramp. The time of the first group is the frame time multiplied by (M + 1) / 2, where M is the number of frames per group, not including the number (if any) of skipped frames. The input arrays and output pedestal are slices of the full arrays. They are just the relevant data for the current integration (assuming that this function is called within a loop over integrations), and they may include only a subset of image lines. For example, this function might be called with slope_int argument given as: `slope_int[num_int, rlo:rhi, :]`. The input and output parameters are in electrons. 
Parameters ---------- first_group : ndarray A slice of the first group in the ramp, 2-D float slope_int : ndarray The slope obtained by GLS ramp fitting. This is a slice for the current integration and a subset of the image lines, 2-D float s_mask : ndarray True for ramps that were saturated in the first group, 2-D bool frame_time : float The time to read one frame, in seconds. nframes_used : int Number of frames that were averaged together to make a group. Exludes the groupgap. Returns ------- pedestal : ndarray This is a slice of the full pedestal array, and it's for the current integration, 2-D float """ M = float(nframes_used) pedestal = first_group - slope_int * frame_time * (M + 1.) / 2. if s_mask.any(): pedestal[s_mask] = 0. return pedestal # END remove GLS ''' def shift_z(a, off): """ Shift input 3D array by requested offset in z-direction, padding shifted array (of the same size) by leading or trailing zeros as needed. Parameters ---------- a : ndarray input array, 3-D float off : int offset in z-direction Returns ------- b : ndarray shifted array, 3-D float """ # set initial and final indices along z-direction for original and # shifted 3D arrays ai_z = int((abs(off) + off) / 2) af_z = a.shape[0] + int((-abs(off) + off) / 2) bi_z = a.shape[0] - af_z bf_z = a.shape[0] - ai_z b = a * 0 b[bi_z:bf_z, :, :] = a[ai_z:af_z, :, :] return b ''' # GLS function def get_efftim_ped(model): """ Calculate the effective integration time for a single group, and return the number of frames per group, and the number of frames dropped between groups. Parameters ---------- model : instance of Data Model DM object for input Returns ------- effintim : float effective integration time for a single group nframes : int number of frames averaged per group; from the NFRAMES keyword. groupgap : int number of frames dropped between groups; from the GROUPGAP keyword. 
dropframes1 : int number of frames dropped at the beginning of every integration; from the DRPFRMS1 keyword, or 0 if the keyword is missing """ groupgap = model.meta.exposure.groupgap nframes = model.meta.exposure.nframes frame_time = model.meta.exposure.frame_time dropframes1 = model.meta.exposure.drop_frames1 if (dropframes1 is None): # set to default if missing dropframes1 = 0 log.debug('Missing keyword DRPFRMS1, so setting to default value of 0') try: effintim = (nframes + groupgap) * frame_time except TypeError: log.error('Can not retrieve values needed to calculate integ. time') log.debug('Calculating effective integration time for a single group using:') log.debug(' groupgap: %s' % (groupgap)) log.debug(' nframes: %s' % (nframes)) log.debug(' frame_time: %s' % (frame_time)) log.debug(' dropframes1: %s' % (dropframes1)) log.info('Effective integration time per group: %s' % (effintim)) return effintim, nframes, groupgap, dropframes1 # GLS function def get_dataset_info(model): """ Extract values for the number of groups, the number of pixels, dataset shapes, the number of integrations, the instrument name, the frame time, and the observation time. Parameters ---------- model : instance of Data Model DM object for input Returns ------- nreads : int number of reads in input dataset npix : int number of pixels in 2D array imshape : tuple shape of 2D image cubeshape : tuple shape of input dataset n_int : int number of integrations instrume : str instrument frame_time : float integration time from TGROUP ngroups : int number of groups per integration group_time : float Time increment between groups, in seconds. 
""" instrume = model.meta.instrument.name frame_time = model.meta.exposure.frame_time ngroups = model.meta.exposure.ngroups group_time = model.meta.exposure.group_time n_int = model.data.shape[0] nreads = model.data.shape[1] asize2 = model.data.shape[2] asize1 = model.data.shape[3] # If nreads and ngroups are not the same, override the value of ngroups # with nreads, which is more likely to be correct, since it's based on # the image shape. if nreads != ngroups: log.warning('The value from the key NGROUPS does not (but should) match') log.warning(' the value of nreads from the data; will use value of') log.warning(' nreads: %s' % (nreads)) ngroups = nreads npix = asize2 * asize1 # number of pixels in 2D array imshape = (asize2, asize1) cubeshape = (nreads,) + imshape return nreads, npix, imshape, cubeshape, n_int, instrume, frame_time, \ ngroups, group_time # GLS function def get_more_info(model): # pragma: no cover """Get information used by GLS algorithm. Parameters ---------- model : instance of Data Model DM object for input Returns ------- group_time : float Time increment between groups, in seconds. nframes_used : int Number of frames that were averaged together to make a group, i.e. excluding skipped frames. saturated_flag : int Group data quality flag that indicates a saturated pixel. jump_flag : int Group data quality flag that indicates a cosmic ray hit. """ group_time = model.meta.exposure.group_time nframes_used = model.meta.exposure.nframes saturated_flag = constants.dqflags["SATURATED"] jump_flag = constants.dqflags["JUMP_DET"] return (group_time, nframes_used, saturated_flag, jump_flag) # GLS function def get_max_num_cr(gdq_cube, jump_flag): # pragma: no cover """ Find the maximum number of cosmic-ray hits in any one pixel. Parameters ---------- gdq_cube : ndarray The group data quality array, 3-D flag jump_flag : int The data quality flag indicating a cosmic-ray hit. 
Returns ------- max_num_cr : int The maximum number of cosmic-ray hits for any pixel. """ cr_flagged = np.empty(gdq_cube.shape, dtype=np.uint8) cr_flagged[:] = np.where(np.bitwise_and(gdq_cube, jump_flag), 1, 0) max_num_cr = cr_flagged.sum(axis=0, dtype=np.int32).max() del cr_flagged return max_num_cr ''' def reset_bad_gain(pdq, gain): """ For pixels in the gain array that are either non-positive or NaN, reset the the corresponding pixels in the pixel DQ array to NO_GAIN_VALUE and DO_NOT_USE so that they will be ignored. Parameters ---------- pdq : ndarray pixel dq array of input model, 2-D int gain : ndarray gain array from reference file, 2-D float Returns ------- pdq : ndarray pixleldq array of input model, reset to NO_GAIN_VALUE and DO_NOT_USE for pixels in the gain array that are either non-positive or NaN., 2-D flag """ ''' with warnings.catch_warnings(): warnings.filterwarnings("ignore", "invalid value.*", RuntimeWarning) wh_g = np.where(gain <= 0.) ''' wh_g = np.where(gain <= 0.) if len(wh_g[0]) > 0: pdq[wh_g] = np.bitwise_or(pdq[wh_g], constants.dqflags["NO_GAIN_VALUE"]) pdq[wh_g] = np.bitwise_or(pdq[wh_g], constants.dqflags["DO_NOT_USE"]) wh_g = np.where(np.isnan(gain)) if len(wh_g[0]) > 0: pdq[wh_g] = np.bitwise_or(pdq[wh_g], constants.dqflags["NO_GAIN_VALUE"]) pdq[wh_g] = np.bitwise_or(pdq[wh_g], constants.dqflags["DO_NOT_USE"]) return pdq def remove_bad_singles(segs_beg_3): """ For the current integration and data section, remove all segments having only a single group if there are other segments in the ramp. This method allows for the possibility that a ramp can have multiple (necessarily consecutive) 1-group segments, which in principle could occur if there are consecutive cosmic rays. 
Parameters ---------- segs_beg_3 : ndarray lengths of all segments for all ramps in the given data section and integration; some of these ramps may contain segments having a single group, and another segment, 3-D int Returns ------- segs_beg_3 : ndarray lengths of all segments for all ramps in the given data section and integration; segments having a single group, and another segment will be removed, 3-D int """ max_seg = segs_beg_3.shape[0] # get initial number of ramps having single-group segments tot_num_single_grp_ramps = len(np.where((segs_beg_3 == 1) & (segs_beg_3.sum(axis=0) > 1))[0]) while(tot_num_single_grp_ramps > 0): # until there are no more single-group segments for ii_0 in range(max_seg): slice_0 = segs_beg_3[ii_0, :, :] for ii_1 in range(max_seg): # correctly includes EARLIER segments if (ii_0 == ii_1): # don't compare with itself continue slice_1 = segs_beg_3[ii_1, :, :] # Find ramps of a single-group segment and another segment # either earlier or later wh_y, wh_x = np.where((slice_0 == 1) & (slice_1 > 0)) if (len(wh_y) == 0): # Are none, so go to next pair of segments to check continue # Remove the 1-group segment segs_beg_3[ii_0:-1, wh_y, wh_x] = segs_beg_3[ii_0 + 1:, wh_y, wh_x] # Zero the last segment entry for the ramp, which would otherwise # remain non-zero due to the shift segs_beg_3[-1, wh_y, wh_x] = 0 del wh_y, wh_x tot_num_single_grp_ramps = len(np.where((segs_beg_3 == 1) & (segs_beg_3.sum(axis=0) > 1))[0]) return segs_beg_3 def fix_sat_ramps(sat_0th_group_int, var_p3, var_both3, slope_int, dq_int): """ For ramps within an integration that are saturated on the initial group, reset the integration-specific variances and slope so they will have no contribution. 
Parameters ---------- sat_0th_group_int : ndarray Integration-specific slice whose value for a pixel is 1 if the initial group of the ramp is saturated, 3-D uint8 var_p3 : ndarray Cube of integration-specific values for the slope variance due to Poisson noise only; some ramps may be saturated in the initial group, 3-D float var_both3 : ndarray Cube of segment- and integration-specific values for the slope variance due to combined Poisson noise and read noise; some ramps may be saturated in the initial group, 3-D float slope_int : ndarray Cube of integration-specific slopes. Some ramps may be saturated in the initial group, 3-D float dq_int : ndarray Cube of integration-specific DQ flags, 3-D flag Returns ------- var_p3 : ndarray Cube of integration-specific values for the slope variance due to Poisson noise only; for ramps that are saturated in the initial group, this variance has been reset to a huge value to minimize the ramps contribution, 3-D float var_both3 : ndarray Cube of segment- and integration-specific values for the slope variance due to combined Poisson noise and read noise; for ramps that are saturated in the initial group, this variance has been reset to a huge value to minimize the ramps contribution, 3-D float slope_int : ndarray Cube of integration-specific slopes; for ramps that are saturated in the initial group, this variance has been reset to a huge value to minimize the ramps contribution, 3-D float dq_int : ndarray Cube of integration-specific DQ flags. For ramps that are saturated in the initial group, the flag 'DO_NOT_USE' is added, 3-D flag """ var_p3[sat_0th_group_int > 0] = LARGE_VARIANCE var_both3[sat_0th_group_int > 0] = LARGE_VARIANCE slope_int[sat_0th_group_int > 0] = 0. 
    # Tail of a function that starts before this view: flag integrations whose
    # 0th group is saturated as DO_NOT_USE in the integration-level DQ.
    dq_int[sat_0th_group_int > 0] = np.bitwise_or(
        dq_int[sat_0th_group_int > 0], constants.dqflags["DO_NOT_USE"])

    return var_p3, var_both3, slope_int, dq_int


def do_all_sat(pixeldq, groupdq, imshape, n_int, save_opt):
    """
    For an input exposure where all groups in all integrations are saturated,
    the DQ in the primary and integration-specific output products are updated,
    and the other arrays in all output products are populated with zeros.

    Parameters
    ----------
    pixeldq : ndarray
        2-D pixel DQ array of the input model

    groupdq : ndarray
        4-D group DQ array of the input model (integ, group, y, x)

    imshape : (int, int) tuple
        shape of 2D image

    n_int : int
        number of integrations

    save_opt : bool
        save optional fitting results

    Returns
    -------
    image_info : tuple
        The tuple of computed ramp fitting arrays.

    integ_info : tuple
        The tuple of computed integration fitting arrays.

    opt_info : tuple
        The tuple of computed optional results arrays for fitting.
    """
    # Create model for the primary output. Flag all pixels in the pixel DQ
    # extension as SATURATED and DO_NOT_USE.
    pixeldq = np.bitwise_or(pixeldq, constants.dqflags["SATURATED"])
    pixeldq = np.bitwise_or(pixeldq, constants.dqflags["DO_NOT_USE"])

    # All science-like arrays are zero-filled; only DQ carries information.
    data = np.zeros(imshape, dtype=np.float32)
    dq = pixeldq
    var_poisson = np.zeros(imshape, dtype=np.float32)
    var_rnoise = np.zeros(imshape, dtype=np.float32)
    err = np.zeros(imshape, dtype=np.float32)
    image_info = (data, dq, var_poisson, var_rnoise, err)

    # Create model for the integration-specific output. The 3D group DQ created
    # is based on the 4D group DQ of the model, and all pixels in all
    # integrations will be flagged here as DO_NOT_USE (they are already flagged
    # as SATURATED). The INT_TIMES extension will be left as None.
    if n_int > 1:
        m_sh = groupdq.shape  # (integ, grps/integ, y, x)
        groupdq_3d = np.zeros((m_sh[0], m_sh[2], m_sh[3]), dtype=np.uint32)

        # Collapse the group axis: OR together all group DQ flags per pixel.
        for ii in range(n_int):  # add SAT flag to existing groupdq in each slice
            groupdq_3d[ii, :, :] = np.bitwise_or.reduce(groupdq[ii, :, :, :],
                                                        axis=0)

        groupdq_3d = np.bitwise_or(groupdq_3d, constants.dqflags["DO_NOT_USE"])

        data = np.zeros((n_int,) + imshape, dtype=np.float32)
        dq = groupdq_3d
        var_poisson = np.zeros((n_int,) + imshape, dtype=np.float32)
        var_rnoise = np.zeros((n_int,) + imshape, dtype=np.float32)
        int_times = None
        err = np.zeros((n_int,) + imshape, dtype=np.float32)

        integ_info = (data, dq, var_poisson, var_rnoise, int_times, err)
    else:
        integ_info = None

    # Create model for the optional output.
    # NOTE(review): all optional arrays except `pedestal` alias the same
    # zero-filled array `new_arr`; harmless while they stay all-zero.
    if save_opt:
        new_arr = np.zeros((n_int,) + (1,) + imshape, dtype=np.float32)

        slope = new_arr
        sigslope = new_arr
        var_poisson = new_arr
        var_rnoise = new_arr
        yint = new_arr
        sigyint = new_arr
        pedestal = np.zeros((n_int,) + imshape, dtype=np.float32)
        weights = new_arr
        crmag = new_arr

        opt_info = (slope, sigslope, var_poisson, var_rnoise,
                    yint, sigyint, pedestal, weights, crmag)
    else:
        opt_info = None

    log.info('All groups of all integrations are saturated.')

    return image_info, integ_info, opt_info


def log_stats(c_rates):
    """
    Optionally log statistics of detected cosmic rays.

    Parameters
    ----------
    c_rates : ndarray
        weighted count rate, 2-D float

    Returns
    -------
    None
    """
    wh_c_0 = np.where(c_rates == 0.)  # insufficient data or no signal

    log.debug('The number of pixels having insufficient data')
    log.debug('due to excessive CRs or saturation %d:', len(wh_c_0[0]))
    log.debug('Count rates - min, mean, max, std: %f, %f, %f, %f'
              % (c_rates.min(), c_rates.mean(), c_rates.max(), c_rates.std()))


def compute_slices(max_cores):
    """
    Computes the number of slices to be created for multiprocessing.

    Parameters
    ----------
    max_cores : str
        Number of cores to use for multiprocessing. If set to 'none' (the
        default), then no multiprocessing will be done.
        The other allowable values are 'quarter', 'half', and 'all'. This is
        the fraction of cores to use for multi-proc. The total number of cores
        includes the SMT cores (Hyper Threading for Intel).

    Returns
    -------
    number_slices : int
        The number of slices for multiprocessing.
    """
    if max_cores == 'none':
        number_slices = 1
    else:
        num_cores = multiprocessing.cpu_count()
        log.debug(f'Found {num_cores} possible cores to use for ramp fitting')
        if max_cores == 'quarter':
            # `or 1` guards against zero slices on machines with < 4 cores
            number_slices = num_cores // 4 or 1
        elif max_cores == 'half':
            number_slices = num_cores // 2 or 1
        elif max_cores == 'all':
            number_slices = num_cores
        else:
            # Unrecognized value: fall back to single-process behavior
            number_slices = 1
    return number_slices


def dq_compress_final(dq_int, n_int):
    """
    Combine the integration-specific dq arrays (which have already been
    compressed and combined with the PIXELDQ array) to create the dq array
    of the primary output product.

    Parameters
    ----------
    dq_int : ndarray
        cube of combined dq arrays for all data sections in a single
        integration, 3-D flag

    n_int : int
        total number of integrations in data set

    Returns
    -------
    f_dq : ndarray
        combination of all integration's pixeldq arrays, 2-D flag
    """
    f_dq = dq_int[0, :, :]

    # OR together the per-integration DQ planes into a single 2-D plane
    for jj in range(1, n_int):
        f_dq = np.bitwise_or(f_dq, dq_int[jj, :, :])

    return f_dq


def dq_compress_sect(gdq_sect, pixeldq_sect):
    """
    Get ramp locations where the data has been flagged as saturated in the 4D
    GROUPDQ array for the current data section, find the corresponding image
    locations, and set the SATURATED flag in those locations in the PIXELDQ
    array. Similarly, get the ramp locations where the data has been flagged as
    a jump detection in the 4D GROUPDQ array, find the corresponding image
    locations, and set the COSMIC_BEFORE flag in those locations in the PIXELDQ
    array. These modifications to the section of the PIXELDQ array are not used
    to flag groups for any computations; they are used only in the integration-
    specific output.
    Parameters
    ----------
    gdq_sect : ndarray
        cube of GROUPDQ array for a data section, 3-D flag

    pixeldq_sect : ndarray
        dq array of data section of input model, 2-D flag

    Returns
    -------
    pixeldq_sect : ndarray
        dq array of data section updated with saturated and jump-detected
        flags, 2-D flag
    """
    sat_flag = constants.dqflags["SATURATED"]
    jump_flag = constants.dqflags["JUMP_DET"]

    # Any group saturated along the ramp -> flag the pixel as SATURATED
    sat_loc_r = np.bitwise_and(gdq_sect, sat_flag)
    sat_loc_im = np.where(sat_loc_r.sum(axis=0) > 0)
    pixeldq_sect[sat_loc_im] = np.bitwise_or(pixeldq_sect[sat_loc_im], sat_flag)

    # Any jump detected along the ramp -> flag the pixel as JUMP_DET
    cr_loc_r = np.bitwise_and(gdq_sect, jump_flag)
    cr_loc_im = np.where(cr_loc_r.sum(axis=0) > 0)
    pixeldq_sect[cr_loc_im] = np.bitwise_or(pixeldq_sect[cr_loc_im], jump_flag)

    return pixeldq_sect
<file_sep>/src/stcal/ramp_fitting/ramp_fit.py
#! /usr/bin/env python
#
#  ramp_fit.py - calculate weighted mean of slope, based on Massimo
#                Robberto's "On the Optimal Strategy to fit MULTIACCUM
#                ramps in the presence of cosmic rays."
#                (JWST-STScI-0001490,SM-12; 07/25/08). The derivation
#                is a generalization for >1 cosmic rays, calculating
#                the slope and variance of the slope for each section
#                of the ramp (in between cosmic rays). The intervals are
#                determined from the input data quality arrays.
#
# Note:
# In this module, comments on the 'first group','second group', etc are
#    1-based, unless noted otherwise.

import numpy as np
import logging

from . import constants
# from . import gls_fit           # used only if algorithm is "GLS"
from . import ols_fit             # used only if algorithm is "OLS"
from . import ramp_fit_class

log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)

BUFSIZE = 1024 * 300000  # 300Mb cache size for data section


def create_ramp_fit_class(model, dqflags=None):
    """
    Create an internal ramp fit class from a data model.

    Parameters
    ----------
    model : data model
        input data model, assumed to be of type RampModel

    dqflags : dict
        The data quality flags needed for ramp fitting.

    Return
    ------
    ramp_data : ramp_fit_class.RampData
        The internal ramp class.
    """
    ramp_data = ramp_fit_class.RampData()

    # Copy only what ramp fitting needs, decoupling it from the data model.
    ramp_data.set_arrays(model)
    ramp_data.set_meta(model)
    ramp_data.set_dqflags(dqflags)

    return ramp_data


def ramp_fit(model, buffsize, save_opt, readnoise_2d, gain_2d,
             algorithm, weighting, max_cores, dqflags):
    """
    Calculate the count rate for each pixel in all data cube sections and all
    integrations, equal to the slope for all sections (intervals between
    cosmic rays) of the pixel's ramp divided by the effective integration time.
    The weighting parameter must currently be set to 'optim', to use the optimal
    weighting (paper by Fixsen, ref. TBA) will be used in the fitting; this is
    currently the only supported weighting scheme.

    Parameters
    ----------
    model : data model
        input data model, assumed to be of type RampModel

    buffsize : int
        size of data section (buffer) in bytes

    save_opt : bool
        calculate optional fitting results

    readnoise_2d : ndarray
        2-D array readnoise for all pixels

    gain_2d : ndarray
        2-D array gain for all pixels

    algorithm : str
        'OLS' specifies that ordinary least squares should be used;
        'GLS' specifies that generalized least squares should be used.

    weighting : str
        'optimal' specifies that optimal weighting should be used;
        currently the only weighting supported.

    max_cores : str
        Number of cores to use for multiprocessing. If set to 'none' (the
        default), then no multiprocessing will be done. The other allowable
        values are 'quarter', 'half', and 'all'. This is the fraction of cores
        to use for multi-proc. The total number of cores includes the SMT cores
        (Hyper Threading for Intel).

    dqflags : dict
        A dictionary with at least the following keywords:
        DO_NOT_USE, SATURATED, JUMP_DET, NO_GAIN_VALUE, UNRELIABLE_SLOPE

    Returns
    -------
    image_info : tuple
        The tuple of computed ramp fitting arrays.

    integ_info : tuple
        The tuple of computed integration fitting arrays.

    opt_info : tuple
        The tuple of computed optional results arrays for fitting.

    gls_opt_model : GLS_RampFitModel object or None (Unused for now)
        Object containing optional GLS-specific ramp fitting data for the
        exposure
    """
    # Create an instance of the internal ramp class, using only values needed
    # for ramp fitting, to remove further ramp fitting dependence on
    # data models.
    ramp_data = create_ramp_fit_class(model, dqflags)

    return ramp_fit_data(
        ramp_data, buffsize, save_opt, readnoise_2d, gain_2d,
        algorithm, weighting, max_cores, dqflags)


def ramp_fit_data(ramp_data, buffsize, save_opt, readnoise_2d, gain_2d,
                  algorithm, weighting, max_cores, dqflags):
    """
    This function begins the ramp fit computation after the creation of the
    RampData class. It determines the proper path for computation to take
    depending on the choice of ramp fitting algorithms (which is only ordinary
    least squares right now) and the choice of single or multiprocessing.

    Parameters
    ----------
    ramp_data : RampData
        Input data necessary for computing ramp fitting.

    buffsize : int
        size of data section (buffer) in bytes

    save_opt : bool
        calculate optional fitting results

    readnoise_2d : ndarray
        2-D array readnoise for all pixels

    gain_2d : ndarray
        2-D array gain for all pixels

    algorithm : str
        'OLS' specifies that ordinary least squares should be used;
        'GLS' specifies that generalized least squares should be used.

    weighting : str
        'optimal' specifies that optimal weighting should be used;
        currently the only weighting supported.

    max_cores : str
        Number of cores to use for multiprocessing. If set to 'none' (the
        default), then no multiprocessing will be done. The other allowable
        values are 'quarter', 'half', and 'all'. This is the fraction of cores
        to use for multi-proc. The total number of cores includes the SMT cores
        (Hyper Threading for Intel).

    dqflags : dict
        A dictionary with at least the following keywords:
        DO_NOT_USE, SATURATED, JUMP_DET, NO_GAIN_VALUE, UNRELIABLE_SLOPE

    Returns
    -------
    image_info : tuple
        The tuple of computed ramp fitting arrays.

    integ_info : tuple
        The tuple of computed integration fitting arrays.

    opt_info : tuple
        The tuple of computed optional results arrays for fitting.

    gls_opt_model : GLS_RampFitModel object or None (Unused for now)
        Object containing optional GLS-specific ramp fitting data for the
        exposure
    """
    constants.update_dqflags(dqflags)
    if None in constants.dqflags.values():
        raise ValueError("Some of the DQ flags required for ramp_fitting are None.")

    if algorithm.upper() == "GLS":
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # !!!!! Reference to ReadModel and GainModel changed to simple ndarrays !!!!!
        # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
        # new_model, int_model, gls_opt_model = gls_fit.gls_ramp_fit(
        #     model, buffsize, save_opt, readnoise_model, gain_model, max_cores)
        # GLS path is currently disabled; all outputs are None.
        image_info, integ_info, gls_opt_model = None, None, None
        opt_info = None
    else:
        # Get readnoise array for calculation of variance of noiseless ramps, and
        # gain array in case optimal weighting is to be done.
        # NOTE(review): this scales the caller's readnoise_2d array in place.
        nframes = ramp_data.nframes
        readnoise_2d *= gain_2d / np.sqrt(2. * nframes)

        # Compute ramp fitting using ordinary least squares.
        image_info, integ_info, opt_info = ols_fit.ols_ramp_fit_multi(
            ramp_data, buffsize, save_opt, readnoise_2d, gain_2d, weighting,
            max_cores)
        gls_opt_model = None

    return image_info, integ_info, opt_info, gls_opt_model
<file_sep>/src/stcal/jump/twopoint_difference.py
"""
Two-Point Difference method for finding outliers in a 4-D ramp data array.
The scheme used in this variation of the method uses numpy array methods
to compute first-differences and find the max outlier in each pixel while
still working in the full 4-D data array. This makes detection of the first
outlier very fast. We then iterate pixel-by-pixel over only those pixels
that are already known to contain an outlier, to look for any additional
outliers and set the appropriate DQ mask for all outliers in the pixel.
This is MUCH faster than doing all the work on a pixel-by-pixel basis. """ import logging import numpy as np log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) HUGE_NUM = np.finfo(np.float32).max def find_crs(data, group_dq, read_noise, normal_rej_thresh, two_diff_rej_thresh, three_diff_rej_thresh, nframes, flag_4_neighbors, max_jump_to_flag_neighbors, min_jump_to_flag_neighbors, dqflags): """ Find CRs/Jumps in each integration within the input data array. The input data array is assumed to be in units of electrons, i.e. already multiplied by the gain. We also assume that the read noise is in units of electrons. We also assume that there are at least three groups in the integrations. This was checked by jump_step before this routine is called. Parameters ---------- data: float, 4D array (num_ints, num_groups, num_rows, num_cols) input ramp data groupdq : int, 4D array group DQ flags read_noise : float, 2D array The read noise of each pixel normal_rej_thresh : float cosmic ray sigma rejection threshold two_diff_rej_thresh : float cosmic ray sigma rejection threshold for ramps having 3 groups three_diff_rej_thresh : float cosmic ray sigma rejection threshold for ramps having 4 groups nframes : int The number of frames that are included in the group average flag_4_neighbors : bool if set to True (default is True), it will cause the four perpendicular neighbors of all detected jumps to also be flagged as a jump. max_jump_to_flag_neighbors : float value in units of sigma that sets the upper limit for flagging of neighbors. Any jump above this cutoff will not have its neighbors flagged. min_jump_to_flag_neighbors : float value in units of sigma that sets the lower limit for flagging of neighbors (marginal detections). Any primary jump below this value will not have its neighbors flagged. 
Returns ------- gdq : int, 4D array group DQ array with reset flags row_below_gdq : int, 3D array (num_ints, num_groups, num_cols) pixels below current row also to be flagged as a CR row_above_gdq : int, 3D array (num_ints, num_groups, num_cols) pixels above current row also to be flagged as a CR """ gdq = group_dq.copy() # Get data characteristics nints, ngroups, nrows, ncols = data.shape ndiffs = ngroups - 1 # Create arrays for output row_above_gdq = np.zeros((nints, ngroups, ncols), dtype=np.uint8) row_below_gdq = np.zeros((nints, ngroups, ncols), dtype=np.uint8) # Square the read noise values, for use later read_noise_2 = read_noise ** 2 # Set saturated values in the input data array to NaN, so they don't get # used in any of the subsequent calculations data[np.where(np.bitwise_and(gdq, dqflags["SATURATED"]))] = np.nan # Set pixels flagged as DO_NOT_USE in the input to NaN, so they don't get # used in any of the subsequent calculations. MIRI exposures can sometimes # have all pixels in the first and last groups flagged with DO_NOT_USE. data[np.where(np.bitwise_and(gdq, dqflags["DO_NOT_USE"]))] = np.nan # Loop over multiple integrations for integ in range(nints): log.info(f'Working on integration {integ + 1}:') # Compute first differences of adjacent groups up the ramp # note: roll the ngroups axis of data array to the end, to make # memory access to the values for a given pixel faster. # New form of the array has dimensions [nrows, ncols, ngroups]. 
first_diffs = np.diff(np.rollaxis(data[integ], axis=0, start=3), axis=2) positive_first_diffs = np.abs(first_diffs) # sat_groups is a 3D array that is true when the group is saturated sat_groups = np.isnan(positive_first_diffs) # number_sat_groups is a 2D array with the count of saturated groups # for each pixel number_sat_groups = sat_groups.sum(axis=2) # Make all the first diffs for saturated groups be equal to # 100,000 to put them above the good values in the sorted index first_diffs[np.isnan(first_diffs)] = 100000. # Here we sort the 3D array along the last axis, which is the group # axis. np.argsort returns a 3D array with the last axis containing # the indices that would yield the groups in order. sort_index = np.argsort(positive_first_diffs) # median_diffs is a 2D array with the clipped median of each pixel median_diffs = get_clipped_median_array(ndiffs, number_sat_groups, first_diffs, sort_index) # Compute uncertainties as the quadrature sum of the poisson noise # in the first difference signal and read noise. Because the first # differences can be biased by CRs/jumps, we use the median signal # for computing the poisson noise. Here we lower the read noise # by the square root of number of frames in the group. # Sigma is a 2D array. sigma = np.sqrt(np.abs(median_diffs) + read_noise_2 / nframes) # Reset sigma to exclude pixels with both readnoise and signal=0 sigma_0_pixels = np.where(sigma == 0.) if len(sigma_0_pixels[0] > 0): log.debug(f'Found {len(sigma_0_pixels[0])} pixels with sigma=0') log.debug('which will be reset so that no jump will be detected') sigma[sigma_0_pixels] = HUGE_NUM # Compute distance of each sample from the median in units of sigma; # note that the use of "abs" means we'll detect positive and negative # outliers. ratio is a 2D array with the units of sigma deviation of # the difference from the median. 
ratio = np.abs(first_diffs - median_diffs[:, :, np.newaxis]) /\ sigma[:, :, np.newaxis] ratio3d = np.reshape(ratio, (nrows, ncols, ndiffs)) # Get the group index for each pixel of the largest non-saturated # group, assuming the indices are sorted. 2 is subtracted from ngroups # because we are using differences and there is one less difference # than the number of groups. This is a 2-D array. max_value_index = ngroups - 2 - number_sat_groups # Extract from the sorted group indices the index of the largest # non-saturated group. row, col = np.where(number_sat_groups >= 0) max_index1d = sort_index[row, col, max_value_index[row, col]] # reshape to a 2-D array : max_index1 = np.reshape(max_index1d, (nrows, ncols)) max_ratio2d = np.reshape(ratio3d[row, col, max_index1[row, col]], (nrows, ncols)) max_index1d = sort_index[row, col, 1] max_index2d = np.reshape(max_index1d, (nrows, ncols)) last_ratio = np.reshape(ratio3d[row, col, max_index2d[row, col]], (nrows, ncols)) # Get the row and column indices of pixels whose largest non-saturated # ratio is above the threshold, First search all the pixels that have # at least four good groups, these will use the normal threshold row4cr, col4cr = np.where(np.logical_and(ndiffs - number_sat_groups >= 4, max_ratio2d > normal_rej_thresh)) # For pixels with only three good groups, use the three diff threshold row3cr, col3cr = np.where(np.logical_and(ndiffs - number_sat_groups == 3, max_ratio2d > three_diff_rej_thresh)) # Finally, for pixels with only two good groups, compare the SNR of the # last good group to the two diff threshold row2cr, col2cr = np.where(last_ratio > two_diff_rej_thresh) log.info(f'From highest outlier Two-point found {len(row4cr)} pixels \ with at least one CR and at least four groups') log.info(f'From highest outlier Two-point found {len(row3cr)} pixels \ with at least one CR and three groups') log.info(f'From highest outlier Two-point found {len(row2cr)} pixels \ with at least one CR and two groups') # get the 
rows,col pairs for all pixels with at least one CR all_crs_row = np.concatenate((row4cr, row3cr, row2cr)) all_crs_col = np.concatenate((col4cr, col3cr, col2cr)) # Loop over all pixels that we found the first CR in number_pixels_with_cr = len(all_crs_row) for j in range(number_pixels_with_cr): # Extract the first diffs for this pixel with at least one CR, # yielding a 1D array pix_masked_diffs = first_diffs[all_crs_row[j], all_crs_col[j]] # Get the scalar readnoise^2 and number of saturated groups for # this pixel. pix_rn2 = read_noise_2[all_crs_row[j], all_crs_col[j]] pix_sat_groups = number_sat_groups[all_crs_row[j], all_crs_col[j]] # Create a CR mask and set 1st CR to be found # cr_mask=0 designates a CR pix_cr_mask = np.ones(pix_masked_diffs.shape, dtype=bool) number_CRs_found = 1 pix_sorted_index = sort_index[all_crs_row[j], all_crs_col[j], :] # setting largest diff to be a CR pix_cr_mask[pix_sorted_index[ndiffs - pix_sat_groups - 1]] = 0 new_CR_found = True # Loop and see if there is more than one CR, setting the mask as # you go, stop when only 1 diffs is left. 
while new_CR_found and ((ndiffs - number_CRs_found - pix_sat_groups) > 1): new_CR_found = False largest_diff = ndiffs - number_CRs_found - pix_sat_groups # For this pixel get a new median difference excluding the # number of CRs found and the number of saturated groups pix_med_diff = get_clipped_median_vector( ndiffs, number_CRs_found + pix_sat_groups, pix_masked_diffs, pix_sorted_index) # Recalculate the noise and ratio for this pixel now that we # have rejected a CR pix_poisson_noise = np.sqrt(np.abs(pix_med_diff)) pix_sigma = np.sqrt(pix_poisson_noise * pix_poisson_noise + pix_rn2 / nframes) pix_ratio = np.abs(pix_masked_diffs - pix_med_diff) / pix_sigma rej_thresh = get_rej_thresh( largest_diff, two_diff_rej_thresh, three_diff_rej_thresh, normal_rej_thresh) # Check if largest remaining difference is above threshold if pix_ratio[pix_sorted_index[largest_diff - 1]] > rej_thresh: new_CR_found = True pix_cr_mask[pix_sorted_index[largest_diff - 1]] = 0 number_CRs_found += 1 # Found all CRs for this pixel. Set CR flags in input DQ array for # this pixel gdq[integ, 1:, all_crs_row[j], all_crs_col[j]] = \ np.bitwise_or(gdq[integ, 1:, all_crs_row[j], all_crs_col[j]], dqflags["JUMP_DET"] * np.invert(pix_cr_mask)) # Flag neighbors of pixels with detected jumps, if requested if flag_4_neighbors: cr_group, cr_row, cr_col = np.where(np.bitwise_and(gdq[integ], dqflags["JUMP_DET"])) for j in range(len(cr_group)): # Jumps must be in a certain range to have neighbors flagged if ratio[cr_row[j], cr_col[j], cr_group[j] - 1] < \ max_jump_to_flag_neighbors and \ ratio[cr_row[j], cr_col[j], cr_group[j] - 1] > \ min_jump_to_flag_neighbors: # This section saves flagged neighbors that are above or # below the current range of row. If this method # running in a single process, the row above and below are # not used. 
If it is running in multiprocessing mode, then # the rows above and below need to be returned to # find_jumps to use when it reconstructs the full group dq # array from the slices. if cr_row[j] != 0: gdq[integ, cr_group[j], cr_row[j] - 1, cr_col[j]] =\ np.bitwise_or(gdq[integ, cr_group[j], cr_row[j] - 1, cr_col[j]], dqflags["JUMP_DET"]) else: row_below_gdq[integ, cr_group[j], cr_col[j]] = \ dqflags["JUMP_DET"] if cr_row[j] != nrows - 1: gdq[integ, cr_group[j], cr_row[j] + 1, cr_col[j]] = \ np.bitwise_or(gdq[integ, cr_group[j], cr_row[j] + 1, cr_col[j]], dqflags["JUMP_DET"]) else: row_above_gdq[integ, cr_group[j], cr_col[j]] = \ dqflags["JUMP_DET"] # Here we are just checking that we don't flag neighbors of # jumps that are off the detector. if cr_col[j] != 0: gdq[integ, cr_group[j], cr_row[j], cr_col[j] - 1] =\ np.bitwise_or(gdq[integ, cr_group[j], cr_row[j], cr_col[j] - 1], dqflags["JUMP_DET"]) if cr_col[j] != ncols - 1: gdq[integ, cr_group[j], cr_row[j], cr_col[j] + 1] =\ np.bitwise_or(gdq[integ, cr_group[j], cr_row[j], cr_col[j] + 1], dqflags["JUMP_DET"]) # All done return gdq, row_below_gdq, row_above_gdq def get_rej_thresh(num_usable_diffs, two_group_thresh, three_group_thresh, normal_thresh): """ Return the rejection threshold depending on how many useable diffs there are left in the pixel. 
Parameters ---------- num_usable_diffs : int number of differences in pixel two_group_thresh : float cosmic ray sigma rejection threshold for ramps having 3 groups three_group_thresh : float cosmic ray sigma rejection threshold for ramps having 4 groups normal_thresh : float cosmic ray sigma rejection threshold Returns ------- thresh: float rejection threshold """ if num_usable_diffs == 2: return two_group_thresh elif num_usable_diffs == 3: return three_group_thresh else: return normal_thresh def get_clipped_median_array(num_diffs, diffs_to_ignore, input_array, sorted_index): """ This routine will return the clipped median for input_array which is a three dimensional array of first differences. It will ignore the largest differences (diffs_to_ignore) for each pixel and compute the median of the remaining differences. This is only called once for the entire array. Parameters ---------- num_diffs : int number of first difference, equal to the number of groups-1 diffs_to_ignore : int, 2D array number of saturated groups per pixerl input_array : int, 3D array first differences of adjacent groups sorted_index : int, 3D array first differences, sorted along the groups axis Returns ------- pix_med_diff : int, 2D array clipped median for the array of first differences """ pix_med_diff = np.zeros_like(diffs_to_ignore) pix_med_index = np.zeros_like(diffs_to_ignore) # Process pixels with four or more good differences row4, col4 = np.where(num_diffs - diffs_to_ignore >= 4) # ignore largest value and number of CRs found when finding new median # Check to see if this is a 2-D array or 1-D # Get the index of the median value always excluding the highest value # In addition, decrease the index by 1 for every two diffs_to_ignore, # these will be saturated values in this case # row, col = np.indices(diffs_to_ignore.shape) pix_med_index[row4, col4] = \ sorted_index[row4, col4, (num_diffs - (diffs_to_ignore[row4, col4] + 1)) // 2] pix_med_diff[row4, col4] = input_array[row4, col4, 
pix_med_index[row4, col4]] # For pixels with an even number of differences the median is the mean of # the two central values. So we need to get the value the other central # difference one lower in the sorted index that the one found above. even_group_rows, even_group_cols = \ np.where(np.logical_and(num_diffs - diffs_to_ignore - 1 % 2 == 0, num_diffs - diffs_to_ignore >= 4)) pix_med_index2 = np.zeros_like(pix_med_index) pix_med_index2[even_group_rows, even_group_cols] = \ sorted_index[even_group_rows, even_group_cols, (num_diffs - (diffs_to_ignore[even_group_rows, even_group_cols] + 3)) // 2] # Average together the two central values pix_med_diff[even_group_rows, even_group_cols] = \ (pix_med_diff[even_group_rows, even_group_cols] + input_array[even_group_rows, even_group_cols, pix_med_index2[even_group_rows, even_group_cols]]) / 2.0 # Process pixels with three good differences row3, col3 = np.where(num_diffs - diffs_to_ignore == 3) # ignore largest value and number of CRs found when finding new median # Check to see if this is a 2-D array or 1-D # Get the index of the median value always excluding the highest value # In addition, decrease the index by 1 for every two diffs_to_ignore, # these will be saturated values in this case # row, col = np.indices(diffs_to_ignore.shape) if len(row3) > 0: pix_med_index[row3, col3] = \ sorted_index[row3, col3, (num_diffs - (diffs_to_ignore[row3, col3])) // 2] pix_med_diff[row3, col3] = \ input_array[row3, col3, pix_med_index[row3, col3]] # Process pixels with two good differences row2, col2 = np.where(num_diffs - diffs_to_ignore == 2) if len(row2) > 0: pix_med_index[row2, col2] = sorted_index[row2, col2, 0] pix_med_diff[row2, col2] = input_array[row2, col2, pix_med_index[row2, col2]] return pix_med_diff def get_clipped_median_vector(num_diffs, diffs_to_ignore, input_vector, sorted_index): """ This routine will return the clipped median for the first differences of the input pixel (input_vector). 
It will ignore the input number of largest differences (diffs_to_ignore). As cosmic rays are found, the diffs_to_ignore will increase. Parameters ---------- num_diffs : int number of first difference, equal to the number of groups-1 diffs_to_ignore : int, 2D array number of saturated groups per pixerl input_array : int, 1D array first differences of adjacent groups for a pixel sorted_index : int, 3D array first differences, sorted along the groups axis Returns ------- pix_med_diff : int, vector clipped median for the vector of first differences """ if num_diffs - diffs_to_ignore == 2: # For the two diff case we just return the smallest value instead of # the median. return np.min(input_vector[sorted_index[0:1]]) elif num_diffs - diffs_to_ignore == 3: # For the three diff case we do not reject the largest diff when the # median is calculated. skip_max_diff = 0 else: # For the four or more diff case we will skip the largest diff. skip_max_diff = 1 # Find the median difference pix_med_index = \ sorted_index[int(((num_diffs - skip_max_diff - diffs_to_ignore) / 2))] pix_med_diff = input_vector[pix_med_index] # If there is an even number of differences, then average the two values # in the middle. if (num_diffs - diffs_to_ignore - skip_max_diff) % 2 == 0: # even number pix_med_index2 = \ sorted_index[int((num_diffs - skip_max_diff - diffs_to_ignore) / 2) - 1] pix_med_diff = (pix_med_diff + input_vector[pix_med_index2]) / 2.0 return pix_med_diff <file_sep>/src/stcal/ramp_fitting/ols_fit.py #! /usr/bin/env python import logging from multiprocessing.pool import Pool as Pool import numpy as np import time import warnings from . import constants from . import ramp_fit_class from . 
import utils log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) BUFSIZE = 1024 * 300000 # 300Mb cache size for data section log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) def ols_ramp_fit_multi( ramp_data, buffsize, save_opt, readnoise_2d, gain_2d, weighting, max_cores): """ Setup the inputs to ols_ramp_fit with and without multiprocessing. The inputs will be sliced into the number of cores that are being used for multiprocessing. Because the data models cannot be pickled, only numpy arrays are passed and returned as parameters to ols_ramp_fit. Parameters ---------- ramp_data : RampData Input data necessary for computing ramp fitting. buffsize : int size of data section (buffer) in bytes (not used) save_opt : bool calculate optional fitting results readnoise_2d : ndarray readnoise for all pixels gain_2d : ndarray gain for all pixels algorithm : str 'OLS' specifies that ordinary least squares should be used; 'GLS' specifies that generalized least squares should be used. weighting : str 'optimal' specifies that optimal weighting should be used; currently the only weighting supported. max_cores : str Number of cores to use for multiprocessing. If set to 'none' (the default), then no multiprocessing will be done. The other allowable values are 'quarter', 'half', and 'all'. This is the fraction of cores to use for multi-proc. The total number of cores includes the SMT cores (Hyper Threading for Intel). Returns ------- image_info : tuple The tuple of computed ramp fitting arrays. integ_info : tuple The tuple of computed integration fitting arrays. opt_info : tuple The tuple of computed optional results arrays for fitting. 
gls_opt_model : GLS_RampFitModel object or None Object containing optional GLS-specific ramp fitting data for the exposure """ # Determine number of slices to use for multi-processor computations number_slices = utils.compute_slices(max_cores) # Copy the int_times table for TSO data int_times = ramp_data.int_times # For MIRI datasets having >1 group, if all pixels in the final group are # flagged as DO_NOT_USE, resize the input model arrays to exclude the # final group. Similarly, if leading groups 1 though N have all pixels # flagged as DO_NOT_USE, those groups will be ignored by ramp fitting, and # the input model arrays will be resized appropriately. If all pixels in # all groups are flagged, return None for the models. if ramp_data.instrument_name == 'MIRI' and ramp_data.data.shape[1] > 1: miri_ans = discard_miri_groups(ramp_data) # The function returns False if the removed groups leaves no data to be # processed. If this is the case, return None for all expected variables # returned by ramp_fit if miri_ans is not True: return [None] * 3 # Call ramp fitting for the single processor (1 data slice) case if number_slices == 1: # Single threaded computation image_info, integ_info, opt_info = ols_ramp_fit_single( ramp_data, int_times, buffsize, save_opt, readnoise_2d, gain_2d, weighting) if image_info is None: return None, None, None return image_info, integ_info, opt_info # Call ramp fitting for multi-processor (multiple data slices) case else: image_info, integ_info, opt_info = ols_ramp_fit_multiprocessing( ramp_data, int_times, buffsize, save_opt, readnoise_2d, gain_2d, weighting, number_slices) return image_info, integ_info, opt_info def ols_ramp_fit_multiprocessing( ramp_data, int_times, buffsize, save_opt, readnoise_2d, gain_2d, weighting, number_slices): """ Fit a ramp using ordinary least squares. 
Calculate the count rate for each pixel in all data cube sections and all integrations, equal to the weighted slope for all sections (intervals between cosmic rays) of the pixel's ramp divided by the effective integration time. The data is spread across the desired number of processors (>1). Parameters ---------- ramp_data: RampData Input data necessary for computing ramp fitting. int_times : None Not used buffsize : int The working buffer size save_opt : bool Whether to return the optional output model readnoise_2d : ndarray The read noise of each pixel gain_2d : ndarray The gain of each pixel weighting : str 'optimal' is the only valid value number_slices: int The number of slices to partition the data into for multiprocessing. Return ------ image_info: tuple The tuple of computed ramp fitting arrays. integ_info: tuple The tuple of computed integration fitting arrays. opt_info: tuple The tuple of computed optional results arrays for fitting. """ log.info(f"Number of processors used for multiprocessing: {number_slices}") slices, rows_per_slice = compute_slices_for_starmap( ramp_data, int_times, buffsize, save_opt, readnoise_2d, gain_2d, weighting, number_slices) pool = Pool(processes=number_slices) pool_results = pool.starmap(ols_ramp_fit_single, slices) pool.close() pool.join() # Reassemble results image_info, integ_info, opt_info = assemble_pool_results( ramp_data, save_opt, pool_results, rows_per_slice) return image_info, integ_info, opt_info def assemble_pool_results(ramp_data, save_opt, pool_results, rows_per_slice): """ Takes the list of results from the starmap pool method and assembles the slices into primary tuples to be returned by `ramp_fit`. Parameters ---------- ramp_data: RampData The data needed for ramp fitting. save_opt: bool The option to save the optional results. pool_results: list The list of return values from ols_ramp_fit_single for each slice. 
Each slice is run through ols_ramp_fit_single, which returns three tuples of ndarrays, so pool_results is a list of tuples. Each tuple contains: image_info, integ_info, opt_info rows_per_slice: list The number of rows in each slice. Return ------ image_info: tuple The tuple of computed ramp fitting arrays. integ_info: tuple The tuple of computed integration fitting arrays. opt_info: tuple The tuple of computed optional results arrays for fitting. """ # Create output arrays for each output tuple. The input ramp data and # slices are needed for this. image_info, integ_info, opt_info = create_output_info( ramp_data, pool_results, save_opt) # Loop over the slices and assemble each slice into the main return arrays. current_row_start = 0 for k, result in enumerate(pool_results): image_slice, integ_slice, opt_slice = result nrows = rows_per_slice[k] get_image_slice(image_info, image_slice, current_row_start, nrows) get_integ_slice(integ_info, integ_slice, current_row_start, nrows) if save_opt: get_opt_slice(opt_info, opt_slice, current_row_start, nrows) current_row_start = current_row_start + nrows # Handle integration times return image_info, integ_info, opt_info def get_image_slice(image_info, image_slice, row_start, nrows): """ Populates the image output information from each slice. image_info: tuple The output image information to populate from the slice. image_slice: tuple The output slice used to populate the output arrays. row_start: int The start row the current slice at which starts. nrows: int The number of rows int the current slice. 
""" data, dq, var_poisson, var_rnoise, err = image_info sdata, sdq, svar_poisson, svar_rnoise, serr = image_slice srow, erow = row_start, row_start + nrows data[srow:erow, :] = sdata dq[srow:erow, :] = sdq var_poisson[srow:erow, :] = svar_poisson var_rnoise[srow:erow, :] = svar_rnoise err[srow:erow, :] = serr def get_integ_slice(integ_info, integ_slice, row_start, nrows): """ Populates the integration output information from each slice. integ_info: tuple The output integration information to populate from the slice. integ_slice: tuple The output slice used to populate the output arrays. row_start: int The start row the current slice at which starts. nrows: int The number of rows int the current slice. """ data, dq, var_poisson, var_rnoise, int_times, err = integ_info idata, idq, ivar_poisson, ivar_rnoise, iint_times, ierr = integ_slice srow, erow = row_start, row_start + nrows data[:, srow:erow, :] = idata dq[:, srow:erow, :] = idq var_poisson[:, srow:erow, :] = ivar_poisson var_rnoise[:, srow:erow, :] = ivar_rnoise err[:, srow:erow, :] = ierr def get_opt_slice(opt_info, opt_slice, row_start, nrows): """ Populates the optional output information from each slice. opt_info: tuple The output optional information to populate from the slice. opt_slice: tuple The output slice used to populate the output arrays. row_start: int The start row the current slice at which starts. nrows: int The number of rows int the current slice. """ (slope, sigslope, var_poisson, var_rnoise, yint, sigyint, pedestal, weights, crmag) = opt_info (oslope, osigslope, ovar_poisson, ovar_rnoise, oyint, osigyint, opedestal, oweights, ocrmag) = opt_slice srow, erow = row_start, row_start + nrows # The optional results product is of variable size in its second dimension. # The number of segments/cosmic rays determine the final products size. # Because each slice is computed indpendently, the number of segments may # differ from segment to segment. 
The final output product is created # using the max size for this dimension. To ensure correct assignment is # done during this step, the second dimension, as well as the row # dimension, must be specified. slope[:, :oslope.shape[1], srow:erow, :] = oslope sigslope[:, :osigslope.shape[1], srow:erow, :] = osigslope var_poisson[:, :ovar_poisson.shape[1], srow:erow, :] = ovar_poisson var_rnoise[:, :ovar_rnoise.shape[1], srow:erow, :] = ovar_rnoise yint[:, :oyint.shape[1], srow:erow, :] = oyint sigyint[:, :osigyint.shape[1], srow:erow, :] = osigyint weights[:, :oweights.shape[1], srow:erow, :] = oweights crmag[:, :ocrmag.shape[1], srow:erow, :] = ocrmag pedestal[:, srow:erow, :] = opedestal # Different shape (3-D, not 4-D) def create_output_info(ramp_data, pool_results, save_opt): """ Creates the output arrays and tuples for ramp fitting reassembly for mulitprocessing. Parameters ---------- ramp_data: RampData The original ramp fitting data. pool_results: list The list of results for each slice from multiprocessing. save_opt: bool The option to save optional results. 
""" tot_ints, tot_ngroups, tot_rows, tot_cols = ramp_data.data.shape imshape = (tot_rows, tot_cols) integ_shape = (tot_ints, tot_rows, tot_cols) # Create the primary product data = np.zeros(imshape, dtype=np.float32) dq = np.zeros(imshape, dtype=np.uint32) var_poisson = np.zeros(imshape, dtype=np.float32) var_rnoise = np.zeros(imshape, dtype=np.float32) err = np.zeros(imshape, dtype=np.float32) image_info = (data, dq, var_poisson, var_rnoise, err) # Create the integration products idata = np.zeros(integ_shape, dtype=np.float32) idq = np.zeros(integ_shape, dtype=np.uint32) ivar_poisson = np.zeros(integ_shape, dtype=np.float32) ivar_rnoise = np.zeros(integ_shape, dtype=np.float32) ierr = np.zeros(integ_shape, dtype=np.float32) int_times = ramp_data.int_times integ_info = (idata, idq, ivar_poisson, ivar_rnoise, int_times, ierr) # Create the optional results product if save_opt: max_segs, max_crs = get_max_segs_crs(pool_results) opt_shape = (tot_ints, max_segs, tot_rows, tot_cols) crmag_shape = (tot_ints, max_crs, tot_rows, tot_cols) oslope = np.zeros(opt_shape, dtype=np.float32) osigslope = np.zeros(opt_shape, dtype=np.float32) ovar_poisson = np.zeros(opt_shape, dtype=np.float32) ovar_rnoise = np.zeros(opt_shape, dtype=np.float32) oyint = np.zeros(opt_shape, dtype=np.float32) osigyint = np.zeros(opt_shape, dtype=np.float32) oweights = np.zeros(opt_shape, dtype=np.float32) # Different shape opedestal = np.zeros(integ_shape, dtype=np.float32) ocrmag = np.zeros(crmag_shape, dtype=np.float32) opt_info = (oslope, osigslope, ovar_poisson, ovar_rnoise, oyint, osigyint, opedestal, oweights, ocrmag) else: opt_info = None return image_info, integ_info, opt_info def get_max_segs_crs(pool_results): """ Computes the max number of segments computed needed for the second dimension of the optional results output. Parameter --------- pool_results: list The list of results for each slice from multiprocessing. Return ------ seg_max: int The maximum segment computed over all slices. 
""" seg_max = 1 crs_max = 0 for result in pool_results: image_slice, integ_slice, opt_slice = result oslice_slope = opt_slice[0] nsegs = oslice_slope.shape[1] if nsegs > seg_max: seg_max = nsegs olice_crmag = opt_slice[-1] ncrs = olice_crmag.shape[1] if ncrs > crs_max: crs_max = ncrs return seg_max, crs_max def compute_slices_for_starmap( ramp_data, int_times, buffsize, save_opt, readnoise_2d, gain_2d, weighting, number_slices): """ Creates the slices needed for each process for multiprocessing. The slices for the arguments needed for ols_ramp_fit_single. ramp_data: RampData The ramp data to be sliced. int_times : None Not used buffsize : int The working buffer size save_opt : bool Whether to return the optional output model readnoise_2d : ndarray The read noise of each pixel gain_2d : ndarray The gain of each pixel weighting : str 'optimal' is the only valid value number_slices: int The number of slices to partition the data into for multiprocessing. Return ------ slices: list The list of arguments for each processor for multiprocessing. """ nrows = ramp_data.data.shape[2] rslices = rows_per_slice(number_slices, nrows) slices = [] start_row = 0 for k in range(len(rslices)): ramp_slice = slice_ramp_data(ramp_data, start_row, rslices[k]) rnoise_slice = readnoise_2d[start_row:start_row + rslices[k], :].copy() gain_slice = gain_2d[start_row:start_row + rslices[k], :].copy() slices.insert( k, (ramp_slice, int_times, buffsize, save_opt, rnoise_slice, gain_slice, weighting)) start_row = start_row + rslices[k] return slices, rslices def rows_per_slice(nslices, nrows): """ Compute the number of rows per slice. Parameters ---------- nslices: int The number of slices to partition the rows. nrows: int The number of rows to partition. Return ______ rslices: list The number of rows for each slice. """ quotient = nrows // nslices remainder = nrows % nslices no_inc = nslices - remainder if remainder > 0: # Ensure the number of rows per slice is no more than a # difference of one. 
first = [quotient + 1] * remainder second = [quotient] * no_inc rslices = first + second else: rslices = [quotient] * nslices return rslices def slice_ramp_data(ramp_data, start_row, nrows): """ Slices the ramp data by rows, where the arrays contain all rows in [start_row, start_row+nrows). Parameters ---------- ramp_data: RampData The ramp data to slice. start_rows: int The start row of the slice. nrows: int The number of rows in the slice. Return ------ ramp_data_slice: RampData The slice of the ramp_data. """ ramp_data_slice = ramp_fit_class.RampData() # Slice data by row data = ramp_data.data[:, :, start_row:start_row + nrows, :].copy() err = ramp_data.err[:, :, start_row:start_row + nrows, :].copy() groupdq = ramp_data.groupdq[:, :, start_row:start_row + nrows, :].copy() pixeldq = ramp_data.pixeldq[start_row:start_row + nrows, :].copy() ramp_data_slice.set_arrays( data, err, groupdq, pixeldq, ramp_data.int_times) # Carry over meta data. ramp_data_slice.set_meta( name=ramp_data.instrument_name, frame_time=ramp_data.frame_time, group_time=ramp_data.group_time, groupgap=ramp_data.groupgap, nframes=ramp_data.nframes, drop_frames1=ramp_data.drop_frames1) # Carry over DQ flags. ramp_data_slice.flags_do_not_use = ramp_data.flags_do_not_use ramp_data_slice.flags_jump_det = ramp_data.flags_jump_det ramp_data_slice.flags_saturated = ramp_data.flags_saturated ramp_data_slice.flags_no_gain_val = ramp_data.flags_no_gain_val ramp_data_slice.flags_unreliable_slope = ramp_data.flags_unreliable_slope return ramp_data_slice def ols_ramp_fit_single( ramp_data, int_times, buffsize, save_opt, readnoise_2d, gain_2d, weighting): """ Fit a ramp using ordinary least squares. Calculate the count rate for each pixel in all data cube sections and all integrations, equal to the weighted slope for all sections (intervals between cosmic rays) of the pixel's ramp divided by the effective integration time. 
    Parameters
    ----------
    ramp_data : RampData
        Input data necessary for computing ramp fitting.

    int_times : None
        Not used

    buffsize : int
        The working buffer size

    save_opt : bool
        Whether to return the optional output model

    readnoise_2d : ndarray
        The read noise of each pixel

    gain_2d : ndarray
        The gain of each pixel

    weighting : str
        'optimal' is the only valid value

    Return
    ------
    image_info : tuple
        The tuple of computed ramp fitting arrays.

    integ_info : tuple
        The tuple of computed integration fitting arrays.

    opt_info : tuple
        The tuple of computed optional results arrays for fitting.
    """
    # For multiprocessing, a new process requires the DQ flags to be updated,
    # since they are global variables.
    constants.update_dqflags_from_ramp_data(ramp_data)
    if None in constants.dqflags.values():
        raise ValueError("Some of the DQ flags required for ramp_fitting are None.")

    tstart = time.time()

    # Save original shapes for writing to log file, as these may change for MIRI
    n_int, ngroups, nrows, ncols = ramp_data.data.shape
    orig_ngroups = ngroups
    orig_cubeshape = (ngroups, nrows, ncols)

    if ngroups == 1:
        log.warning('Dataset has NGROUPS=1, so count rates for each integration ')
        log.warning('will be calculated as the value of that 1 group divided by ')
        log.warning('the group exposure time.')

    # In this 'First Pass' over the data, loop over integrations and data
    # sections to calculate the estimated median slopes, which will be used
    # to calculate the variances. This is the same method to estimate slopes
    # as is done in the jump detection step, except here CR-affected and
    # saturated groups have already been flagged. The actual, fit, slopes for
    # each segment are also calculated here.
    fit_slopes_ans = ramp_fit_slopes(
        ramp_data, gain_2d, readnoise_2d, save_opt, weighting)

    # A first element of "saturated" means every pixel's initial group is
    # saturated; the remaining elements are the already-built output products.
    if fit_slopes_ans[0] == "saturated":
        return fit_slopes_ans[1:]

    # In this 'Second Pass' over the data, loop over integrations and data
    # sections to calculate the variances of the slope using the estimated
    # median slopes from the 'First Pass'. These variances are due to Poisson
    # noise only, read noise only, and the combination of Poisson noise and
    # read noise. The integration-specific variances are 3D arrays, and the
    # segment-specific variances are 4D arrays.
    variances_ans = ramp_fit_compute_variances(
        ramp_data, gain_2d, readnoise_2d, fit_slopes_ans)

    # Now that the segment-specific and integration-specific variances have
    # been calculated, the segment-specific, integration-specific, and
    # overall slopes will be calculated. The integration-specific slope is
    # calculated as a weighted average of the segments in the integration:
    #   slope_int = sum_over_segs(slope_seg/var_seg)/ sum_over_segs(1/var_seg)
    # The overall slope is calculated as a weighted average of the segments in
    # all integrations:
    #   slope = sum_over_integs_and_segs(slope_seg/var_seg)/
    #               sum_over_integs_and_segs(1/var_seg)
    image_info, integ_info, opt_info = ramp_fit_overall(
        ramp_data, orig_cubeshape, orig_ngroups, buffsize, fit_slopes_ans,
        variances_ans, save_opt, int_times, tstart)

    return image_info, integ_info, opt_info


def discard_miri_groups(ramp_data):
    """
    For MIRI datasets having >1 group, if all pixels in the final group are
    flagged as DO_NOT_USE, resize the input model arrays to exclude the
    final group.  Similarly, if leading groups 1 though N have all pixels
    flagged as DO_NOT_USE, those groups will be ignored by ramp fitting, and
    the input model arrays will be resized appropriately. If all pixels in
    all groups are flagged, return None for the models.

    Parameters
    ----------
    ramp_data : RampData
        Input data necessary for computing ramp fitting.

    Returns
    -------
    bool :
        False if no data to process after discarding unusable data.
        True if useable data available for further processing.
    """
    data = ramp_data.data
    err = ramp_data.err
    groupdq = ramp_data.groupdq

    jump_flag = constants.dqflags["JUMP_DET"]
    n_int, ngroups, nrows, ncols = data.shape

    num_bad_slices = 0  # number of initial groups that are all DO_NOT_USE

    # Drop leading groups that are entirely DO_NOT_USE.  groupdq is trimmed
    # inside the loop (it drives the loop condition); data and err are trimmed
    # once afterwards using num_bad_slices.
    while np.all(np.bitwise_and(groupdq[:, 0, :, :], constants.dqflags["DO_NOT_USE"])):
        num_bad_slices += 1
        ngroups -= 1

        # Check if there are remaining groups before accessing data
        if ngroups < 1:  # no usable data
            log.error('1. All groups have all pixels flagged as DO_NOT_USE,')
            log.error(' so will not process this dataset.')
            return False

        groupdq = groupdq[:, 1:, :, :]

        # Where the initial group of the just-truncated data is a cosmic ray,
        # remove the JUMP_DET flag from the group dq for those pixels so
        # that those groups will be included in the fit.
        wh_cr = np.where(np.bitwise_and(groupdq[:, 0, :, :], jump_flag))
        num_cr_1st = len(wh_cr[0])

        for ii in range(num_cr_1st):
            groupdq[wh_cr[0][ii], 0, wh_cr[1][ii], wh_cr[2][ii]] -= jump_flag

    if num_bad_slices > 0:
        data = data[:, num_bad_slices:, :, :]
        err = err[:, num_bad_slices:, :, :]

    log.info('Number of leading groups that are flagged as DO_NOT_USE: %s', num_bad_slices)

    # If all groups were flagged, the final group would have been picked up
    # in the while loop above, ngroups would have been set to 0, and Nones
    # would have been returned.  If execution has gotten here, there must
    # be at least 1 remaining group that is not all flagged.
    if np.all(np.bitwise_and(groupdq[:, -1, :, :], constants.dqflags["DO_NOT_USE"])):
        ngroups -= 1

        # Check if there are remaining groups before accessing data
        if ngroups < 1:  # no usable data
            log.error('2. All groups have all pixels flagged as DO_NOT_USE,')
            log.error(' so will not process this dataset.')
            return False

        data = data[:, :-1, :, :]
        err = err[:, :-1, :, :]
        groupdq = groupdq[:, :-1, :, :]

        log.info('MIRI dataset has all pixels in the final group flagged as DO_NOT_USE.')

    # Next block is to satisfy github issue 1681:
    # "MIRI FirstFrame and LastFrame minimum number of groups"
    if ngroups < 2:
        log.warning('MIRI datasets require at least 2 groups/integration')
        log.warning('(NGROUPS), so will not process this dataset.')
        return False

    # Write the (possibly) trimmed arrays back to the input object.
    ramp_data.data = data
    ramp_data.err = err
    ramp_data.groupdq = groupdq

    return True


def ramp_fit_slopes(ramp_data, gain_2d, readnoise_2d, save_opt, weighting):
    """
    Calculate effective integration time (once EFFINTIM has been populated
    accessible, will use that instead), and other keywords that will needed
    if the pedestal calculation is requested. Note 'nframes' is the number of
    given by the NFRAMES keyword, and is the number of frames averaged
    on-board for a group, i.e., it does not include the groupgap.

    Parameters
    ----------
    ramp_data : RampData
        Input data necessary for computing ramp fitting.

    gain_2d : ndarrays
        gain for all pixels

    readnoise_2d : ndarrays
        readnoise for all pixels

    save_opt : bool
        calculate optional fitting results

    weighting : str
        'optimal' specifies that optimal weighting should be used; currently
        the only weighting supported.
    Return
    ------
    max_seg : int
        Maximum possible number of segments over all groups and segments

    gdq_cube_shape : ndarray
        Group DQ dimensions

    effintim : float
        effective integration time for a single group

    f_max_seg : int
        Actual maximum number of segments over all groups and segments

    dq_int : ndarray
        The pixel dq for each integration for each pixel

    sum_weight : ndarray
        The sum of the weights for each pixel

    num_seg_per_int : ndarray
        Cube of numbers of segments for all integrations and pixels, 3-D int

    sat_0th_group_int : ndarray
        Integration-specific slice whose value for a pixel is 1 if the initial
        group of the ramp is saturated, 3-D uint8

    opt_res : OptRes
        Object to hold optional results for all good pixels.

    pixeldq : ndarray
        The input 2-D pixel DQ flags

    inv_var : ndarray
        values of 1/variance for good pixels, 1-D float

    med_rates : ndarray
        Rate array
    """
    sat_flag = constants.dqflags["SATURATED"]
    jump_flag = constants.dqflags["JUMP_DET"]

    # Get image data information
    data = ramp_data.data
    err = ramp_data.err
    groupdq = ramp_data.groupdq
    inpixeldq = ramp_data.pixeldq

    # Get instrument and exposure data
    frame_time = ramp_data.frame_time
    group_time = ramp_data.group_time
    groupgap = ramp_data.groupgap
    nframes = ramp_data.nframes

    # Get needed sizes and shapes
    n_int, ngroups, nrows, ncols = data.shape
    imshape = (nrows, ncols)
    cubeshape = (ngroups,) + imshape

    # If all the pixels have their initial groups flagged as saturated, the DQ
    # in the primary and integration-specific output products are updated,
    # the other arrays in all output products are populated with zeros, and
    # the output products are returned to ramp_fit(). If the initial group of
    # a ramp is saturated, it is assumed that all groups are saturated.
    first_gdq = groupdq[:, 0, :, :]
    if np.all(np.bitwise_and(first_gdq, sat_flag)):
        image_info, integ_info, opt_info = utils.do_all_sat(
            inpixeldq, groupdq, imshape, n_int, save_opt)

        return "saturated", image_info, integ_info, opt_info

    # Calculate effective integration time (once EFFINTIM has been populated
    # and accessible, will use that instead), and other keywords that will
    # needed if the pedestal calculation is requested. Note 'nframes'
    # is the number of given by the NFRAMES keyword, and is the number of
    # frames averaged on-board for a group, i.e., it does not include the
    # groupgap.
    effintim = (nframes + groupgap) * frame_time

    # Get GROUP DQ and ERR arrays from input file
    gdq_cube = groupdq
    gdq_cube_shape = gdq_cube.shape

    # Get max number of segments fit in all integrations
    max_seg, num_CRs = calc_num_seg(gdq_cube, n_int)
    del gdq_cube

    f_max_seg = 0  # final number to use, usually overwritten by actual value

    dq_int, median_diffs_2d, num_seg_per_int, sat_0th_group_int =\
        utils.alloc_arrays_1(n_int, imshape)

    opt_res = utils.OptRes(n_int, imshape, max_seg, ngroups, save_opt)

    # Get Pixel DQ array from input file. The incoming RampModel has uint32
    # PIXELDQ, but ramp fitting will update this array here by flagging
    # the 2D PIXELDQ locations where the ramp data has been previously
    # flagged as jump-detected or saturated. These additional bit values
    # require this local variable to be uint16, and it will be used as the
    # (uint16) PIXELDQ in the outgoing ImageModel.
    pixeldq = inpixeldq.copy()
    pixeldq = utils.reset_bad_gain(pixeldq, gain_2d)  # Flag bad pixels in gain

    # In this 'First Pass' over the data, loop over integrations and data
    # sections to calculate the estimated median slopes, which will be used
    # to calculate the variances. This is the same method to estimate slopes
    # as is done in the jump detection step, except here CR-affected and
    # saturated groups have already been flagged. The actual, fit, slopes for
    # each segment are also calculated here.

    # Loop over data integrations:
    for num_int in range(0, n_int):
        # Loop over data sections
        for rlo in range(0, cubeshape[1], nrows):
            rhi = rlo + nrows

            if rhi > cubeshape[1]:
                rhi = cubeshape[1]

            # Skip data section if it is all NaNs
            # data_sect = np.float32(data[num_int, :, :, :])
            data_sect = data[num_int, :, :, :]
            if np.all(np.isnan(data_sect)):
                log.error('Current data section is all nans, so not processing the section.')
                continue

            # first frame section for 1st group of current integration
            ff_sect = data[num_int, 0, rlo:rhi, :]

            # Get appropriate sections
            gdq_sect = groupdq[num_int, :, :, :]
            rn_sect = readnoise_2d[rlo:rhi, :]
            gain_sect = gain_2d[rlo:rhi, :]

            # Reset all saturated groups in the input data array to NaN
            where_sat = np.where(np.bitwise_and(gdq_sect, sat_flag))

            data_sect[where_sat] = np.NaN
            del where_sat

            # Compute the first differences of all groups
            first_diffs_sect = np.diff(data_sect, axis=0)

            # If the dataset has only 1 group/integ, assume the 'previous group'
            # is all zeros, so just use data as the difference
            if first_diffs_sect.shape[0] == 0:
                first_diffs_sect = data_sect.copy()
            else:
                # Similarly, for datasets having >1 group/integ and having
                # single-group segments, just use the data as the difference
                wh_nan = np.where(np.isnan(first_diffs_sect[0, :, :]))

                if len(wh_nan[0]) > 0:
                    first_diffs_sect[0, :, :][wh_nan] = data_sect[0, :, :][wh_nan]

                del wh_nan

                # Mask all the first differences that are affected by a CR,
                # starting at group 1.  The purpose of starting at index 1 is
                # to shift all the indices down by 1, so they line up with the
                # indices in first_diffs.
                i_group, i_yy, i_xx, = np.where(np.bitwise_and(
                    gdq_sect[1:, :, :], jump_flag))
                first_diffs_sect[i_group, i_yy, i_xx] = np.NaN

                del i_group, i_yy, i_xx

                # Check for pixels in which there is good data in 0th group, but
                # all first_diffs for this ramp are NaN because there are too
                # few good groups past the 0th. Due to the shortage of good
                # data, the first_diffs will be set here equal to the data in
                # the 0th group.
                wh_min = np.where(np.logical_and(
                    np.isnan(first_diffs_sect).all(axis=0),
                    np.isfinite(data_sect[0, :, :])))
                # NOTE(review): `len(wh_min[0] > 0)` is almost certainly meant
                # to be `len(wh_min[0]) > 0`.  Behavior happens to be the same
                # (the boolean array has the same length as wh_min[0], and a
                # nonzero length is truthy), but the parenthesization looks
                # accidental — confirm and fix in a dedicated change.
                if len(wh_min[0] > 0):
                    first_diffs_sect[0, :, :][wh_min] = data_sect[0, :, :][wh_min]

                del wh_min

            # All first differences affected by saturation and CRs have been set
            # to NaN, so compute the median of all non-NaN first differences.
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", "All-NaN.*", RuntimeWarning)
                nan_med = np.nanmedian(first_diffs_sect, axis=0)

            nan_med[np.isnan(nan_med)] = 0.  # if all first_diffs_sect are nans
            median_diffs_2d[rlo:rhi, :] += nan_med

            # Calculate the slope of each segment
            # note that the name "opt_res", which stands for "optional results",
            # is deceiving; this in fact contains all the per-integration and
            # per-segment results that will eventually be used to compute the
            # final slopes, sigmas, etc. for the main (non-optional) products
            t_dq_cube, inv_var, opt_res, f_max_seg, num_seg = \
                calc_slope(data_sect, gdq_sect, frame_time, opt_res, save_opt,
                           rn_sect, gain_sect, max_seg, ngroups, weighting,
                           f_max_seg)

            del gain_sect

            # Populate 3D num_seg { integ, y, x } with 2D num_seg for this data
            # section (y,x) and integration (num_int)
            sect_shape = data_sect.shape[-2:]
            num_seg_per_int[num_int, rlo:rhi, :] = num_seg.reshape(sect_shape)

            # Populate integ-spec slice which is set if 0th group has SAT
            wh_sat0 = np.where(np.bitwise_and(gdq_sect[0, :, :], sat_flag))
            if len(wh_sat0[0]) > 0:
                sat_0th_group_int[num_int, rlo:rhi, :][wh_sat0] = 1

            del wh_sat0

            pixeldq_sect = pixeldq[rlo:rhi, :].copy()
            dq_int[num_int, rlo:rhi, :] = utils.dq_compress_sect(
                t_dq_cube, pixeldq_sect).copy()

            del t_dq_cube

            # Loop over the segments and copy the reshaped 2D segment-specific
            # results for the current data section to the 4D output arrays.
            opt_res.reshape_res(num_int, rlo, rhi, sect_shape, ff_sect, save_opt)

            if save_opt:
                # Calculate difference between each slice and the previous slice
                # as approximation to cosmic ray amplitude for those pixels
                # having their DQ set for cosmic rays
                data_diff = data_sect - utils.shift_z(data_sect, -1)
                dq_cr = np.bitwise_and(jump_flag, gdq_sect)

                opt_res.cr_mag_seg[num_int, :, rlo:rhi, :] = data_diff * (dq_cr != 0)

                del data_diff

            del data_sect
            del ff_sect
            del gdq_sect

    # assumes the section loop executed at least once, so pixeldq_sect is
    # bound here — TODO confirm against the all-NaN `continue` path above
    if pixeldq_sect is not None:
        del pixeldq_sect

    # Compute the final 2D array of differences; create rate array
    median_diffs_2d /= n_int
    med_rates = median_diffs_2d / group_time

    del median_diffs_2d
    del first_diffs_sect

    ramp_data.data = data
    ramp_data.err = err
    ramp_data.groupdq = groupdq
    ramp_data.pixeldq = inpixeldq

    return max_seg, gdq_cube_shape, effintim, f_max_seg, dq_int, num_seg_per_int,\
        sat_0th_group_int, opt_res, pixeldq, inv_var, med_rates


def ramp_fit_compute_variances(ramp_data, gain_2d, readnoise_2d, fit_slopes_ans):
    """
    In this 'Second Pass' over the data, loop over integrations and data
    sections to calculate the variances of the slope using the estimated
    median slopes from the 'First Pass'. These variances are due to Poisson
    noise only, read noise only, and the combination of Poisson noise and
    read noise. The integration-specific variances are 3D arrays, and the
    segment-specific variances are 4D arrays.

    The naming convention for the arrays:
        'var': a variance
        'p3': intermediate 3D array for variance due to Poisson noise
        'r4': intermediate 4D array for variance due to read noise
        'both4': intermediate 4D array for combined variance due to both
                 Poisson and read noise
        'inv_<X>': intermediate array = 1/<X>
        's_inv_<X>': intermediate array = 1/<X>, summed over integrations

    Parameters
    ----------
    ramp_data : ramp_fit_class.RampData
        Input data necessary for computing ramp fitting.
gain_2d : ndarray gain for all pixels readnoise_2d : ndarray The read noise for each pixel fit_slopes_ans : tuple Contains intermediate values computed in the first pass over the data. Return ------ var_p3 : ndarray 3-D variance based on Poisson noise var_r3 : ndarray 3-D variance based on read noise var_p4 : ndarray 4-D variance based on Poisson noise var_r4 : ndarray 4-D variance based on read noise var_both4 : ndarray 4-D array for combined variance due to both Poisson and read noise var_both3 : ndarray 3-D array for combined variance due to both Poisson and read noise inv_var_both4 : ndarray 1 / var_both4 s_inv_var_p3 : ndarray 1 / var_p3, summed over integrations s_inv_var_r3 : ndarray 1 / var_r3, summed over integrations s_inv_var_both3 : ndarray 1 / var_both3, summed over integrations """ # Get image data information data = ramp_data.data err = ramp_data.err groupdq = ramp_data.groupdq inpixeldq = ramp_data.pixeldq # Get instrument and exposure data group_time = ramp_data.group_time # Get needed sizes and shapes n_int, ngroups, nrows, ncols = data.shape imshape = (nrows, ncols) cubeshape = (ngroups,) + imshape max_seg = fit_slopes_ans[0] num_seg_per_int = fit_slopes_ans[5] med_rates = fit_slopes_ans[10] var_p3, var_r3, var_p4, var_r4, var_both4, var_both3, \ inv_var_both4, s_inv_var_p3, s_inv_var_r3, s_inv_var_both3, segs_4 = \ utils.alloc_arrays_2(n_int, imshape, max_seg) # Loop over data integrations for num_int in range(n_int): # Loop over data sections for rlo in range(0, cubeshape[1], nrows): rhi = rlo + nrows if rhi > cubeshape[1]: rhi = cubeshape[1] gdq_sect = groupdq[num_int, :, rlo:rhi, :] rn_sect = readnoise_2d[rlo:rhi, :] gain_sect = gain_2d[rlo:rhi, :] # Calculate results needed to compute the variance arrays den_r3, den_p3, num_r3, segs_beg_3 = \ utils.calc_slope_vars(rn_sect, gain_sect, gdq_sect, group_time, max_seg) segs_4[num_int, :, rlo:rhi, :] = segs_beg_3 # Suppress harmless arithmetic warnings for now warnings.filterwarnings("ignore", 
".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) var_p4[num_int, :, rlo:rhi, :] = den_p3 * med_rates[rlo:rhi, :] # Find the segment variance due to read noise and convert back to DN var_r4[num_int, :, rlo:rhi, :] = num_r3 * den_r3 / gain_sect**2 # Reset the warnings filter to its original state warnings.resetwarnings() del den_r3, den_p3, num_r3, segs_beg_3 del gain_sect del gdq_sect # The next 4 statements zero out entries for non-existing segments, and # set the variances for segments having negative slopes (the segment # variance is proportional to the median estimated slope) to # outrageously large values so that they will have negligible # contributions. var_p4[num_int, :, :, :] *= (segs_4[num_int, :, :, :] > 0) # Suppress, then re-enable harmless arithmetic warnings warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) var_p4[var_p4 <= 0.] = utils.LARGE_VARIANCE var_r4[num_int, :, :, :] *= (segs_4[num_int, :, :, :] > 0) var_r4[var_r4 <= 0.] = utils.LARGE_VARIANCE # The sums of inverses of the variances are needed for later # variance calculations. s_inv_var_p3[num_int, :, :] = (1. / var_p4[num_int, :, :, :]).sum(axis=0) var_p3[num_int, :, :] = 1. / s_inv_var_p3[num_int, :, :] s_inv_var_r3[num_int, :, :] = (1. / var_r4[num_int, :, :, :]).sum(axis=0) var_r3[num_int, :, :] = 1. / s_inv_var_r3[num_int, :, :] # Huge variances correspond to non-existing segments, so are reset to 0 # to nullify their contribution. var_p3[var_p3 > 0.1 * utils.LARGE_VARIANCE] = 0. warnings.resetwarnings() var_both4[num_int, :, :, :] = var_r4[num_int, :, :, :] + var_p4[num_int, :, :, :] inv_var_both4[num_int, :, :, :] = 1. / var_both4[num_int, :, :, :] # Want to retain values in the 4D arrays only for the segments that each # pixel has, so will zero out values for the higher indices. 
Creating # and manipulating intermediate arrays (views, such as var_p4_int # will zero out the appropriate indices in var_p4 and var_r4.) # Extract the slice of 4D arrays for the current integration var_p4_int = var_p4[num_int, :, :, :] # [ segment, y, x ] inv_var_both4_int = inv_var_both4[num_int, :, :, :] # Zero out non-existing segments var_p4_int *= (segs_4[num_int, :, :, :] > 0) inv_var_both4_int *= (segs_4[num_int, :, :, :] > 0) # reshape these arrays to simplify masking [ segment, 1D pixel ] var_p4_int2 = var_p4_int.reshape( (var_p4_int.shape[0], var_p4_int.shape[1] * var_p4_int.shape[2])) s_inv_var_both3[num_int, :, :] = (inv_var_both4[num_int, :, :, :]).sum(axis=0) # Suppress, then re-enable harmless arithmetic warnings warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) var_both3[num_int, :, :] = 1. / s_inv_var_both3[num_int, :, :] warnings.resetwarnings() del var_p4_int del var_p4_int2 del gain_2d var_p4 *= (segs_4[:, :, :, :] > 0) # Zero out non-existing segments var_r4 *= (segs_4[:, :, :, :] > 0) # Delete lots of arrays no longer needed if inv_var_both4_int is not None: del inv_var_both4_int if med_rates is not None: del med_rates if num_seg_per_int is not None: del num_seg_per_int if readnoise_2d is not None: del readnoise_2d if rn_sect is not None: del rn_sect if segs_4 is not None: del segs_4 ramp_data.data = data ramp_data.err = err ramp_data.groupdq = groupdq ramp_data.pixeldq = inpixeldq return var_p3, var_r3, var_p4, var_r4, var_both4, var_both3, inv_var_both4, \ s_inv_var_p3, s_inv_var_r3, s_inv_var_both3 def ramp_fit_overall( ramp_data, orig_cubeshape, orig_ngroups, buffsize, fit_slopes_ans, variances_ans, save_opt, int_times, tstart): """ Computes the final/overall slope and variance values using the intermediate computations previously computed. Parameters ---------- ramp_data : RampData Input data necessary for computing ramp fitting. 
orig_cubeshape : tuple Original shape cube of input dataset orig_ngroups : int Original number of groups buffsize : int Size of data section (buffer) in bytes fit_slopes_ans : tuple Return values from ramp_fit_slopes variances_ans : tuple Return values from ramp_fit_compute_variances save_opt : bool Calculate optional fitting results. int_times : bintable, or None The INT_TIMES table, if it exists in the input, else None tstart : float Start time. Return ------ image_info : tuple The tuple of computed ramp fitting arrays. integ_info : tuple The tuple of computed integration fitting arrays. opt_info : tuple The tuple of computed optional results arrays for fitting. """ # Get image data information data = ramp_data.data groupdq = ramp_data.groupdq # Get instrument and exposure data instrume = ramp_data.instrument_name groupgap = ramp_data.groupgap nframes = ramp_data.nframes dropframes1 = ramp_data.drop_frames1 if dropframes1 is None: # set to default if missing dropframes1 = 0 log.debug('Missing keyword DRPFRMS1, so setting to default value of 0') # Get needed sizes and shapes n_int, ngroups, nrows, ncols = data.shape imshape = (nrows, ncols) # Unpack intermediate computations from preious steps max_seg, gdq_cube_shape, effintim, f_max_seg, dq_int, num_seg_per_int = fit_slopes_ans[:6] sat_0th_group_int, opt_res, pixeldq, inv_var, med_rates = fit_slopes_ans[6:] var_p3, var_r3, var_p4, var_r4, var_both4, var_both3 = variances_ans[:6] inv_var_both4, s_inv_var_p3, s_inv_var_r3, s_inv_var_both3 = variances_ans[6:] slope_by_var4 = opt_res.slope_seg.copy() / var_both4 del var_both4 s_slope_by_var3 = slope_by_var4.sum(axis=1) # sum over segments (not integs) s_slope_by_var2 = s_slope_by_var3.sum(axis=0) # sum over integrations s_inv_var_both2 = s_inv_var_both3.sum(axis=0) # Compute the 'dataset-averaged' slope # Suppress, then re-enable harmless arithmetic warnings warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", 
".*divide by zero.*", RuntimeWarning) slope_dataset2 = s_slope_by_var2 / s_inv_var_both2 warnings.resetwarnings() del s_slope_by_var2, s_slope_by_var3, slope_by_var4 del s_inv_var_both2, s_inv_var_both3 # Replace nans in slope_dataset2 with 0 (for non-existing segments) slope_dataset2[np.isnan(slope_dataset2)] = 0. # Compute the integration-specific slope the_num = (opt_res.slope_seg * inv_var_both4).sum(axis=1) the_den = (inv_var_both4).sum(axis=1) # Suppress, then re-enable harmless arithmetic warnings warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) slope_int = the_num / the_den warnings.resetwarnings() del the_num, the_den # Clean up ramps that are SAT on their initial groups; set ramp parameters # for variances and slope so they will not contribute var_p3, var_both3, slope_int, dq_int = utils.fix_sat_ramps( sat_0th_group_int, var_p3, var_both3, slope_int, dq_int) if sat_0th_group_int is not None: del sat_0th_group_int # Loop over data integrations to calculate integration-specific pedestal if save_opt: dq_slice = np.zeros((gdq_cube_shape[2], gdq_cube_shape[3]), dtype=np.uint32) for num_int in range(0, n_int): dq_slice = groupdq[num_int, 0, :, :] opt_res.ped_int[num_int, :, :] = \ utils.calc_pedestal(num_int, slope_int, opt_res.firstf_int, dq_slice, nframes, groupgap, dropframes1) del dq_slice # Collect optional results for output if save_opt: gdq_cube = groupdq opt_res.shrink_crmag(n_int, gdq_cube, imshape, ngroups) del gdq_cube # Some contributions to these vars may be NaN as they are from ramps # having PIXELDQ=DO_NOT_USE var_p4[np.isnan(var_p4)] = 0. var_r4[np.isnan(var_r4)] = 0. 
# Truncate results at the maximum number of segments found opt_res.slope_seg = opt_res.slope_seg[:, :f_max_seg, :, :] opt_res.sigslope_seg = opt_res.sigslope_seg[:, :f_max_seg, :, :] opt_res.yint_seg = opt_res.yint_seg[:, :f_max_seg, :, :] opt_res.sigyint_seg = opt_res.sigyint_seg[:, :f_max_seg, :, :] opt_res.weights = (inv_var_both4[:, :f_max_seg, :, :])**2. opt_res.var_p_seg = var_p4[:, :f_max_seg, :, :] opt_res.var_r_seg = var_r4[:, :f_max_seg, :, :] opt_info = opt_res.output_optional(effintim) else: opt_info = None if inv_var_both4 is not None: del inv_var_both4 if var_p4 is not None: del var_p4 if var_r4 is not None: del var_r4 if inv_var is not None: del inv_var if pixeldq is not None: del pixeldq # Output integration-specific results to separate file integ_info = utils.output_integ( slope_int, dq_int, effintim, var_p3, var_r3, var_both3, int_times) if opt_res is not None: del opt_res if slope_int is not None: del slope_int del var_p3 del var_r3 del var_both3 if int_times is not None: del int_times # Divide slopes by total (summed over all integrations) effective # integration time to give count rates. 
c_rates = slope_dataset2 / effintim # Compress all integration's dq arrays to create 2D PIXELDDQ array for # primary output final_pixeldq = utils.dq_compress_final(dq_int, n_int) if dq_int is not None: del dq_int tstop = time.time() utils.log_stats(c_rates) log.debug('Instrument: %s', instrume) log.debug('Number of pixels in 2D array: %d', nrows * ncols) log.debug('Shape of 2D image: (%d, %d)' % (imshape)) log.debug('Shape of data cube: (%d, %d, %d)' % (orig_cubeshape)) log.debug('Buffer size (bytes): %d', buffsize) log.debug('Number of rows per buffer: %d', nrows) log.info('Number of groups per integration: %d', orig_ngroups) log.info('Number of integrations: %d', n_int) log.debug('The execution time in seconds: %f', tstop - tstart) # Compute the 2D variances due to Poisson and read noise var_p2 = 1 / (s_inv_var_p3.sum(axis=0)) var_r2 = 1 / (s_inv_var_r3.sum(axis=0)) # Huge variances correspond to non-existing segments, so are reset to 0 # to nullify their contribution. with warnings.catch_warnings(): warnings.filterwarnings("ignore", "invalid value.*", RuntimeWarning) var_p2[var_p2 > 0.1 * utils.LARGE_VARIANCE] = 0. var_r2[var_r2 > 0.1 * utils.LARGE_VARIANCE] = 0. # Some contributions to these vars may be NaN as they are from ramps # having PIXELDQ=DO_NOT_USE var_p2[np.isnan(var_p2)] = 0. var_r2[np.isnan(var_r2)] = 0. # Suppress, then re-enable, harmless arithmetic warning warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) err_tot = np.sqrt(var_p2 + var_r2) warnings.resetwarnings() del s_inv_var_p3 del s_inv_var_r3 # Create new model for the primary output. 
    # Cast all output arrays to the dtypes expected in the output product
    data = c_rates.astype(np.float32)
    dq = final_pixeldq.astype(np.uint32)
    var_poisson = var_p2.astype(np.float32)
    var_rnoise = var_r2.astype(np.float32)
    err = err_tot.astype(np.float32)
    image_info = (data, dq, var_poisson, var_rnoise, err)

    return image_info, integ_info, opt_info


def calc_power(snr):
    """
    Using the given SNR, calculate the weighting exponent, which is from
    `Fixsen, D.J., Offenberg, J.D., Hanisch, R.J., Mather, J.C, Nieto-
    Santisteban, M.A., Sengupta, R., & Stockman, H.S., 2000, PASP, 112, 1350`.

    Parameters
    ----------
    snr : ndarray
        signal-to-noise for the ramp segments, 1-D float

    Returns
    -------
    pow_wt : ndarray
        weighting exponent, 1-D float
    """
    # NOTE(review): snr.copy() * 0.0 propagates any NaN in snr into pow_wt
    #   (0 * NaN == NaN) — confirm inputs are finite.
    pow_wt = snr.copy() * 0.0

    # Thresholds must be applied in ascending order: each later assignment
    #   overwrites the earlier one for pixels in the higher SNR bin.
    pow_wt[np.where(snr > 5.)] = 0.4
    pow_wt[np.where(snr > 10.)] = 1.0
    pow_wt[np.where(snr > 20.)] = 3.0
    pow_wt[np.where(snr > 50.)] = 6.0
    pow_wt[np.where(snr > 100.)] = 10.0

    return pow_wt.ravel()


def interpolate_power(snr):
    """
    Using the given SNR, interpolate the weighting exponent, which is from
    `Fixsen, D.J., Offenberg, J.D., Hanisch, R.J., Mather, J.C, Nieto-
    Santisteban, M.A., Sengupta, R., & Stockman, H.S., 2000, PASP, 112, 1350`.

    Parameters
    ----------
    snr : ndarray
        signal-to-noise for the ramp segments, 1-D float

    Returns
    -------
    pow_wt : ndarray
        weighting exponent, 1-D float
    """
    pow_wt = snr.copy() * 0.0

    # Piecewise-linear interpolation between the tabulated exponents; as in
    #   calc_power, the bins must be applied in ascending order so higher-SNR
    #   assignments overwrite lower ones.
    pow_wt[np.where(snr > 5.)] = ((snr[snr > 5] - 5) / (10 - 5)) * 0.6 + 0.4
    pow_wt[np.where(snr > 10.)] = ((snr[snr > 10] - 10) / (20 - 10)) * 2.0 + 1.0
    pow_wt[np.where(snr > 20.)] = ((snr[snr > 20] - 20)) / (50 - 20) * 3.0 + 3.0
    pow_wt[np.where(snr > 50.)] = ((snr[snr > 50] - 50)) / (100 - 50) * 4.0 + 6.0
    pow_wt[np.where(snr > 100.)] = 10.0

    return pow_wt.ravel()


def calc_slope(data_sect, gdq_sect, frame_time, opt_res, save_opt, rn_sect,
               gain_sect, i_max_seg, ngroups, weighting, f_max_seg):
    """
    Compute the slope of each segment for each pixel in the data cube section
    for the current integration.
Each segment has its slope fit in fit_lines(); that slope and other quantities from the fit are added to the 'optional result' object by append_arr() from the appropriate 'CASE' (type of segment) in fit_next_segment(). Parameters ---------- data_sect : ndarray section of input data cube array, 3-D float gdq_sect : ndarray section of GROUPDQ data quality array, 3-D int frame_time : float integration time opt_res : OptRes object Contains all quantities derived from fitting all segments in all pixels in all integrations, which will eventually be used to compute per-integration and per-exposure quantities for all pixels. It's also used to populate the optional product, when requested. save_opt : bool save optional fitting results rn_sect : ndarray read noise values for all pixels in data section gain_sect : ndarray gain values for all pixels in data section i_max_seg : int used for size of initial allocation of arrays for optional results; maximum possible number of segments within the ramp, based on the number of CR flags ngroups : int number of groups per integration weighting : str 'optimal' specifies that optimal weighting should be used; currently the only weighting supported. f_max_seg : int actual maximum number of segments within a ramp, based on the fitting of all ramps; later used when truncating arrays before output. Returns ------- gdq_sect : ndarray data quality flags for pixels in section, 3-D int inv_var : ndarray values of 1/variance for good pixels, 1-D float opt_res : OptRes object contains all quantities related to fitting for use in computing final slopes, variances, etc. and is used to populate the optional output f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. 
num_seg : ndarray numbers of segments for good pixels, 1-D int """ ngroups, nrows, ncols = data_sect.shape npix = nrows * ncols # number of pixels in section of 2D array all_pix = np.arange(npix) arange_ngroups_col = np.arange(ngroups)[:, np.newaxis] start = np.zeros(npix, dtype=np.int32) # lowest channel in fit # Highest channel in fit initialized to last read end = np.zeros(npix, dtype=np.int32) + (ngroups - 1) pixel_done = (end < 0) # False until processing is done inv_var = np.zeros(npix, dtype=np.float32) # inverse of fit variance num_seg = np.zeros(npix, dtype=np.int32) # number of segments per pixel # End stack array - endpoints for each pixel # initialize with ngroups for each pixel; set 1st channel to 0 end_st = np.zeros((ngroups + 1, npix), dtype=np.int32) end_st[0, :] = ngroups - 1 # end_heads is initially a tuple populated with every pixel that is # either saturated or contains a cosmic ray based on the input DQ # array, so is sized to accomodate the maximum possible number of # pixels flagged. It is later compressed to be an array denoting # the number of endpoints per pixel. end_heads = np.ones(npix * ngroups, dtype=np.int32) # Create nominal 2D ERR array, which is 1st slice of # avged_data_cube * readtime err_2d_array = data_sect[0, :, :] * frame_time # Suppress, then re-enable, harmless arithmetic warnings ''' warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) ''' err_2d_array[err_2d_array < 0] = 0 warnings.resetwarnings() # Frames >= start and <= end will be masked. 
However, the first channel # to be included in fit will be the read in which a cosmic ray has # been flagged mask_2d = ((arange_ngroups_col >= start[np.newaxis, :]) & (arange_ngroups_col <= end[np.newaxis, :])) end = 0 # array no longer needed # Section of GROUPDQ dq section, excluding bad dq values in mask gdq_sect_r = np.reshape(gdq_sect, (ngroups, npix)) mask_2d[gdq_sect_r != 0] = False # saturated or CR-affected mask_2d_init = mask_2d.copy() # initial flags for entire ramp wh_f = np.where(np.logical_not(mask_2d)) these_p = wh_f[1] # coordinates of pixels flagged as False these_r = wh_f[0] # reads of pixels flagged as False del wh_f # Populate end_st to contain the set of end points for each pixel. # Populate end_heads to initially include every pixel that is either # saturated or contains a cosmic ray. Skips the duplicated final group # for saturated pixels. Saturated pixels resulting in a contiguous set # of intervals of length 1 will later be flagged as too short # to fit well. for ii, val in enumerate(these_p): if these_r[ii] != (ngroups - 1): end_st[end_heads[these_p[ii]], these_p[ii]] = these_r[ii] end_heads[these_p[ii]] += 1 # Sort and reverse array to handle the order that saturated pixels # were added end_st.sort(axis=0) end_st = end_st[::-1] # Reformat to designate the number of endpoints per pixel; compress # to specify number of groups per pixel end_heads = (end_st > 0).sum(axis=0) # Create object to hold optional results opt_res.init_2d(npix, i_max_seg, save_opt) # LS fit until 'ngroups' iterations or all pixels in # section have been processed for iter_num in range(ngroups): if pixel_done.all(): break # frames >= start and <= end_st will be included in fit mask_2d = \ ((arange_ngroups_col >= start) & (arange_ngroups_col < (end_st[end_heads[all_pix] - 1, all_pix] + 1))) mask_2d[gdq_sect_r != 0] = False # RE-exclude bad group dq values # for all pixels, update arrays, summing slope and variance f_max_seg, num_seg = \ fit_next_segment(start, end_st, 
end_heads, pixel_done, data_sect, mask_2d, mask_2d_init, inv_var, num_seg, opt_res, save_opt, rn_sect, gain_sect, ngroups, weighting, f_max_seg) if f_max_seg is None: f_max_seg = 1 arange_ngroups_col = 0 all_pix = 0 return gdq_sect, inv_var, opt_res, f_max_seg, num_seg def fit_next_segment(start, end_st, end_heads, pixel_done, data_sect, mask_2d, mask_2d_init, inv_var, num_seg, opt_res, save_opt, rn_sect, gain_sect, ngroups, weighting, f_max_seg): """ Call routine to LS fit masked data for a single segment for all pixels in data section. Then categorize each pixel's fitting interval based on interval length, and whether the interval is at the end of the array. Update the start array, the end stack array, the end_heads array which contains the number of endpoints. For pixels in which the fitting intervals are long enough, the resulting slope and variance are added to the appropriate stack arrays. The first channel to fit in a segment is either the first group in the ramp, or a group in which a cosmic ray has been flagged. 
Parameters ---------- start : ndarray lowest channel in fit, 1-D int end_st : ndarray stack array of endpoints, 2-D int end_heads : ndarray number of endpoints for each pixel, 1-D int pixel_done : ndarray whether each pixel's calculations are completed, 1-D bool data_sect : ndarray data cube section, 3-D float mask_2d : ndarray delineates which channels to fit for each pixel, 2-D bool mask_2d_init : ndarray copy of intial mask_2d, 2-D bool inv_var : ndarray values of 1/variance for good pixels, 1-D float num_seg : ndarray numbers of segments for good pixels, 1-D int opt_res : OptRes object all fitting quantities, used to compute final results and to populate optional output product save_opt : bool save optional fitting results rn_sect : ndarray read noise values for all pixels in data section gain_sect : ndarray gain values for all pixels in data section ngroups : int number of groups per integration weighting : str 'optimal' specifies that optimal weighting should be used; currently the only weighting supported. f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. Returns ------- f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. 
num_seg : ndarray numbers of segments for good pixels, 1-D int """ ngroups, nrows, ncols = data_sect.shape all_pix = np.arange(nrows * ncols) ramp_mask_sum = mask_2d_init.sum(axis=0) # Compute fit quantities for the next segment of all pixels # Each returned array below is 1D, for all npix pixels for current segment slope, intercept, variance, sig_intercept, sig_slope = \ fit_lines(data_sect, mask_2d, rn_sect, gain_sect, ngroups, weighting) end_locs = end_st[end_heads[all_pix] - 1, all_pix] # Set the fitting interval length; for a segment having >1 groups, this is # the number of groups-1 l_interval = end_locs - start wh_done = (start == -1) # done pixels l_interval[wh_done] = 0 # set interval lengths for done pixels to 0 # Create array to set when each good pixel is classified for the current # semiramp (to enable unclassified pixels to have their arrays updated) got_case = np.zeros((ncols * nrows), dtype=bool) # Special case fit with NGROUPS being 1 or 2. if ngroups == 1 or ngroups == 2: return fit_short_ngroups( ngroups, start, end_st, end_heads, pixel_done, all_pix, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init, ramp_mask_sum) # CASE: Long enough (semiramp has >2 groups), at end of ramp wh_check = np.where((l_interval > 1) & (end_locs == ngroups - 1) & (~pixel_done)) if len(wh_check[0]) > 0: f_max_seg = fit_next_segment_long_end_of_ramp( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt) # CASE: Long enough (semiramp has >2 groups ), not at array end (meaning # final group for this semiramp is not final group of the whole ramp) wh_check = np.where((l_interval > 2) & (end_locs != ngroups - 1) & ~pixel_done) if len(wh_check[0]) > 0: f_max_seg = fit_next_segment_long_not_end_of_ramp( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, 
sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init, end_locs, ngroups) # CASE: interval too short to fit normally (only 2 good groups) # At end of array, NGROUPS>1, but exclude NGROUPS==2 datasets # as they are covered in `fit_short_ngroups`. wh_check = np.where((l_interval == 1) & (end_locs == ngroups - 1) & (ngroups > 2) & (~pixel_done)) if len(wh_check[0]) > 0: f_max_seg = fit_next_segment_short_seg_at_end( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init) # CASE: full-length ramp has 2 good groups not at array end wh_check = np.where((l_interval == 2) & (ngroups > 2) & (end_locs != ngroups - 1) & ~pixel_done) if len(wh_check[0]) > 0: f_max_seg = fit_next_segment_short_seg_not_at_end( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init, end_locs, ngroups) # CASE: full-length ramp has a good group on 0th group of the entire ramp, # and no later good groups. Will use single good group data as the slope. wh_check = np.where( mask_2d_init[0, :] & ~mask_2d_init[1, :] & (ramp_mask_sum == 1) & ~pixel_done) if len(wh_check[0]) > 0: f_max_seg = fit_next_segment_only_good_0th_group( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init) # CASE: the segment has a good 0th group and a bad 1st group. wh_check = np.where(mask_2d_init[0, :] & ~mask_2d_init[1, :] & ~pixel_done & (end_locs == 1) & (start == 0)) if len(wh_check[0]) > 0: fit_next_segment_good_0th_bad_1st( wh_check, start, end_st, end_heads, got_case, ngroups) # CASE OTHER: all other types of segments not covered earlier. No segments # handled here have adequate data, but the stack arrays are updated. 
    wh_check = np.asarray(np.where(~pixel_done & ~got_case))
    if len(wh_check[0]) > 0:
        fit_next_segment_all_other(wh_check, start, end_st, end_heads, ngroups)

    return f_max_seg, num_seg


def fit_next_segment_all_other(wh_check, start, end_st, end_heads, ngroups):
    """
    Catch all other types of segments not covered earlier. No segments
    handled here have adequate data, but the stack arrays are updated.
        - increment start array
        - remove current end from end stack
        - decrement number of ends

    Parameters
    ----------
    wh_check : ndarray
        pixels for current segment processing and updating, 1-D

    start : ndarray
        lowest channel in fit, 1-D int

    end_st : ndarray
        stack array of endpoints, 2-D int

    end_heads : ndarray
        number of endpoints for each pixel, 1-D int

    ngroups : int
        number of groups in exposure
    """
    these_pix = wh_check[0]

    # Advance past the current (unusable) group
    start[these_pix] += 1
    start[start > ngroups - 1] = ngroups - 1  # to keep at max level
    # Pop the current endpoint off the stack for these pixels
    end_st[end_heads[these_pix] - 1, these_pix] = 0
    end_heads[these_pix] -= 1
    end_heads[end_heads < 0.] = 0.


def fit_next_segment_good_0th_bad_1st(
        wh_check, start, end_st, end_heads, got_case, ngroups):
    """
    The segment has a good 0th group and a bad 1st group.

    For the data from the 0th good group of this segment to possibly be used as
    a slope, that group must necessarily be the 0th group of the entire ramp.
    It is possible to have a single 'good' group segment after the 0th group of
    the ramp; in that case the 0th group and the 1st group would both have to
    be CRs, and the data of the 0th group would not be included as a slope.

    For a good 0th group in a ramp followed by a bad 1st group there must be
    good groups later in the segment because if there were not, the segment
    would be done in `fit_next_segment_only_good_0th_group`. In this situation,
    since there are later good groups in the segment, those later good groups
    will be used in the slope computation, and the 0th good group will not be.
As a result, for all instances of these types of segments, the data in the initial good group will not be used in the slope calculation, but the arrays for the indices for the ramp (end_st, etc) are appropriately adjusted. - increment start array - remove current end from end stack - decrement number of ends Parameters ---------- wh_check : ndarray pixels for current segment processing and updating, 1-D start : ndarray lowest channel in fit, 1-D int end_st : ndarray stack array of endpoints, 2-D int end_heads : ndarray number of endpoints for each pixel, 1-D int got_case : ndarray classification of pixel for current semiramp, 1-D ngroups : int number of groups in exposure """ these_pix = wh_check[0] got_case[these_pix] = True start[these_pix] += 1 start[start > ngroups - 1] = ngroups - 1 # to keep at max level end_st[end_heads[these_pix] - 1, these_pix] = 0 end_heads[these_pix] -= 1 end_heads[end_heads < 0.] = 0. def fit_next_segment_only_good_0th_group( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init): """ Full-length ramp has a good group on 0th group of the entire ramp, and no later good groups. Will use single good group data as the slope. 
- set start to -1 to designate all fitting done - remove current end from end stack - set number of end to 0 - add slopes and variances to running sums - set pixel_done to True to designate all fitting done Parameters ---------- wh_check : ndarray pixels for current segment processing and updating, 1-D start : ndarray lowest channel in fit, 1-D int end_st : ndarray stack array of endpoints, 2-D int end_heads : ndarray number of endpoints for each pixel, 1-D int pixel_done : ndarray whether each pixel's calculations are completed, 1-D bool got_case : ndarray classification of pixel for current semiramp, 1-D f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. inv_var : ndarray values of 1/variance for good pixels, 1-D float num_seg : ndarray numbers of segments for good pixels, 1-D int slope : ndarray weighted slope for current iteration's pixels for data section, 1-D float intercept : ndarray y-intercepts from fit for data section, 1-D float variance : ndarray variance of residuals for fit for data section, 1-D float sig_intercept : ndarray sigma of y-intercepts from fit for data section, 1-D float sig_slope : ndarray sigma of slopes from fit for data section (for a single segment), 1-D float opt_res : OptRes object all fitting quantities, used to compute final results and to populate optional output product save_opt : bool save optional fitting results mask_2d_init : ndarray copy of intial mask_2d, 2-D bool Returns ------- f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. 
    """
    these_pix = wh_check[0]
    got_case[these_pix] = True

    # Mark all processing as finished for these pixels
    start[these_pix] = -1
    end_st[end_heads[these_pix] - 1, these_pix] = 0
    end_heads[these_pix] = 0
    pixel_done[these_pix] = True  # all processing for pixel is completed
    inv_var[these_pix] += 1.0 / variance[these_pix]

    # Append results to arrays
    opt_res.append_arr(num_seg, these_pix, intercept, slope, sig_intercept,
                       sig_slope, inv_var, save_opt)

    num_seg[these_pix] += 1
    # Track the running maximum number of segments seen for any pixel
    f_max_seg = max(f_max_seg, num_seg.max())

    return f_max_seg


def fit_next_segment_short_seg_not_at_end(
        wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg,
        inv_var, num_seg, slope, intercept, variance, sig_intercept,
        sig_slope, opt_res, save_opt, mask_2d_init, end_locs, ngroups):
    """
    Special case

    Full-length ramp has 2 good groups not at array end

        - use the 2 good reads to get the slope
        - set start to -1 to designate all fitting done
        - remove current end from end stack
        - set number of end to 0
        - add slopes and variances to running sums
        - set pixel_done to True to designate all fitting done

    For segments of this type, the final good group in the segment is
    followed by a group that is flagged as a CR and/or SAT and is not the
    final group in the ramp, and the variable `l_interval` used below is
    equal to 2, which is the number of the segment's groups.

    Parameters
    ----------
    wh_check : ndarray
        pixels for current segment processing and updating, 1-D

    start : ndarray
        lowest channel in fit, 1-D int

    end_st : ndarray
        stack array of endpoints, 2-D int

    end_heads : ndarray
        number of endpoints for each pixel, 1-D int

    pixel_done : ndarray
        whether each pixel's calculations are completed, 1-D bool

    got_case : ndarray
        classification of pixel for current semiramp, 1-D

    f_max_seg : int
        actual maximum number of segments within a ramp, updated here based on
        fitting ramps in the current data section; later used when truncating
        arrays before output.
inv_var : ndarray values of 1/variance for good pixels, 1-D float num_seg : ndarray numbers of segments for good pixels, 1-D int slope : ndarray weighted slope for current iteration's pixels for data section, 1-D float intercept : ndarray y-intercepts from fit for data section, 1-D float variance : ndarray variance of residuals for fit for data section, 1-D float sig_intercept : ndarray sigma of y-intercepts from fit for data section, 1-D float sig_slope : ndarray sigma of slopes from fit for data section (for a single segment), 1-D float opt_res : OptRes object all fitting quantities, used to compute final results and to populate optional output product save_opt : bool save optional fitting results mask_2d_init : ndarray copy of intial mask_2d, 2-D bool end_locs : ndarray end locations, 1-D ngroups : int number of groups in exposure Returns ------- f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. 
""" # Copy mask, as will modify when calculating the number of later good groups c_mask_2d_init = mask_2d_init.copy() these_pix = wh_check[0] got_case[these_pix] = True # Suppress, then re-enable, harmless arithmetic warnings warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) inv_var[these_pix] += 1.0 / variance[these_pix] warnings.resetwarnings() # create array: 0...ngroups-1 in a column for each pixel arr_ind_all = np.array( [np.arange(ngroups), ] * c_mask_2d_init.shape[1]).transpose() wh_c_start_all = np.zeros(mask_2d_init.shape[1], dtype=np.uint8) wh_c_start_all[these_pix] = start[these_pix] # set to False all groups before start group c_mask_2d_init[arr_ind_all < wh_c_start_all] = 0 tot_good_groups = c_mask_2d_init.sum(axis=0) # Select pixels having at least 2 later good groups (these later good # groups are a segment whose slope will be calculated) wh_more = np.where(tot_good_groups[these_pix] > 1) pix_more = these_pix[wh_more] start[pix_more] = end_locs[pix_more] end_st[end_heads[pix_more] - 1, pix_more] = 0 end_heads[pix_more] -= 1 # Select pixels having less than 2 later good groups (these later good # groups will not be used) wh_only = np.where(tot_good_groups[these_pix] <= 1) pix_only = these_pix[wh_only] start[pix_only] = -1 end_st[end_heads[pix_only] - 1, pix_only] = 0 end_heads[pix_only] = 0 pixel_done[pix_only] = True # all processing for pixel is completed end_heads[(end_heads < 0.)] = 0. 
# Append results to arrays opt_res.append_arr(num_seg, these_pix, intercept, slope, sig_intercept, sig_slope, inv_var, save_opt) num_seg[these_pix] += 1 f_max_seg = max(f_max_seg, num_seg.max()) return f_max_seg def fit_next_segment_short_seg_at_end( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init): """ Interval too short to fit normally (only 2 good groups) At end of array, NGROUPS>1, but exclude NGROUPS==2 datasets as they are covered in `fit_short_groups`. - set start to -1 to designate all fitting done - remove current end from end stack - set number of ends to 0 - add slopes and variances to running sums - set pixel_done to True to designate all fitting done For segments of this type, the final good group is the final group in the ramp, and the variable `l_interval` used below = 1, and the number of groups in the segment = 2 Parameters ---------- wh_check : ndarray pixels for current segment processing and updating, 1-D start : ndarray lowest channel in fit, 1-D int end_st : ndarray stack array of endpoints, 2-D int end_heads : ndarray number of endpoints for each pixel, 1-D int pixel_done : ndarray whether each pixel's calculations are completed, 1-D bool got_case : ndarray classification of pixel for current semiramp, 1-D f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. 
inv_var : ndarray values of 1/variance for good pixels, 1-D float num_seg : ndarray numbers of segments for good pixels, 1-D int slope : ndarray weighted slope for current iteration's pixels for data section, 1-D float intercept : ndarray y-intercepts from fit for data section, 1-D float variance : ndarray variance of residuals for fit for data section, 1-D float sig_intercept : ndarray sigma of y-intercepts from fit for data section, 1-D float sig_slope : ndarray sigma of slopes from fit for data section (for a single segment), 1-D float opt_res : OptRes object all fitting quantities, used to compute final results and to populate optional output product save_opt : bool save optional fitting results mask_2d_init : ndarray copy of intial mask_2d, 2-D bool Returns ------- f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. """ # Require that pixels to be processed here have at least 1 good group out # of the final 2 groups (these ramps have 2 groups and are at the end of # the array). wh_list = [] num_wh = len(wh_check[0]) for ii in range(num_wh): # locate pixels with at least 1 good group this_pix = wh_check[0][ii] sum_final_2 = mask_2d_init[start[this_pix]:, this_pix].sum() if sum_final_2 > 0: wh_list.append(wh_check[0][ii]) # add to list to be fit if len(wh_list) > 0: these_pix = np.asarray(wh_list) got_case[these_pix] = True start[these_pix] = -1 end_st[end_heads[these_pix] - 1, these_pix] = 0 end_heads[these_pix] = 0 pixel_done[these_pix] = True g_pix = these_pix[variance[these_pix] > 0.] 
# good pixels if len(g_pix) > 0: inv_var[g_pix] += 1.0 / variance[g_pix] # Append results to arrays opt_res.append_arr(num_seg, g_pix, intercept, slope, sig_intercept, sig_slope, inv_var, save_opt) num_seg[g_pix] += 1 f_max_seg = max(f_max_seg, num_seg.max()) return f_max_seg def fit_next_segment_long_not_end_of_ramp( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init, end_locs, ngroups): """ Special case fitting long segment at the end of ramp. Long enough (semiramp has >2 groups ), not at array end (meaning final group for this semiramp is not final group of the whole ramp) - remove current end from end stack - decrement number of ends - add slopes and variances to running sums For segments of this type, the final good group in the segment is a CR and/or SAT and is not the final group in the ramp, and the variable `l_interval` used below is equal to the number of the segment's groups. Parameters ---------- wh_check : ndarray pixels for current segment processing and updating, 1-D start : ndarray lowest channel in fit, 1-D int end_st : ndarray stack array of endpoints, 2-D int end_heads : ndarray number of endpoints for each pixel, 1-D int pixel_done : ndarray whether each pixel's calculations are completed, 1-D bool got_case : ndarray classification of pixel for current semiramp, 1-D f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. 
inv_var : ndarray values of 1/variance for good pixels, 1-D float num_seg : ndarray numbers of segments for good pixels, 1-D int slope : ndarray weighted slope for current iteration's pixels for data section, 1-D float intercept : ndarray y-intercepts from fit for data section, 1-D float variance : ndarray variance of residuals for fit for data section, 1-D float sig_intercept : ndarray sigma of y-intercepts from fit for data section, 1-D float sig_slope : ndarray sigma of slopes from fit for data section (for a single segment), 1-D float opt_res : OptRes object all fitting quantities, used to compute final results and to populate optional output product save_opt : bool save optional fitting results end_locs : ndarray end locations, 1-D mask_2d_init : ndarray copy of intial mask_2d, 2-D bool ngroups : int number of groups in exposure Returns ------- f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. """ these_pix = wh_check[0] got_case[these_pix] = True start[these_pix] = end_locs[these_pix] end_st[end_heads[these_pix] - 1, these_pix] = 0 end_heads[these_pix] -= 1 end_heads[end_heads < 0.] = 0. g_pix = these_pix[variance[these_pix] > 0.] 
# good pixels if len(g_pix) > 0: inv_var[g_pix] += 1.0 / variance[g_pix] # Append results to arrays opt_res.append_arr(num_seg, g_pix, intercept, slope, sig_intercept, sig_slope, inv_var, save_opt) num_seg[g_pix] += 1 f_max_seg = max(f_max_seg, num_seg.max()) # If there are pixels with no later good groups, update stack # arrays accordingly c_mask_2d_init = mask_2d_init.copy() # create array: 0...ngroups-1 in a column for each pixel arr_ind_all = np.array( [np.arange(ngroups), ] * c_mask_2d_init.shape[1]).transpose() wh_c_start_all = np.zeros(c_mask_2d_init.shape[1], dtype=np.uint8) wh_c_start_all[g_pix] = start[g_pix] # set to False all groups before start group c_mask_2d_init[arr_ind_all < wh_c_start_all] = False # select pixels having all groups False from start to ramp end wh_rest_false = np.where(c_mask_2d_init.sum(axis=0) == 0) if len(wh_rest_false[0]) > 0: pix_rest_false = wh_rest_false[0] start[pix_rest_false] = -1 end_st[end_heads[pix_rest_false] - 1, pix_rest_false] = 0 end_heads[pix_rest_false] = 0 pixel_done[pix_rest_false] = True # all processing is complete return f_max_seg def fit_next_segment_long_end_of_ramp( wh_check, start, end_st, end_heads, pixel_done, got_case, f_max_seg, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt): """ Long enough (semiramp has >2 groups), at end of ramp - set start to -1 to designate all fitting done - remove current end from end stack - set number of ends to 0 - add slopes and variances to running sums For segments of this type, the final good group is the final group in the ramp, and the variable `l_interval` used below is equal to the number of the segment's groups minus 1. 
Parameters ---------- wh_check : ndarray pixels for current segment processing and updating, 1-D start : ndarray lowest channel in fit, 1-D int end_st : ndarray stack array of endpoints, 2-D int end_heads : ndarray number of endpoints for each pixel, 1-D int pixel_done : ndarray whether each pixel's calculations are completed, 1-D bool got_case : ndarray classification of pixel for current semiramp, 1-D f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. inv_var : ndarray values of 1/variance for good pixels, 1-D float num_seg : ndarray numbers of segments for good pixels, 1-D int slope : ndarray weighted slope for current iteration's pixels for data section, 1-D float intercept : ndarray y-intercepts from fit for data section, 1-D float variance : ndarray variance of residuals for fit for data section, 1-D float sig_intercept : ndarray sigma of y-intercepts from fit for data section, 1-D float sig_slope : ndarray sigma of slopes from fit for data section (for a single segment), 1-D float opt_res : OptRes object all fitting quantities, used to compute final results and to populate optional output product save_opt : bool save optional fitting results Returns ------- f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. """ these_pix = wh_check[0] start[these_pix] = -1 # all processing for this pixel is completed end_st[end_heads[these_pix] - 1, these_pix] = 0 end_heads[these_pix] = 0 pixel_done[these_pix] = True # all processing for pixel is completed got_case[these_pix] = True with warnings.catch_warnings(): warnings.filterwarnings("ignore", "invalid value.*", RuntimeWarning) g_pix = these_pix[variance[these_pix] > 0.] 
# good pixels if len(g_pix) > 0: inv_var[g_pix] += 1.0 / variance[g_pix] # Append results to arrays opt_res.append_arr(num_seg, g_pix, intercept, slope, sig_intercept, sig_slope, inv_var, save_opt) num_seg[g_pix] += 1 f_max_seg = max(f_max_seg, num_seg.max()) return f_max_seg def fit_short_ngroups( ngroups, start, end_st, end_heads, pixel_done, all_pix, inv_var, num_seg, slope, intercept, variance, sig_intercept, sig_slope, opt_res, save_opt, mask_2d_init, ramp_mask_sum): """ Special case fitting for short ngroups fit. Parameters ---------- ngroups : int number of groups in exposure start : ndarray lowest channel in fit, 1-D int end_st : ndarray stack array of endpoints, 2-D int end_heads : ndarray number of endpoints for each pixel, 1-D int pixel_done : ndarray whether each pixel's calculations are completed, 1-D bool all_pix : ndarray all pixels in image, 1-D inv_var : ndarray values of 1/variance for good pixels, 1-D float num_seg : ndarray numbers of segments for good pixels, 1-D int slope : ndarray weighted slope for current iteration's pixels for data section, 1-D float intercept : ndarray y-intercepts from fit for data section, 1-D float variance : float, ndarray variance of residuals for fit for data section, 1-D sig_intercept : ndarray sigma of y-intercepts from fit for data section, 1-D float sig_slope : ndarray sigma of slopes from fit for data section (for a single segment), 1-D float opt_res : OptRes object all fitting quantities, used to compute final results and to populate optional output product save_opt : bool save optional fitting results mask_2d_init : ndarray copy of intial mask_2d, 2-D bool ramp_mask_sum : ndarray number of channels to fit for each pixel, 1-D int Returns ------- f_max_seg : int actual maximum number of segments within a ramp, updated here based on fitting ramps in the current data section; later used when truncating arrays before output. 
num_seg : ndarray numbers of segments for good pixels, 1-D int """ # Dataset has NGROUPS=2, so special fitting is done for all pixels. # All segments are at the end of the array. # - set start to -1 to designate all fitting done # - remove current end from end stack # - set number of ends to 0 # - add slopes and variances to running sums # - set pixel_done to True to designate all fitting done if ngroups == 2: start[all_pix] = -1 end_st[end_heads[all_pix] - 1, all_pix] = 0 end_heads[all_pix] = 0 pixel_done[all_pix] = True g_pix = all_pix[variance[all_pix] > 0.] if len(g_pix) > 0: inv_var[g_pix] += 1.0 / variance[g_pix] opt_res.append_arr(num_seg, g_pix, intercept, slope, sig_intercept, sig_slope, inv_var, save_opt) num_seg[g_pix] = 1 return 1, num_seg # Dataset has NGROUPS=1 ; so special fitting is done for all pixels # and all intervals are at the end of the array. # - set start to -1 to designate all fitting done # - remove current end from end stack # - set number of ends to 0 # - add slopes and variances to running sums # - set pixel_done to True to designate all fitting done start[all_pix] = -1 end_st[end_heads[all_pix] - 1, all_pix] = 0 end_heads[all_pix] = 0 pixel_done[all_pix] = True wh_check = np.where(mask_2d_init[0, :] & (ramp_mask_sum == 1)) if len(wh_check[0]) > 0: g_pix = wh_check[0] # Ignore all pixels having no good groups (so the single group is bad) if len(g_pix) > 0: inv_var[g_pix] += 1.0 / variance[g_pix] # Append results to arrays opt_res.append_arr(num_seg, g_pix, intercept, slope, sig_intercept, sig_slope, inv_var, save_opt) num_seg[g_pix] = 1 return 1, num_seg def fit_lines(data, mask_2d, rn_sect, gain_sect, ngroups, weighting): """ Do linear least squares fit to data cube in this integration for a single segment for all pixels. In addition to applying the mask due to identified cosmic rays, the data is also masked to exclude intervals that are too short to fit well. 
The first channel to fit in a segment is either the first group in the ramp, or a group in which a cosmic ray has been flagged. Parameters ---------- data : ndarray array of values for current data section, 3-D float mask_2d : ndarray delineates which channels to fit for each pixel, 2-D bool rn_sect : ndarray read noise values for all pixels in data section gain_sect : ndarray gain values for all pixels in data section ngroups : int number of groups per integration weighting : str 'optimal' specifies that optimal weighting should be used; currently the only weighting supported. Returns ------- Note - all of these pertain to a single segment (hence '_s') slope_s : ndarray 1-D weighted slope for current iteration's pixels for data section intercept_s : ndarray 1-D y-intercepts from fit for data section variance_s : ndarray 1-D variance of residuals for fit for data section sig_intercept_s : ndarray 1-D sigma of y-intercepts from fit for data section sig_slope_s : ndarray 1-D sigma of slopes from fit for data section (for a single segment) """ # To ensure that the first channel to be fit is the cosmic-ray-affected # group, the channel previous to each channel masked as good is # also masked as good. This is only for the local purpose of setting # the first channel, and will not propagate beyond this current function # call. c_mask_2d = mask_2d.copy() wh_mask_2d = np.where(c_mask_2d) c_mask_2d[np.maximum(wh_mask_2d[0] - 1, 0), wh_mask_2d[1]] = True del wh_mask_2d # num of reads/pixel unmasked nreads_1d = c_mask_2d.astype(np.int16).sum(axis=0) npix = c_mask_2d.shape[1] slope_s = np.zeros(npix, dtype=np.float32) variance_s = np.zeros(npix, dtype=np.float32) intercept_s = np.zeros(npix, dtype=np.float32) sig_intercept_s = np.zeros(npix, dtype=np.float32) sig_slope_s = np.zeros(npix, dtype=np.float32) # Calculate slopes etc. 
for datasets having either 1 or 2 groups per # integration, and return if ngroups == 1: # process all pixels in 1 group/integration dataset slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s = \ fit_1_group(slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s, npix, data, c_mask_2d) return slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s if ngroups == 2: # process all pixels in 2 group/integration dataset rn_sect_1d = rn_sect.reshape(npix) slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s = \ fit_2_group(slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s, npix, data, c_mask_2d, rn_sect_1d) return slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s # reshape data_masked data_masked = data * np.reshape(c_mask_2d, data.shape) data_masked = np.reshape(data_masked, (data_masked.shape[0], npix)) # For datasets having >2 groups/integration, for any semiramp in which the # 0th group is good and the 1st group is bad, determine whether or not to # use the 0th group. 
wh_pix_1r = np.where(c_mask_2d[0, :] & (np.logical_not(c_mask_2d[1, :]))) if len(wh_pix_1r[0]) > 0: slope_s, intercept_s, variance_s, sig_intercept_s, \ sig_slope_s = fit_single_read(slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s, npix, data, wh_pix_1r) del wh_pix_1r # For datasets having >2 groups/integrations, for any semiramp in which only # the 0th and 1st group are good, set slope, etc wh_pix_2r = np.where(c_mask_2d.sum(axis=0) == 2) # ramps with 2 good groups slope_s, intercept_s, variance_s, sig_slope_s, sig_intercept_s = \ fit_double_read(c_mask_2d, wh_pix_2r, data_masked, slope_s, intercept_s, variance_s, sig_slope_s, sig_intercept_s, rn_sect) del wh_pix_2r # Select ramps having >2 good groups wh_pix_to_use = np.where(c_mask_2d.sum(axis=0) > 2) good_pix = wh_pix_to_use[0] # Ramps with >2 good groups data_masked = data_masked[:, good_pix] del wh_pix_to_use xvalues = np.arange(data_masked.shape[0])[:, np.newaxis] * c_mask_2d xvalues = xvalues[:, good_pix] # set to those pixels to be used c_mask_2d = c_mask_2d[:, good_pix] nreads_1d = nreads_1d[good_pix] if weighting.lower() == 'optimal': # fit using optimal weighting # get sums from optimal weighting sumx, sumxx, sumxy, sumy, nreads_wtd, xvalues = \ calc_opt_sums(rn_sect, gain_sect, data_masked, c_mask_2d, xvalues, good_pix) slope, intercept, sig_slope, sig_intercept = \ calc_opt_fit(nreads_wtd, sumxx, sumx, sumxy, sumy) variance = sig_slope**2. 
# variance due to fit values elif weighting.lower() == 'unweighted': # fit using unweighted weighting # get sums from unweighted weighting sumx, sumxx, sumxy, sumy = calc_unwtd_sums(data_masked, xvalues) slope, intercept, sig_slope, sig_intercept, line_fit =\ calc_unwtd_fit(xvalues, nreads_1d, sumxx, sumx, sumxy, sumy) denominator = nreads_1d * sumxx - sumx**2 # In case this branch is ever used again, disable, and then re-enable # harmless arithmetic warrnings warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) variance = nreads_1d / denominator warnings.resetwarnings() denominator = 0 else: # unsupported weighting type specified log.error('FATAL ERROR: unsupported weighting type specified.') slope_s[good_pix] = slope variance_s[good_pix] = variance intercept_s[good_pix] = intercept sig_intercept_s[good_pix] = sig_intercept sig_slope_s[good_pix] = sig_slope return slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s def fit_single_read(slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s, npix, data, wh_pix_1r): """ For datasets having >2 groups/integrations, for any semiramp in which the 0th group is good and the 1st group is either SAT or CR, set slope, etc. 
Parameters ---------- slope_s : ndarray 1-D weighted slope for current iteration's pixels for data section intercept_s : ndarray 1-D y-intercepts from fit for data section variance_s : ndarray 1-D variance of residuals for fit for data section sig_intercept_s : ndarray 1-D sigma of y-intercepts from fit for data section sig_slope_s : ndarray 1-D sigma of slopes from fit for data section npix : int number of pixels in 2D array data : float array of values for current data section wh_pix_1r : tuple locations of pixels whose only good group is the 0th group Returns ------- slope_s : ndarray 1-D weighted slope for current iteration's pixels for data section intercept_s : ndarray 1-D y-intercepts from fit for data section variance_s : ndarray 1-D variance of residuals for fit for data section sig_slope_s : ndarray 1-D sigma of slopes from fit for data section sig_intercept_s : ndarray 1-D sigma of y-intercepts from fit for data section """ data0_slice = data[0, :, :].reshape(npix) slope_s[wh_pix_1r] = data0_slice[wh_pix_1r] # The following arrays will have values correctly calculated later; for # now they are just place-holders variance_s[wh_pix_1r] = utils.LARGE_VARIANCE sig_slope_s[wh_pix_1r] = 0. intercept_s[wh_pix_1r] = 0. sig_intercept_s[wh_pix_1r] = 0. return slope_s, intercept_s, variance_s, sig_slope_s, sig_intercept_s def fit_double_read(mask_2d, wh_pix_2r, data_masked, slope_s, intercept_s, variance_s, sig_slope_s, sig_intercept_s, rn_sect): """ Process all semi-ramps having exactly 2 good groups. May need to optimize later to remove loop over pixels. 
Parameters ---------- mask_2d : ndarray 2-D bool delineates which channels to fit for each pixel wh_pix_2r : tuple locations of pixels whose only good groups are the 0th and the 1st data_masked : ndarray 2-D masked values for all pixels in data section slope_s : ndarray 1-D weighted slope for current iteration's pixels for data section intercept_s : ndarray 1-D y-intercepts from fit for data section variance_s : ndarray 1-D variance of residuals for fit for data section sig_slope_s : ndarray 1-D sigma of slopes from fit for data section sig_intercept_s : ndarray 1-D sigma of y-intercepts from fit for data section rn_sect : ndarray 2-D read noise values for all pixels in data section Returns ------- slope_s : ndarray 1-D weighted slope for current iteration's pixels for data section intercept_s : ndarray 1-D y-intercepts from fit for data section variance_s : ndarray 1-D variance of residuals for fit for data section sig_slope_s : ndarray 1-D sigma of slopes from fit for data section sig_intercept_s : ndarray 1-D sigma of y-intercepts from fit for data section """ rn_sect_flattened = rn_sect.flatten() for ff in range(len(wh_pix_2r[0])): # loop over the pixels pixel_ff = wh_pix_2r[0][ff] # pixel index (1d) rn = rn_sect_flattened[pixel_ff] # read noise for this pixel read_nums = np.where(mask_2d[:, pixel_ff]) second_read = read_nums[0][1] data_ramp = data_masked[:, pixel_ff] * mask_2d[:, pixel_ff] data_semi = data_ramp[mask_2d[:, pixel_ff]] # picks only the 2 diff_data = data_semi[1] - data_semi[0] slope_s[pixel_ff] = diff_data intercept_s[pixel_ff] = \ data_semi[1] * (1. 
            - second_read) + data_semi[0] * second_read  # by geometry
        variance_s[pixel_ff] = 2.0 * rn * rn
        sig_slope_s[pixel_ff] = np.sqrt(2) * rn
        sig_intercept_s[pixel_ff] = np.sqrt(2) * rn

    return slope_s, intercept_s, variance_s, sig_slope_s, sig_intercept_s


def calc_unwtd_fit(xvalues, nreads_1d, sumxx, sumx, sumxy, sumy):
    """
    Do linear least squares fit to data cube in this integration, using
    unweighted fits to the segments. Currently not supported.

    Parameters
    ----------
    xvalues : ndarray
        1-D int indices of valid pixel values for all groups

    nreads_1d : ndarray
        1-D int number of reads in an integration

    sumxx : float
        sum of squares of xvalues

    sumx : float
        sum of xvalues

    sumxy : float
        sum of product of xvalues and data

    sumy : float
        sum of data

    Returns
    -------
    slope : ndarray
        1-D weighted slope for current iteration's pixels for data section

    intercept : ndarray
        1-D y-intercepts from fit for data section

    sig_slope : ndarray
        1-D sigma of slopes from fit for data section

    sig_intercept : ndarray
        1-D sigma of y-intercepts from fit for data section

    line_fit : ndarray
        1-D values of fit using slope and intercept
    """
    # Normal-equation denominator of the least-squares solution; it is 0 for
    # degenerate pixels (e.g. all xvalues identical), which is why the
    # divide-by-zero / invalid-value warnings are suppressed below.
    denominator = nreads_1d * sumxx - sumx**2

    # In case this branch is ever used again, suppress, and then re-enable
    # harmless arithmetic warnings
    warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning)
    warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning)

    # Standard closed-form simple-linear-regression solution from the
    # accumulated sums; degenerate pixels yield inf/nan here and are
    # expected to be screened out by the caller.
    slope = (nreads_1d * sumxy - sumx * sumy) / denominator
    intercept = (sumxx * sumy - sumx * sumxy) / denominator
    sig_intercept = (sumxx / denominator)**0.5
    sig_slope = (nreads_1d / denominator)**0.5

    warnings.resetwarnings()

    line_fit = (slope * xvalues) + intercept

    return slope, intercept, sig_slope, sig_intercept, line_fit


def calc_opt_fit(nreads_wtd, sumxx, sumx, sumxy, sumy):
    """
    Do linear least squares fit to data cube in this integration for a single
    semi-ramp for all pixels, using optimally weighted fits to the semi_ramps.
The weighting uses the formulation by Fixsen (Fixsen et al, PASP, 112, 1350). Note - these weights, sigmas, and variances pertain only to the fitting, and the variances are *NOT* the variances of the slope due to noise. Parameters ---------- nreads_wtd : ndarray sum of product of data and optimal weight, 1-D float sumxx : ndarray sum of squares of xvalues, 1-D float sumx : ndarray sum of xvalues, 1-D float sumxy : ndarray sum of product of xvalues and data, 1-D float sumy : ndarray sum of data, 1-D float Returns ------- slope : ndarray weighted slope for current iteration's pixels for data section, 1-D float intercept : ndarray y-intercepts from fit for data section, 1-D float sig_slope : ndarray sigma of slopes from fit for data section, 1-D float sig_intercept : ndarray sigma of y-intercepts from fit for data section, 1-D float """ denominator = nreads_wtd * sumxx - sumx**2 # Suppress, and then re-enable harmless arithmetic warnings warnings.filterwarnings("ignore", ".*invalid value.*", RuntimeWarning) warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning) slope = (nreads_wtd * sumxy - sumx * sumy) / denominator intercept = (sumxx * sumy - sumx * sumxy) / denominator sig_intercept = (sumxx / denominator)**0.5 sig_slope = (nreads_wtd / denominator)**0.5 # STD of the slope's fit warnings.resetwarnings() return slope, intercept, sig_slope, sig_intercept def fit_1_group(slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s, npix, data, mask_2d): """ This function sets the fitting arrays for datasets having only 1 group per integration. 
    Parameters
    ----------
    slope_s : ndarray
        weighted slope for current iteration's pixels for data section, 1-D float
    intercept_s : ndarray
        y-intercepts from fit for data section, 1-D float
    variance_s : ndarray
        variance of residuals for fit for data section, 1-D float
    sig_intercept_s : ndarray
        sigma of y-intercepts from fit for data section, 1-D float
    sig_slope_s : ndarray
        sigma of slopes from fit for data section, 1-D float
    npix : int
        number of pixels in 2d array
    data : float
        array of values for current data section
    mask_2d : ndarray
        delineates which channels to fit for each pixel, 2-D bool

    Returns
    -------
    slope_s : ndarray
        weighted slope for current iteration's pixels for data section, 1-D float
    intercept_s : ndarray
        y-intercepts from fit for data section, 1-D float
    variance_s : ndarray
        variance of residuals for fit for data section, 1-D float
    sig_intercept_s : ndarray
        sigma of y-intercepts from fit for data section, 1-D float
    sig_slope_s : ndarray
        sigma of slopes from fit for data section, 1-D float
    """
    # For pixels not saturated, recalculate the slope as the value of the SCI
    # data in that group, which will later be divided by the group exposure
    # time to give the count rate. Recalculate other fit quantities to be
    # benign.
    slope_s = data[0, :, :].reshape(npix)

    # The following arrays will have values correctly calculated later; for
    # now they are just place-holders
    variance_s = np.zeros(npix, dtype=np.float32) + utils.LARGE_VARIANCE
    sig_slope_s = slope_s * 0.
    intercept_s = slope_s * 0.
    sig_intercept_s = slope_s * 0.

    # For saturated pixels (mask False in the 0th group), overwrite the slope
    # with a benign value.
    wh_sat0 = np.where(np.logical_not(mask_2d[0, :]))
    if len(wh_sat0[0]) > 0:
        sat_pix = wh_sat0[0]
        slope_s[sat_pix] = 0.

    return slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s


def fit_2_group(slope_s, intercept_s, variance_s, sig_intercept_s,
                sig_slope_s, npix, data, mask_2d, rn_sect_1d):
    """
    This function sets the fitting arrays for datasets having only 2 groups
    per integration.

    Parameters
    ----------
    slope_s : ndarray
        weighted slope for current iteration's pixels for data section, 1-D float
    intercept_s : ndarray
        y-intercepts from fit for data section, 1-D float
    variance_s : ndarray
        variance of residuals for fit for data section, 1-D float
    sig_intercept_s : ndarray
        sigma of y-intercepts from fit for data section, 1-D float
    sig_slope_s : ndarray
        sigma of slopes from fit for data section, 1-D float
    npix : int
        number of pixels in 2d array
    data : float
        array of values for current data section
    mask_2d : ndarray
        delineates which channels to fit for each pixel, 2-D bool
    rn_sect_1d : ndarray
        read noise values for all pixels in data section, 1-D float

    Returns
    -------
    slope_s : ndarray
        weighted slope for current iteration's pixels for data section, 1-D float
    intercept_s : ndarray
        y-intercepts from fit for data section, 1-D float
    variance_s : ndarray
        variance of residuals for fit for data section, 1-D float
    sig_intercept_s : ndarray
        sigma of y-intercepts from fit for data section, 1-D float
    sig_slope_s : ndarray
        sigma of slopes from fit for data section, 1-D float
    """
    # For pixels saturated on the first group, overwrite fit values with
    # benign values to be recalculated later.
    wh_sat0 = np.where(np.logical_not(mask_2d[0, :]))
    if len(wh_sat0[0]) > 0:
        sat_pix = wh_sat0[0]
        slope_s[sat_pix] = 0.
        variance_s[sat_pix] = 0.
        sig_slope_s[sat_pix] = 0.
        intercept_s[sat_pix] = 0.
        sig_intercept_s[sat_pix] = 0.
    del wh_sat0

    # For pixels saturated on the second group, recalculate the slope as
    # the value of the SCI data in the first group, which will later be
    # divided by the group exposure time to give the count rate, and
    # recalculate the other fit quantities to be benign. Note: these pixels
    # will already have been handled earlier (for intervals of arbitrary
    # length) in this function, but are being included here to explicitly
    # cover all possibilities for pixels in datasets with ngroups=2. Will
    # later consider refactoring.
    wh_sat1 = np.where((mask_2d[:, :].sum(axis=0) == 1) & mask_2d[0, :])
    if len(wh_sat1[0]) > 0:
        data0_slice = data[0, :, :].reshape(npix)
        slope_s[wh_sat1] = data0_slice[wh_sat1]

        # set variance non-zero because calling function uses variance=0 to
        # throw out bad results; this is not bad
        variance_s[wh_sat1] = 1.
        sig_slope_s[wh_sat1] = 0.
        intercept_s[wh_sat1] = 0.
        sig_intercept_s[wh_sat1] = 0.
    del wh_sat1

    # For pixels with no saturated values, recalculate the slope as the
    # difference between the values of the second and first groups (1-based),
    # which will later be divided by the group exposure time to give the count
    # rate, and recalculate other fit quantities to be benign.
    wh_sat_no = np.where(mask_2d[:, :].sum(axis=0) == 2)
    if len(wh_sat_no[0]) > 0:
        data0_slice = data[0, :, :].reshape(npix)
        data1_slice = data[1, :, :].reshape(npix)
        slope_s[wh_sat_no] = data1_slice[wh_sat_no] - data0_slice[wh_sat_no]
        sig_slope_s[wh_sat_no] = np.sqrt(2) * rn_sect_1d[wh_sat_no]
        # NOTE(review): intercept is set to (first - second) group value,
        # i.e. -slope; confirm this "by geometry" convention against the
        # x-origin assumed by the caller — TODO confirm.
        intercept_s[wh_sat_no] = data0_slice[wh_sat_no] -\
            data1_slice[wh_sat_no]  # by geometry
        sig_intercept_s[wh_sat_no] = np.sqrt(2) * rn_sect_1d[wh_sat_no]
        # NOTE(review): this stores sqrt(2)*readnoise, which is a sigma, not
        # a variance — verify downstream usage expects this scaling.
        variance_s[wh_sat_no] = np.sqrt(2) * rn_sect_1d[wh_sat_no]
    del wh_sat_no

    return slope_s, intercept_s, variance_s, sig_intercept_s, sig_slope_s


def calc_num_seg(gdq, n_int):
    """
    Calculate the maximum number of segments that will be fit within an
    integration, calculated over all pixels and all integrations.  This value
    is based on the locations of cosmic ray-affected pixels in all of the
    ramps, and will be used to allocate arrays used for the optional output
    product.
    Parameters
    ----------
    gdq : ndarray
        cube of GROUPDQ array for a data, 3-D flag
    n_int : int (unused)
        total number of integrations in data set

    Return:
    -------
    max_num_seg : int
        The maximum number of segments within an integration
    max_cr : int
        The maximum number of cosmic rays within an integration
    """
    max_cr = 0  # max number of CRS for all integrations

    # For all 2d pixels, get max number of CRs or DO_NOT_USE flags along their
    # ramps, to use as a surrogate for the number of segments along the ramps
    # Note that we only care about flags that are NOT in the first or last
    # groups, because exclusion of a first or last group won't result in an
    # additional segment.
    check_flag = constants.dqflags["JUMP_DET"] | constants.dqflags["DO_NOT_USE"]
    max_cr = np.count_nonzero(np.bitwise_and(gdq[:, 1:-1], check_flag), axis=1).max()

    # Do not want to return a value > the number of groups, which can occur if
    # this is a MIRI dataset in which the first or last group was flagged as
    # DO_NOT_USE and also flagged as a jump.
    max_num_seg = int(max_cr) + 1  # n CRS implies n+1 segments
    if max_num_seg > gdq.shape[1]:
        max_num_seg = gdq.shape[1]

    return max_num_seg, max_cr


def calc_unwtd_sums(data_masked, xvalues):
    """
    Calculate the sums needed to determine the slope and intercept (and sigma
    of each) using an unweighted fit. Unweighted fitting currently not
    supported.

    Parameters
    ----------
    data_masked : ndarray
        masked values for all pixels in data section, 2-D float
    xvalues : ndarray
        indices of valid pixel values for all groups, 1-D int

    Return:
    -------
    sumx : float
        sum of xvalues
    sumxx : float
        sum of squares of xvalues
    sumxy : float
        sum of product of xvalues and data
    sumy : float
        sum of data
    """
    sumx = xvalues.sum(axis=0)
    sumxx = (xvalues**2).sum(axis=0)
    sumy = (np.reshape(data_masked.sum(axis=0), sumx.shape))
    sumxy = (xvalues * np.reshape(data_masked, xvalues.shape)).sum(axis=0)

    return sumx, sumxx, sumxy, sumy


def calc_opt_sums(rn_sect, gain_sect, data_masked, mask_2d, xvalues, good_pix):
    """
    Calculate the sums needed to determine the slope and intercept (and sigma
    of each) using the optimal weights.  For each good pixel's segment, from
    the initial and final indices and the corresponding number of counts,
    calculate the SNR. From the SNR, calculate the weighting exponent using
    the formulation by Fixsen (Fixsen et al, PASP, 112, 1350). Using this
    exponent and the gain and the readnoise, the weights are calculated from
    which the sums are calculated.
    Parameters
    ----------
    rn_sect : ndarray
        read noise values for all pixels in data section, 2-D float
    gain_sect : ndarray
        gain values for all pixels in data section, 2-D float
    data_masked : ndarray
        masked values for all pixels in data section, 2-D float
    mask_2d : ndarray
        delineates which channels to fit for each pixel, 2-D bool
    xvalues : ndarray
        indices of valid pixel values for all groups, 2-D int
    good_pix : ndarray
        indices of pixels having valid data for all groups, 1-D int

    Return:
    -------
    sumx : float
        sum of xvalues
    sumxx : float
        sum of squares of xvalues
    sumxy : float
        sum of product of xvalues and data
    sumy : float
        sum of data
    nreads_wtd : ndarray
        sum of optimal weights, 1-D float
    xvalues : ndarray
        rolled up indices of valid pixel values for all groups, 2-D int
    """
    c_mask_2d = mask_2d.copy()  # copy the mask to prevent propagation
    rn_sect = np.float32(rn_sect)

    # Return 'empty' sums if there is no more data to fit
    if data_masked.size == 0:
        return np.array([]), np.array([]), np.array([]), np.array([]),\
            np.array([]), np.array([])

    # get initial group for each good pixel for this semiramp
    fnz = np.argmax(c_mask_2d, axis=0)

    # For those pixels that are all False, set to sentinel value of -1
    fnz[c_mask_2d.sum(axis=0) == 0] = -1

    mask_2d_sum = c_mask_2d.sum(axis=0)   # number of valid groups/pixel

    # get final valid group for each pixel for this semiramp
    ind_lastnz = fnz + mask_2d_sum - 1

    # get SCI value of initial good group for semiramp
    data_zero = data_masked[fnz, range(data_masked.shape[1])]

    # get SCI value of final good group for semiramp
    data_final = data_masked[(ind_lastnz), range(data_masked.shape[1])]
    data_diff = data_final - data_zero  # correctly does *NOT* have nans

    ind_lastnz = 0

    # Use the readnoise and gain for good pixels only
    rn_sect_rav = rn_sect.flatten()[good_pix]
    rn_2_r = rn_sect_rav * rn_sect_rav
    gain_sect_r = gain_sect.flatten()[good_pix]

    # Calculate the sigma for nonzero gain values
    sigma_ir = data_final.copy() * 0.0
    numer_ir = data_final.copy() * 0.0

    # Calculate the SNR for pixels from the readnoise, the gain, and the
    # difference between the last and first reads for pixels where this
    # results in a positive SNR. Otherwise set the SNR to 0.
    sqrt_arg = rn_2_r + data_diff * gain_sect_r
    with warnings.catch_warnings():
        # NaNs in sqrt_arg would raise "invalid value" on the comparison;
        # suppress that noise while locating the usable pixels.
        warnings.filterwarnings("ignore", "invalid value.*", RuntimeWarning)
        wh_pos = np.where((sqrt_arg >= 0.) & (gain_sect_r != 0.))
    numer_ir[wh_pos] = \
        np.sqrt(rn_2_r[wh_pos] + data_diff[wh_pos] * gain_sect_r[wh_pos])
    sigma_ir[wh_pos] = numer_ir[wh_pos] / gain_sect_r[wh_pos]
    snr = data_diff * 0.
    snr[wh_pos] = data_diff[wh_pos] / sigma_ir[wh_pos]
    snr[np.isnan(snr)] = 0.0
    snr[snr < 0.] = 0.0
    del wh_pos

    # Free intermediates (these can be large for full-frame sections)
    gain_sect_r = 0
    numer_ir = 0
    data_diff = 0
    sigma_ir = 0

    power_wt_r = calc_power(snr)  # Get the interpolated power for this SNR

    # Make array of number of good groups, and exponents for each pixel
    num_nz = (data_masked != 0.).sum(0)  # number of nonzero groups per pixel
    nrd_data_a = num_nz.copy()
    num_nz = 0

    nrd_prime = (nrd_data_a - 1) / 2.
    nrd_data_a = 0

    # Calculate inverse read noise^2 for use in weights
    # Suppress, then re-enable, harmless arithmetic warning
    warnings.filterwarnings("ignore", ".*divide by zero.*", RuntimeWarning)
    invrdns2_r = 1. / rn_2_r
    warnings.resetwarnings()

    rn_sect = 0
    fnz = 0

    # Set optimal weights for each group of each pixel;
    # for all pixels at once, loop over the groups
    wt_h = np.zeros(data_masked.shape, dtype=np.float32)

    for jj_rd in range(data_masked.shape[0]):
        wt_h[jj_rd, :] = \
            abs((abs(jj_rd - nrd_prime) / nrd_prime) ** power_wt_r) * invrdns2_r

    wt_h[np.isnan(wt_h)] = 0.
    wt_h[np.isinf(wt_h)] = 0.
# For all pixels, 'roll' up the leading zeros such that the 0th group of # each pixel is the lowest nonzero group for that pixel wh_m2d_f = np.logical_not(c_mask_2d[0, :]) # ramps with initial group False while wh_m2d_f.sum() > 0: data_masked[:, wh_m2d_f] = np.roll(data_masked[:, wh_m2d_f], -1, axis=0) c_mask_2d[:, wh_m2d_f] = np.roll(c_mask_2d[:, wh_m2d_f], -1, axis=0) xvalues[:, wh_m2d_f] = np.roll(xvalues[:, wh_m2d_f], -1, axis=0) wh_m2d_f = np.logical_not(c_mask_2d[0, :]) # Create weighted sums for Poisson noise and read noise nreads_wtd = (wt_h * c_mask_2d).sum(axis=0) # using optimal weights sumx = (xvalues * wt_h).sum(axis=0) sumxx = (xvalues**2 * wt_h).sum(axis=0) c_data_masked = data_masked.copy() c_data_masked[np.isnan(c_data_masked)] = 0. sumy = (np.reshape((c_data_masked * wt_h).sum(axis=0), sumx.shape)) sumxy = (xvalues * wt_h * np.reshape(c_data_masked, xvalues.shape)).sum(axis=0) return sumx, sumxx, sumxy, sumy, nreads_wtd, xvalues <file_sep>/src/stcal/ramp_fitting/constants.py dqflags = { "DO_NOT_USE": None, "SATURATED": None, "JUMP_DET": None, "NO_GAIN_VALUE": None, "UNRELIABLE_SLOPE": None, } def update_dqflags(input_flags): dqflags["DO_NOT_USE"] = input_flags["DO_NOT_USE"] dqflags["SATURATED"] = input_flags["SATURATED"] dqflags["JUMP_DET"] = input_flags["JUMP_DET"] dqflags["NO_GAIN_VALUE"] = input_flags["NO_GAIN_VALUE"] dqflags["UNRELIABLE_SLOPE"] = input_flags["UNRELIABLE_SLOPE"] def update_dqflags_from_ramp_data(ramp_data): dqflags["DO_NOT_USE"] = ramp_data.flags_do_not_use dqflags["SATURATED"] = ramp_data.flags_saturated dqflags["JUMP_DET"] = ramp_data.flags_jump_det dqflags["NO_GAIN_VALUE"] = ramp_data.flags_no_gain_val dqflags["UNRELIABLE_SLOPE"] = ramp_data.flags_unreliable_slope <file_sep>/CHANGES.rst 0.2.4 (unreleased) ================== 0.2.3 (2021-08-06) ================== ramp_fitting ------------ - Fix ramp fitting multiprocessing. 
  [#30]

0.2.2 (2021-07-19)
==================

ramp_fitting
------------

- Implemented multiprocessing for OLS. [#30]

- Added DQ flag parameter to `ramp_fit` [#25]

- Move common ``jump`` code to stcal [#27]

0.2.1 (2021-05-20)
==================

ramp_fitting
------------

- Fixed bug for median ramp rate computation in report JP-1950. [#12]

0.2.0 (2021-05-18)
==================

ramp_fitting
------------

- Added ramp fitting code [#6]

0.1.0 (2021-03-19)
==================

- Added code to manipulate bitmasks.
<file_sep>/docs/stcal/jump/description.rst
Algorithm
---------

This routine detects jumps in an exposure by looking for outliers
in the up-the-ramp signal for each pixel in each integration within
an input exposure. On output, the GROUPDQ array is updated with the DQ flag
"JUMP_DET" to indicate the location of each jump that was found.
In addition, any pixels that have non-positive or NaN values in the gain
reference file will have DQ flags "NO_GAIN_VALUE" and "DO_NOT_USE" set in the
output PIXELDQ array.
The SCI and ERR arrays of the input data are not modified.

The current implementation uses the two-point difference method described
in Anderson&Gordon2011_.

Two-Point Difference Method
^^^^^^^^^^^^^^^^^^^^^^^^^^^

The two-point difference method is applied to each integration as follows:

* Compute the first differences for each pixel (the difference between
  adjacent groups)
* Compute the clipped (dropping the largest difference) median of the first
  differences for each pixel.
* Use the median to estimate the Poisson noise for each group and combine it
  with the read noise to arrive at an estimate of the total expected noise for
  each difference.
* Compute the "difference ratio" as the difference between the first
  differences of each group and the median, divided by the expected noise.
* If the largest "difference ratio" is greater than the rejection threshold,
  flag the group corresponding to that ratio as having a jump.
* If a jump is found in a given pixel, iterate the above steps with the
  jump-impacted group excluded, looking for additional lower-level jumps
  that still exceed the rejection threshold.
* Stop iterating on a given pixel when no new jumps are found or only one
  difference remains.
* If there are only three differences (four groups), the standard median
  is used rather than the clipped median.
* If there are only two differences (three groups), the smallest one is
  compared to the larger one and if the larger one is above a threshold, it
  is flagged as a jump.

Note that any ramp values flagged as SATURATED in the input GROUPDQ array
are not used in any of the above calculations and hence will never be
marked as containing a jump.

.. _Anderson&Gordon2011: https://ui.adsabs.harvard.edu/abs/2011PASP..123.1237A
<file_sep>/docs/stcal/package_index.rst
=============
Package Index
=============

.. toctree::
   :maxdepth: 2

   jump/index.rst
<file_sep>/src/stcal/ramp_fitting/gls_fit.py
#! /usr/bin/env python

# !!!!!!!!!!!!!!!!!!! NOTE !!!!!!!!!!!!!!!!!!!
# Needs work.
# Also, this code makes reference to `nreads` as the second dimension
# of the 4-D data set, while `ngroups` makes reference to the NGROUPS
# key word in the exposure metadata. This should be changed, removing
# reference to the NGROUPS key word and using ngroups as the second
# dimension of the 4-D data set.
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
'''
import logging
from multiprocessing.pool import Pool as Pool
import numpy as np
import numpy.linalg as la
import time

from .. import datamodels
from ..datamodels import dqflags

from .
import utils log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) DO_NOT_USE = dqflags.group['DO_NOT_USE'] JUMP_DET = dqflags.group['JUMP_DET'] SATURATED = dqflags.group['SATURATED'] UNRELIABLE_SLOPE = dqflags.pixel['UNRELIABLE_SLOPE'] BUFSIZE = 1024 * 300000 # 300Mb cache size for data section log = logging.getLogger(__name__) log.setLevel(logging.DEBUG) # This is the number of iterations that will be done with use_extra_terms # set to False. If this is zero, use_extra_terms will be set to True even # for the first iteration. # NUM_ITER_NO_EXTRA_TERMS = 1 NUM_ITER_NO_EXTRA_TERMS = 0 # These are the lower and upper limits of the number of iterations that # will be done by determine_slope. # MIN_ITER = NUM_ITER_NO_EXTRA_TERMS + 1 # MAX_ITER = 3 MIN_ITER = 1 MAX_ITER = 1 # This is a term to add for saturated pixels to give them low weight. HUGE_FOR_LOW_WEIGHT = 1.e20 # This is a value to replace zero or negative values in a fit, to make # all values of the fit positive and to give low weight where the fit was # zero or negative. FIT_MUST_BE_POSITIVE = 1.e10 def gls_ramp_fit(input_model, buffsize, save_opt, readnoise_model, gain_model, max_cores): """Fit a ramp using generalized least squares. Extended Summary ---------------- Calculate the count rate for each pixel in the data ramp, for every integration. Generalized least squares is used for fitting the ramp in order to take into account the correlation between reads. If the input file contains multiple integrations, a second output file will be written, containing per-integration count rates. One additional file can optionally be written (if save_opt is True), containing per-integration data. Parameters ---------- model : data model Input data model, assumed to be of type RampModel. buffsize : int Size of data section (buffer) in bytes. save_opt : boolean Calculate optional fitting results. readnoise_model : instance of data Model Readnoise for all pixels. 
gain_model : instance of gain model Gain for all pixels. max_cores : string Number of cores to use for multiprocessing. If set to 'none' (the default), then no multiprocessing will be done. The other allowable values are 'quarter', 'half', and 'all'. This is the fraction of cores to use for multi-proc. The total number of cores includes the SMT cores (Hyper Threading for Intel). Returns ------- new_model : Data Model object DM object containing a rate image averaged over all integrations in the exposure. int_model : Data Model object or None DM object containing rate images for each integration in the exposure, or None if there is only one integration. gls_opt_model : GLS_RampFitModel object or None Object containing optional GLS-specific ramp fitting data for the exposure; this will be None if save_opt is False. """ number_slices = utils.compute_slices(max_cores) # Get needed sizes and shapes nreads, npix, imshape, cubeshape, n_int, instrume, frame_time, ngroups, \ group_time = utils.get_dataset_info(input_model) (group_time, frames_per_group, saturated_flag, jump_flag) = \ utils.get_more_info(input_model) # Get readnoise array for calculation of variance of noiseless ramps, and # gain array in case optimal weighting is to be done # KDG - not sure what this means and no optimal weigting in GLS readnoise_2d, gain_2d = utils.get_ref_subs(input_model, readnoise_model, gain_model, frames_per_group) # Flag any bad pixels in the gain pixeldq = utils.reset_bad_gain(input_model.pixeldq, gain_2d) log.info("number of processes being used is %d" % number_slices) total_rows = input_model.data.shape[2] tstart = time.time() # Determine the maximum number of cosmic ray hits for any pixel. 
max_num_cr = -1 # invalid initial value for num_int in range(n_int): i_max_num_cr = utils.get_max_num_cr(input_model.groupdq[num_int, :, :, :], jump_flag) max_num_cr = max(max_num_cr, i_max_num_cr) # Calculate effective integration time (once EFFINTIM has been populated # and accessible, will use that instead), and other keywords that will # needed if the pedestal calculation is requested. Note 'nframes' # is the number of given by the NFRAMES keyword, and is the number of # frames averaged on-board for a group, i.e., it does not include the # groupgap. effintim, nframes, groupgap, dropframes1 = utils.get_efftim_ped(input_model) if number_slices == 1: rows_per_slice = total_rows slopes, slope_int, slope_err_int, pixeldq_sect, dq_int, sum_weight, \ intercept_int, intercept_err_int, pedestal_int, ampl_int, ampl_err_int = \ gls_fit_all_integrations(frame_time, gain_2d, input_model.groupdq, group_time, jump_flag, max_num_cr, input_model.data, input_model.err, nframes, pixeldq, readnoise_2d, saturated_flag, save_opt) else: rows_per_slice = round(total_rows / number_slices) pool = Pool(processes=number_slices) slices = [] slopes = np.zeros(imshape, dtype=np.float32) sum_weight = np.zeros(imshape, dtype=np.float32) # For multiple-integration datasets, will output integration-specific # results to separate file named <basename> + '_rateints.fits'. # Even if there's only one integration, the output results will be # saved in these arrays. slope_int = np.zeros((n_int,) + imshape, dtype=np.float32) slope_err_int = np.zeros((n_int,) + imshape, dtype=np.float32) dq_int = np.zeros((n_int,) + imshape, dtype=np.uint32) out_pixeldq = np.zeros(imshape, dtype=np.uint32) if save_opt: # Create arrays for the fitted values of zero-point intercept and # cosmic-ray amplitudes, and their errors. 
intercept_int = np.zeros((n_int,) + imshape, dtype=np.float32) intercept_err_int = np.zeros((n_int,) + imshape, dtype=np.float32) # The pedestal is the extrapolation of the first group back to zero # time, for each integration. pedestal_int = np.zeros((n_int,) + imshape, dtype=np.float32) # If there are no cosmic rays, set the last axis length to 1. shape_ampl = (n_int, imshape[0], imshape[1], max(1, max_num_cr)) ampl_int = np.zeros(shape_ampl, dtype=np.float32) ampl_err_int = np.zeros(shape_ampl, dtype=np.float32) # Loop over number of processes for i in range(number_slices - 1): start_row = i * rows_per_slice stop_row = (i + 1) * rows_per_slice readnoise_slice = readnoise_2d[start_row: stop_row, :] gain_slice = gain_2d[start_row: stop_row, :] data_slice = input_model.data[:, :, start_row:stop_row, :].copy() err_slice = input_model.err[:, :, start_row: stop_row, :].copy() groupdq_slice = input_model.groupdq[:, :, start_row: stop_row, :].copy() pixeldq_slice = pixeldq[start_row: stop_row, :].copy() slices.insert(i, (frame_time, gain_slice, groupdq_slice, group_time, jump_flag, max_num_cr, data_slice, err_slice, frames_per_group, pixeldq_slice, readnoise_slice, saturated_flag, save_opt)) # The last slice takes the remainder of the rows start_row = (number_slices - 1) * rows_per_slice readnoise_slice = readnoise_2d[start_row: total_rows, :] gain_slice = gain_2d[start_row: total_rows, :] data_slice = input_model.data[:, :, start_row: total_rows, :].copy() err_slice = input_model.err[:, :, start_row: total_rows, :].copy() groupdq_slice = input_model.groupdq[:, :, start_row: total_rows, :].copy() pixeldq_slice = input_model.pixeldq[start_row: total_rows, :].copy() slices.insert(number_slices - 1, (frame_time, gain_slice, groupdq_slice, group_time, jump_flag, max_num_cr, data_slice, err_slice, frames_per_group, pixeldq_slice, readnoise_slice, saturated_flag, save_opt)) log.debug("Creating %d processes for ramp fitting " % number_slices) real_results = 
pool.starmap(gls_fit_all_integrations, slices) pool.close() pool.join() k = 0 log.debug("All processes complete") for resultslice in real_results: start_row = k * rows_per_slice if len(real_results) == k + 1: # last result slopes[start_row:total_rows, :] = resultslice[0] slope_int[:, start_row:total_rows, :] = resultslice[1] slope_err_int[:, start_row:total_rows, :] = resultslice[2] out_pixeldq[start_row:total_rows, :] = resultslice[3] if resultslice[4] is not None: dq_int[:, start_row:total_rows, :] = resultslice[4] # nint > 1 sum_weight[start_row:total_rows, :] = resultslice[5] # nint > 1 if resultslice[6] is not None: intercept_int[:, start_row: total_rows, :] = resultslice[6] # optional intercept_err_int[:, start_row:total_rows, :] = resultslice[7] # optional pedestal_int[:, start_row: total_rows, :] = resultslice[8] # optional ampl_int[:, start_row:total_rows, :] = resultslice[9] # optional ampl_err_int[:, start_row: total_rows, :] = resultslice[10] # optional else: stop_row = (k + 1) * rows_per_slice slopes[start_row:stop_row, :] = resultslice[0] slope_int[:, start_row:stop_row, :] = resultslice[1] slope_err_int[:, start_row:stop_row, :] = resultslice[2] out_pixeldq[start_row:stop_row, :] = resultslice[3] if resultslice[4] is not None: dq_int[:, start_row:stop_row, :] = resultslice[4] # nint > 1 sum_weight[start_row:stop_row, :] = resultslice[5] # nint > 1 if resultslice[6] is not None: intercept_int[:, start_row: stop_row, :] = resultslice[6] # optional intercept_err_int[:, start_row:stop_row, :] = resultslice[7] # optional pedestal_int[:, start_row: stop_row, :] = resultslice[8] # optional ampl_int[:, start_row:stop_row, :] = resultslice[9] # optional ampl_err_int[:, start_row: stop_row, :] = resultslice[10] # optional k = k + 1 # Average the slopes over all integrations. if n_int > 1: sum_weight = np.where(sum_weight <= 0., 1., sum_weight) recip_sum_weight = 1. 
/ sum_weight slopes *= recip_sum_weight gls_err = np.sqrt(recip_sum_weight) # Convert back from electrons to DN. slope_int /= gain_2d slope_err_int /= gain_2d if n_int > 1: slopes /= gain_2d gls_err /= gain_2d if save_opt: intercept_int /= gain_2d intercept_err_int /= gain_2d pedestal_int /= gain_2d gain_shape = gain_2d.shape gain_4d = gain_2d.reshape((1, gain_shape[0], gain_shape[1], 1)) ampl_int /= gain_4d ampl_err_int /= gain_4d del gain_4d del gain_2d # Compress all integration's dq arrays to create 2D PIXELDDQ array for # primary output final_pixeldq = utils.dq_compress_final(dq_int, n_int) int_model = utils.gls_output_integ(input_model, slope_int, slope_err_int, dq_int) if save_opt: # collect optional results for output # Get the zero-point intercepts and the cosmic-ray amplitudes for # each integration (even if there's only one integration). gls_opt_model = utils.gls_output_optional( input_model, intercept_int, intercept_err_int, pedestal_int, ampl_int, ampl_err_int) else: gls_opt_model = None tstop = time.time() if n_int > 1: utils.log_stats(slopes) else: utils.log_stats(slope_int[0]) log.debug('Instrument: %s' % instrume) log.debug('Number of pixels in 2D array: %d' % npix) log.debug('Shape of 2D image: (%d, %d)' % imshape) log.debug('Shape of data cube: (%d, %d, %d)' % cubeshape) log.debug('Buffer size (bytes): %d' % buffsize) log.debug('Number of rows per slice: %d' % rows_per_slice) log.info('Number of groups per integration: %d' % nreads) log.info('Number of integrations: %d' % n_int) log.debug('The execution time in seconds: %f' % (tstop - tstart,)) # Create new model... if n_int > 1: new_model = datamodels.ImageModel( data=slopes.astype(np.float32), dq=final_pixeldq, err=gls_err.astype(np.float32)) else: new_model = datamodels.ImageModel( data=slope_int[0], dq=final_pixeldq, err=slope_err_int[0]) new_model.update(input_model) # ... 
and add all keys from input return new_model, int_model, gls_opt_model def gls_fit_all_integrations( frame_time, gain_2d, gdq_cube, group_time, jump_flag, max_num_cr, data_sect, input_var_sect, nframes_used, pixeldq, readnoise_2d, saturated_flag, save_opt): """ This method will fit the rate for all pixels and all integrations using the Generalized Least Squares (GLS) method. Parameters ---------- frame_time : float32 The time to read one frame gain_2d : 2D float32 The gain in electrons per DN for each pixel gdq_cube : 4-D DQ Flags The group dq flag values for all groups in the exposure group_time : float32 The time to read one group jump_flag : DQ flag The DQ value to mark a jump max_num_cr : int The largest number of cosmic rays found in any integration data_sect : 4-D float32 The input ramp cube with the sample values for each group of each integration for each pixel input_var_sect : 4-D float32 The input variance for each group of each integration for each pixel nframes_used : int The number of frames used to form each group average pixel_dq : 2-D DQ flags The pixel DQ flags for all pixels readnoise_2d : 2-D float32 The read noise for each pixel saturated_flag : DQ flag The DQ flag value to mark saturation save_opt : boolean Set to true to return the optional output model Returns -------- slopes : 2-D float32 The output rate for each pixel slope_int : 2-D float32 The output y-intercept for each pixel slope_var_sect : 2-D float32 The variance of the rate for each pixel pixeldq_sect : 2-D DQ flag The pixel dq for each pixel dq_int : 3-D DQ flag The pixel dq for each integration for each pixel sum_weight : 2-D float32 The sum of the weights for each pixel intercept_int : 3-D float32 The y-intercept for each integration for each pixel intercept_err_int : 3-D float32 The uncertainty of the y-intercept for each pixel of each integration pedestal_int : 3-D float32 The pedestal value for each integration for each pixel ampl_int : 3-D float32 The amplitude of each cosmic 
ray for each pixel ampl_err_int : The variance of the amplitude of each cosmic ray for each pixel """ number_ints = data_sect.shape[0] number_rows = data_sect.shape[2] number_cols = data_sect.shape[3] imshape = (data_sect.shape[2], data_sect.shape[3]) slope_int = np.zeros((number_ints, number_rows, number_cols), dtype=np.float32) slope_err_int = np.zeros((number_ints, number_rows, number_cols), dtype=np.float32) dq_int = np.zeros((number_ints, number_rows, number_cols), dtype=np.uint32) temp_dq = np.zeros((number_rows, number_cols), dtype=np.uint32) slopes = np.zeros((number_rows, number_cols), dtype=np.float32) sum_weight = np.zeros((number_rows, number_cols), dtype=np.float32) if save_opt: # Create arrays for the fitted values of zero-point intercept and # cosmic-ray amplitudes, and their errors. intercept_int = np.zeros((number_ints,) + imshape, dtype=np.float32) intercept_err_int = np.zeros((number_ints,) + imshape, dtype=np.float32) # The pedestal is the extrapolation of the first group back to zero # time, for each integration. pedestal_int = np.zeros((number_ints,) + imshape, dtype=np.float32) # The first group, for calculating the pedestal. (This only needs # to be nrows high, but we don't have nrows yet. xxx) first_group = np.zeros(imshape, dtype=np.float32) # If there are no cosmic rays, set the last axis length to 1. shape_ampl = (number_ints, imshape[0], imshape[1], max(1, max_num_cr)) ampl_int = np.zeros(shape_ampl, dtype=np.float32) ampl_err_int = np.zeros(shape_ampl, dtype=np.float32) else: intercept_int = None intercept_err_int = None pedestal_int = None first_group = None shape_ampl = None ampl_int = None ampl_err_int = None # loop over data integrations for num_int in range(number_ints): if save_opt: first_group[:, :] = 0. # re-use this for each integration # We'll propagate error estimates from previous steps to the # current step by using the variance. input_var_sect = input_var_sect ** 2 # Convert the data section from DN to electrons. 
data_sect *= gain_2d if save_opt: first_group[:, :] = data_sect[num_int, 0, :, :].copy() intercept_sect, intercept_var_sect, slope_sect, slope_var_sect, cr_sect, cr_var_sect = \ determine_slope(data_sect[num_int, :, :, :], input_var_sect[num_int, :, :, :], gdq_cube[num_int, :, :, :], readnoise_2d, gain_2d, frame_time, group_time, nframes_used, max_num_cr, saturated_flag, jump_flag) slope_int[num_int, :, :] = slope_sect.copy() v_mask = (slope_var_sect <= 0.) if v_mask.any(): # Replace negative or zero variances with a large value. slope_var_sect[v_mask] = utils.LARGE_VARIANCE # Also set a flag in the pixel dq array. temp_dq[:, :][v_mask] = UNRELIABLE_SLOPE del v_mask # If a pixel was flagged (by an earlier step) as saturated in # the first group, flag the pixel as bad. # Note: save s_mask until after the call to utils.gls_pedestal. s_mask = (gdq_cube[0] == saturated_flag) if s_mask.any(): temp_dq[:, :][s_mask] = UNRELIABLE_SLOPE slope_err_int[num_int, :, :] = np.sqrt(slope_var_sect) # We need to take a weighted average if (and only if) number_ints > 1. # Accumulate sum of slopes and sum of weights. if number_ints > 1: weight = 1. / slope_var_sect slopes[:, :] += (slope_sect * weight) sum_weight[:, :] += weight if save_opt: # Save the intercepts and cosmic-ray amplitudes for the # current integration. 
intercept_int[num_int, :, :] = intercept_sect.copy() intercept_err_int[num_int, :, :] = np.sqrt(np.abs(intercept_var_sect)) pedestal_int[num_int, :, :] = utils.gls_pedestal(first_group[:, :], slope_int[num_int, :, :], s_mask, frame_time, nframes_used) ampl_int[num_int, :, :, :] = cr_sect.copy() ampl_err_int[num_int, :, :, :] = np.sqrt(np.abs(cr_var_sect)) # Compress 4D->2D dq arrays for saturated and jump-detected # pixels pixeldq_sect = pixeldq[:, :].copy() dq_int[num_int, :, :] = \ utils.dq_compress_sect(gdq_cube[num_int, :, :, :], pixeldq_sect).copy() dq_int[num_int, :, :] |= temp_dq temp_dq[:, :] = 0 # initialize for next integration return slopes, slope_int, slope_var_sect, pixeldq_sect, dq_int, sum_weight, \ intercept_int, intercept_err_int, pedestal_int, ampl_int, ampl_err_int def determine_slope(data_sect, input_var_sect, gdq_sect, readnoise_sect, gain_sect, frame_time, group_time, nframes_used, max_num_cr, saturated_flag, jump_flag): """Iteratively fit a slope, intercept, and cosmic rays to a ramp. This function fits a ramp, possibly with discontinuities (cosmic-ray hits), to a 3-D data "cube" with shape (number of groups, number of pixels high, number of pixels wide). The fit will be done multiple times, with the previous fit being used to assign weights (via the covariance matrix) for the current fit. The iterations stop either when the maximum number of iterations has been reached or when the maximum difference between the previous fit and the current fit is below a cutoff. This function calls compute_slope and evaluate_fit. compute_slope creates arrays for the slope, intercept, and cosmic-ray amplitudes (the arrays that will be returned by determine_slope). Then it loops over the number of cosmic rays, from 0 to max_num_cr inclusive. 
Within this loop, compute_slope copies to temporary arrays the ramp data for all the pixels that have the current number of cosmic ray hits, calls gls_fit to compute the fit, then copies the results of the fit (slope, etc.) to the output arrays for just those pixels. The input to gls_fit is ramp data for a subset of pixels (nz in number) that all have the same number of cosmic-ray hits. gls_fit solves matrix equations (one for each of the nz pixels) of the form: y = x * p where y is a column vector containing the observed data values in electrons for each group (the length of y is ngroups, the number of groups); x is a matrix with ngroups rows and 2 + num_cr columns, where num_cr is the number of cosmic rays being included in the fit; and p is the solution, a column vector containing the intercept, slope, and the amplitude of each of the num_cr cosmic rays. The first column of x is all ones, for fitting to the intercept. The second column of x is the time (seconds) at the beginning of each group. The remaining num_cr columns (if num_cr > 0) are Heaviside functions, 0 for the first rows and 1 for all rows at and following the group containing a cosmic-ray hit (each row corresponds to a group). There will be one such column for each cosmic ray, so that the cosmic rays will be fit independently of each other. Whether a cosmic ray hit the detector during a particular group was determined by a previous step, and the affected groups are flagged in the group data quality array. In order to account for the variance of each observed value and the covariance between them (since they're measurements along a ramp), the solution is computed in this form (the @ sign represents matrix multiplication): (xT @ C^-1 @ x)^-1 @ [xT @ C^-1 @ y] where C is the ngroups by ngroups covariance matrix, ^-1 means matrix inverse, and xT is the transpose of x. Summary of the notation: data_sect is 3-D, (ngroups, ny, nx); this is the ramp of science data. 
cr_flagged is 3-D, (ngroups, ny, nx); 1 indicates a cosmic ray, e.g.: cr_flagged = np.where(np.bitwise_and(gdq_sect, jump_flag), 1, 0) cr_flagged_2d is 2-D, (ngroups, nz); this gives the location within the ramp of each cosmic ray, for the subset of pixels (nz of them) that have a total of num_cr cosmic ray hits at each pixel. This is passed to gls_fit(), which fits a slope to the ramp. ramp_data has shape (ngroups, nz); this will be a ramp with a 1-D array of pixels copied out of data_sect. The pixels will be those that have a particular number of cosmic-ray hits, somewhere within the ramp. Sum cr_flagged over groups to get an (ny, nx) image of the number of cosmic rays (i.e. accumulated over the ramp) in each pixel. sum_flagged = cr_flagged.sum(axis=0, ...) sum_flagged is used to extract the nz pixels from (ny, nx) that have a specified number of cosmic ray hits, e.g.: for num_cr in range(max_num_cr + 1): ncr_mask = (sum_flagged == num_cr) nz = ncr_mask.sum(dtype=np.int32) for k in range(ngroups): ramp_data[k] = data_sect[k][ncr_mask] cr_flagged_2d[k] = cr_flagged[k][ncr_mask] gls_fit is called for the subset of pixels (nz of them) that have num_cr cosmic ray hits within the ramp, the same number for every pixel. Parameters ---------- data_sect : 3-D ndarray, shape (ngroups, ny, nx) The ramp data for one integration. This may be a subarray in detector coordinates, but covering all groups. ngroups is the number of groups; ny is the number of pixels in the Y direction; nx is the number of pixels in the X (more rapidly varying) direction. The units should be electrons. input_var_sect : 3-D ndarray, shape (ngroups, ny, nx) The square of the input ERR array, matching data_sect. gdq_sect : 3-D ndarray, shape (ngroups, ny, nx) The group data quality array. This may be a subarray, matching data_sect. readnoise_sect : 2-D ndarray, shape (ny, nx) The read noise in electrons at each detector pixel (i.e. not a ramp). This may be a subarray, similar to data_sect. 
gain_sect : 2-D ndarray, or None, shape (ny, nx) The gain in electrons per DN at each detector pixel (i.e. not a ramp). This may be a subarray, matching readnoise_sect. If gain_sect is None, a value of 1 will be assumed. frame_time : float The time to read one frame, in seconds (e.g. 10.6 s). group_time : float Time increment between groups, in seconds. nframes_used : int Number of frames that were averaged together to make a group. Note that this value does not include the number (if any) of skipped frames. max_num_cr : non-negative int The maximum number of cosmic rays that should be handled. This must be specified by the caller, because determine_slope may be called for different sections of the input data, and those sections may have differing maximum numbers of cosmic rays. saturated_flag : int dqflags.group['SATURATED'] jump_flag : int dqflags.group['JUMP_DET'] Returns ------- tuple : (intercept_sect, int_var_sect, slope_sect, slope_var_sect, cr_sect, cr_var_sect) intercept_sect : 2-D ndarray, shape (ny, nx) The intercept of the ramp at each pixel. int_var_sect : 2-D ndarray, shape (ny, nx) The variance of the intercept at each pixel. slope_sect : 2-D ndarray, shape (ny, nx) The ramp slope at each pixel of data_sect. slope_var_sect : 2-D ndarray, shape (ny, nx) The variance of the slope at each pixel. cr_sect : 3-D ndarray, shape (ny, nx, cr_dimen) The amplitude of each cosmic ray at each pixel. cr_dimen is max_num_cr or 1, whichever is larger. cr_var_sect : 3-D ndarray, shape (ny, nx, cr_dimen) The variance of each cosmic-ray amplitude. """ slope_diff_cutoff = 1.e-5 # These will be updated in the loop. prev_slope_sect = (data_sect[1, :, :] - data_sect[0, :, :]) / group_time prev_fit = data_sect.copy() use_extra_terms = True iter = 0 done = False if NUM_ITER_NO_EXTRA_TERMS <= 0: # Even the first iteration uses the extra terms. 
temp_use_extra_terms = True else: temp_use_extra_terms = False while not done: (intercept_sect, int_var_sect, slope_sect, slope_var_sect, cr_sect, cr_var_sect) = \ compute_slope(data_sect, input_var_sect, gdq_sect, readnoise_sect, gain_sect, prev_fit, prev_slope_sect, frame_time, group_time, nframes_used, max_num_cr, saturated_flag, jump_flag, temp_use_extra_terms) iter += 1 if iter == NUM_ITER_NO_EXTRA_TERMS: temp_use_extra_terms = use_extra_terms if iter >= MAX_ITER: done = True else: # If there are pixels with zero or negative variance, ignore # them when taking the difference between the slopes computed # in the current and previous iterations. slope_diff = np.where(slope_var_sect > 0., prev_slope_sect - slope_sect, 0.) max_slope_diff = np.abs(slope_diff).max() if iter >= MIN_ITER and max_slope_diff < slope_diff_cutoff: done = True current_fit = evaluate_fit(intercept_sect, slope_sect, cr_sect, frame_time, group_time, gdq_sect, jump_flag) prev_fit = positive_fit(current_fit) # use for next iteration del current_fit prev_slope_sect = slope_sect.copy() return (intercept_sect, int_var_sect, slope_sect, slope_var_sect, cr_sect, cr_var_sect) def evaluate_fit(intercept_sect, slope_sect, cr_sect, frame_time, group_time, gdq_sect, jump_flag): """Evaluate the fit (intercept, slope, cosmic-ray amplitudes). Parameters ---------- intercept_sect : 2-D ndarray The intercept of the ramp at each pixel of data_sect (one of the arguments to determine_slope). slope_sect : 2-D ndarray The ramp slope at each pixel of data_sect. cr_sect : 3-D ndarray The amplitude of each cosmic ray at each pixel of data_sect. frame_time : float The time to read one frame, in seconds (e.g. 10.6 s). group_time : float Time increment between groups, in seconds. gdq_sect : 3-D ndarray; indices: group, y, x The group data quality array. This may be a subarray, matching data_sect. 
jump_flag : int dqflags.group['JUMP_DET'] Returns ------- fit_model : 3-D ndarray, shape (ngroups, ny, nx) This is the same shape as data_sect, and if the fit is good, fit_model and data_sect should not differ by much. """ shape_3d = gdq_sect.shape # the ramp, (ngroups, ny, nx) ngroups = gdq_sect.shape[0] # This array is also created in function compute_slope. cr_flagged = np.empty(shape_3d, dtype=np.uint8) cr_flagged[:] = np.where(np.bitwise_and(gdq_sect, jump_flag), 1, 0) sum_flagged = cr_flagged.sum(axis=0, dtype=np.int32) # local_max_num_cr is local to this function. It may be smaller than # the max_num_cr that's an argument to determine_slope, and it can even # be zero. local_max_num_cr = sum_flagged.max() del sum_flagged # The independent variable, in seconds at each image pixel. ind_var = np.zeros(shape_3d, dtype=np.float64) M = round(group_time / frame_time) iv = np.arange(ngroups, dtype=np.float64) * group_time + \ frame_time * (M + 1.) / 2. iv = iv.reshape((ngroups, 1, 1)) ind_var += iv # No cosmic rays yet; these will be accounted for below. # ind_var has a different shape (ngroups, ny, nx) from slope_sect and # intercept_sect, but their last dimensions are the same. fit_model = ind_var * slope_sect + intercept_sect # heaviside and cr_flagged have shape (ngroups, ny, nx). heaviside = np.zeros(shape_3d, dtype=np.float64) cr_cumsum = cr_flagged.cumsum(axis=0, dtype=np.int16) # Add an offset for each cosmic ray. for n in range(local_max_num_cr): heaviside[:] = np.where(cr_cumsum > n, 1., 0.) fit_model += (heaviside * cr_sect[:, :, n]) return fit_model def positive_fit(current_fit): """Replace zero and negative values with a positive number. Ramp data should be positive, since they are based on counts. The fit to a ramp can go negative, however, due e.g. to extrapolation beyond where the data are saturated. 
To avoid negative elements in the covariance matrix (populated in part with the fit to the ramp), this function replaces zero or negative values in the fit with a positive number. Parameters ---------- current_fit : 3-D ndarray, shape (ngroups, ny, nx) The fit returned by evaluate_fit. Returns ------- current_fit : 3-D ndarray, shape (ngroups, ny, nx) This is the same as the input current_fit, except that zero and negative values will have been replaced by a positive value. """ return np.where(current_fit <= 0., FIT_MUST_BE_POSITIVE, current_fit) def compute_slope(data_sect, input_var_sect, gdq_sect, readnoise_sect, gain_sect, prev_fit, prev_slope_sect, frame_time, group_time, nframes_used, max_num_cr, saturated_flag, jump_flag, use_extra_terms): """Set up the call to fit a slope to ramp data. This loops over the number of cosmic rays (jumps). That is, all the ramps with no cosmic rays are processed first, then all the ramps with one cosmic ray, then with two, etc. Parameters ---------- data_sect : 3-D ndarray; shape (ngroups, ny, nx) The ramp data for one of the integrations in an exposure. This may be a subarray in detector coordinates, but covering all groups. input_var_sect : 3-D ndarray, shape (ngroups, ny, nx) The square of the input ERR array, matching data_sect. gdq_sect : 3-D ndarray; shape (ngroups, ny, nx) The group data quality array. This may be a subarray, matching data_sect. readnoise_sect : 2-D ndarray; shape (ny, nx) The read noise in electrons at each detector pixel (i.e. not a ramp). This may be a subarray, similar to data_sect. gain_sect : 2-D ndarray, or None; shape (ny, nx) The gain in electrons per DN at each detector pixel (i.e. not a ramp). This may be a subarray, matching readnoise_sect. If gain_sect is None, a value of 1 will be assumed. prev_fit : 3-D ndarray; shape (ngroups, ny, nx) The previous fit (intercept, slope, cosmic-ray amplitudes) evaluated for each pixel in the subarray. data_sect itself may be used for the first iteration. 
prev_slope_sect : 2-D ndarray; shape (ny, nx) An estimate (e.g. from a previous iteration) of the slope at each pixel, in electrons per second. This may be a subarray, similar to data_sect. frame_time : float The time to read one frame, in seconds (e.g. 10.6 s). group_time : float Time increment between groups, in seconds. nframes_used : int Number of frames that were averaged together to make a group. This value does not include the number (if any) of skipped frames. max_num_cr : non-negative int The maximum number of cosmic rays that should be handled. saturated_flag : int dqflags.group['SATURATED'] jump_flag : int dqflags.group['JUMP_DET'] use_extra_terms : bool True if we should include <NAME> terms in the covariance matrix. See JWST-STScI-003193.pdf Returns ------- tuple : (intercept_sect, int_var_sect, slope_sect, slope_var_sect, cr_sect, cr_var_sect) intercept_sect is a 2-D ndarray, the intercept of the ramp at each pixel of data_sect. int_var_sect is a 2-D ndarray, the variance of the intercept at each pixel of data_sect. slope_sect is a 2-D ndarray, the ramp slope at each pixel of data_sect. slope_var_sect is a 2-D ndarray, the variance of the slope at each pixel of data_sect. cr_sect is a 3-D ndarray, shape (ny, nx, cr_dimen), the amplitude of each cosmic ray at each pixel of data_sect. cr_dimen is max_num_cr or 1, whichever is larger. cr_var_sect is a 3-D ndarray, the variance of each cosmic ray amplitude. """ cr_flagged = np.empty(data_sect.shape, dtype=np.uint8) cr_flagged[:] = np.where(np.bitwise_and(gdq_sect, jump_flag), 1, 0) # If a pixel is flagged as a jump in the first group, we can't fit to # the ramp, because a matrix that we need to invert would be singular. # If there's only one group, we can't fit a ramp to it anyway, so # at this point we wouldn't need to be concerned about a jump. If # there is more than one group, just ignore any jump the first group. 
if data_sect.shape[0] > 1: cr_flagged[0, :, :] = 0 # Sum over groups to get an (ny, nx) image of the number of cosmic # rays in each pixel, accumulated over the ramp. sum_flagged = cr_flagged.sum(axis=0, dtype=np.int32) # If a pixel is flagged as saturated in the first or second group, we # don't want to even attempt to fit a slope to the ramp for that pixel. # Handle this case by setting the corresponding pixel in sum_flagged to # a negative number. The test `ncr_mask = (sum_flagged == num_cr)` # will therefore never match, since num_cr is zero or larger, and the # pixel will not be included in any ncr_mask. mask1 = (gdq_sect[0, :, :] == saturated_flag) sum_flagged[mask1] = -1 # one_group_mask flags pixels that are not saturated in the first # group but are saturated in the second group (if there is a second # group). For these pixels, we will assign a value to the slope # image by just dividing the value in the first group by group_time. if len(gdq_sect) > 1: mask2 = (gdq_sect[1, :, :] == saturated_flag) sum_flagged[mask2] = -1 one_group_mask = np.bitwise_and(mask2, np.bitwise_not(mask1)) del mask2 else: one_group_mask = np.bitwise_not(mask1) del mask1 # Set elements of this array to a huge value if the corresponding # pixels are saturated. This is not a flag, it's a value to be # added to the diagonal of the covariance matrix. saturated = np.empty(data_sect.shape, dtype=np.float64) saturated[:] = np.where(np.bitwise_and(gdq_sect, saturated_flag), HUGE_FOR_LOW_WEIGHT, 0.) # Create arrays to be populated and then returned. shape = data_sect.shape # Lower limit of one, in case there are no cosmic rays at all. 
cr_dimen = max(1, max_num_cr) intercept_sect = np.zeros((shape[1], shape[2]), dtype=data_sect.dtype) slope_sect = np.zeros((shape[1], shape[2]), dtype=data_sect.dtype) cr_sect = np.zeros((shape[1], shape[2], cr_dimen), dtype=data_sect.dtype) int_var_sect = np.zeros((shape[1], shape[2]), dtype=data_sect.dtype) slope_var_sect = np.zeros((shape[1], shape[2]), dtype=data_sect.dtype) cr_var_sect = np.zeros((shape[1], shape[2], cr_dimen), dtype=data_sect.dtype) # This takes care of the case that there's only one group, as well as # pixels that are saturated in the second but not the first group of a # multi-group file if one_group_mask.any(): slope_sect[one_group_mask] = data_sect[0, one_group_mask] / group_time del one_group_mask # Fit slopes for all pixels that have no cosmic ray hits anywhere in # the ramp, then fit slopes with one CR hit, then with two, etc. for num_cr in range(max_num_cr + 1): ngroups = len(data_sect) ncr_mask = (sum_flagged == num_cr) # Number of detector pixels flagged with num_cr CRs within the ramp. nz = ncr_mask.sum(dtype=np.int32) if nz <= 0: continue # ramp_data will be a ramp with a 1-D array of pixels copied out # of data_sect. 
ramp_data = np.empty((ngroups, nz), dtype=data_sect.dtype) input_var_data = np.empty((ngroups, nz), dtype=data_sect.dtype) prev_fit_data = np.empty((ngroups, nz), dtype=prev_fit.dtype) prev_slope_data = np.empty(nz, dtype=prev_slope_sect.dtype) prev_slope_data[:] = prev_slope_sect[ncr_mask] readnoise = np.empty(nz, dtype=readnoise_sect.dtype) readnoise[:] = readnoise_sect[ncr_mask] if gain_sect is None: gain = None else: gain = np.empty(nz, dtype=gain_sect.dtype) gain[:] = gain_sect[ncr_mask] cr_flagged_2d = np.empty((ngroups, nz), dtype=cr_flagged.dtype) saturated_data = np.empty((ngroups, nz), dtype=prev_fit.dtype) for k in range(ngroups): ramp_data[k] = data_sect[k][ncr_mask] input_var_data[k] = input_var_sect[k][ncr_mask] prev_fit_data[k] = prev_fit[k][ncr_mask] cr_flagged_2d[k] = cr_flagged[k][ncr_mask] # This is for clobbering saturated pixels. saturated_data[k] = saturated[k][ncr_mask] (result, variances) = \ gls_fit(ramp_data, prev_fit_data, prev_slope_data, readnoise, gain, frame_time, group_time, nframes_used, num_cr, cr_flagged_2d, saturated_data) # Copy the intercept, slope, and cosmic-ray amplitudes and their # variances to the arrays to be returned. # ncr_mask is a mask array that is True for each pixel that has the # current number (num_cr) of cosmic rays. Thus, the output arrays # are being populated here in sets, a different set of pixels with # each iteration of this loop. intercept_sect[ncr_mask] = result[:, 0].copy() int_var_sect[ncr_mask] = variances[:, 0].copy() slope_sect[ncr_mask] = result[:, 1].copy() slope_var_sect[ncr_mask] = variances[:, 1].copy() # In this loop, i is just an index. cr_sect is populated for # number of cosmic rays = 1 to num_cr, inclusive. 
for i in range(num_cr): cr_sect[ncr_mask, i] = result[:, 2 + i].copy() cr_var_sect[ncr_mask, i] = variances[:, 2 + i].copy() return (intercept_sect, int_var_sect, slope_sect, slope_var_sect, cr_sect, cr_var_sect) def gls_fit(ramp_data, prev_fit_data, prev_slope_data, readnoise, gain, frame_time, group_time, nframes_used, num_cr, cr_flagged_2d, saturated_data): """Generalized least squares linear fit. It is assumed that every input pixel has num_cr cosmic-ray hits somewhere within the ramp. This function should be called separately for different values of num_cr. Notes ----- Curently the noise model is assumed to be a combination of read and photon noise alone. Same technique could be used with more complex noise models, but then the ramp covariance matrix should be input. Parameters ---------- ramp_data : 2-D ndarray; indices: group, pixel number The ramp data for one of the integrations in an exposure. This may be a subset in detector coordinates, but covering all groups. The shape is (ngroups, nz), where ngroups is the length of the ramp, and nz is the number of pixels in the current subset. prev_fit_data : 2-D ndarray, shape (ngroups, nz) The fit to ramp_data, based on applying the values of intercept, slope, and cosmic-ray amplitudes that were determined in a previous call to gls_fit. This array is only used for setting up the covariance matrix. prev_slope_data : 1-D ndarray, length nz. An estimate (e.g. from a previous iteration) of the slope at each pixel, in electrons per second. readnoise : 1-D ndarray, length nz. The read noise in electrons at each detector pixel. gain : 1-D ndarray, shape (nz,) The analog-to-digital gain (electrons per dn) at each detector pixel. frame_time : float The time to read one frame, in seconds (e.g. 10.6 s). group_time : float Time increment between groups, in seconds. nframes_used : int Number of frames that were averaged together to make a group. Note that this value does not include the number (if any) of skipped frames. 
num_cr : int The number of cosmic rays that will be handled. All pixels in the current set (ramp_data) are assumed to have this many cosmic ray hits somewhere within the ramp. cr_flagged_2d : 2-D ndarray, shape (ngroups, nz) The values should be 0 or 1; 1 indicates that a cosmic ray was detected (by another step) at that point. saturated_data : 2-D ndarray, shape (ngroups, nz) Normal values are zero; the value will be a huge number for saturated pixels. This will be added to the main diagonal of the inverse of the weight matrix to greatly reduce the weight for saturated pixels. Returns ------- tuple : (result2d, variances) result2d is a 2-D ndarray; shape (nz, 2 + num_cr) The computed values of intercept, slope, and cosmic-ray amplitudes (there will be num_cr cosmic-ray amplitudes) for each of the nz pixels. variances is a 2-D ndarray; shape (nz, 2 + num_cr) The variance for the intercept, slope, and for the amplitude of each cosmic ray that was detected. """ M = float(nframes_used) ngroups = ramp_data.shape[0] nz = ramp_data.shape[1] num_cr = int(num_cr) # x is an array (length nz) of matrices, each of which is the # independent variable of a linear equation. Each such matrix # has ngroups rows and 2 + num_cr columns. The first column is set # to 1, for finding the intercept. The second column is the time at # each group, for finding the slope. The remaining columns (if any), # are 0 for all rows prior to a certain point, then 1 for all # subsequent rows (i.e. the Heaviside function). The transition from # 0 to 1 is the location of a cosmic ray hit; the first 1 in a column # corresponds to the value in cr_flagged_2d being 1. x = np.zeros((nz, ngroups, 2 + num_cr), dtype=np.float64) x[:, :, 0] = 1. x[:, :, 1] = np.arange(ngroups, dtype=np.float64) * group_time + \ frame_time * (M + 1.) / 2. 
if num_cr > 0: sum_crs = cr_flagged_2d.cumsum(axis=0) for k in range(ngroups): s = slice(k, ngroups) for n in range(1, num_cr + 1): temp = np.where(np.logical_and(cr_flagged_2d[k] == 1, sum_crs[k] == n)) if len(temp[0]) > 0: index = (temp[0], s, n + 1) x[index] = 1 del temp, index y = np.transpose(ramp_data, (1, 0)).reshape((nz, ngroups, 1)) # ramp_cov is an array of nz matrices, each ngroups x ngroups. # each matrix gives the covariance of that pixel's ramp data ramp_cov = np.ones((nz, ngroups, ngroups), dtype=np.float64) # Use the previous fit to the data to populate the covariance matrix, # for each of the nz pixels. prev_fit_data has shape (ngroups, nz), # similar to the ramp data, but we want the nz axis to be the first # (we're constructing an array of nz matrix equations), so transpose # prev_fit_data. prev_fit_T = np.transpose(prev_fit_data, (1, 0)) for k in range(ngroups): # Populate the upper right, row by row. ramp_cov[:, k, k:ngroups] = prev_fit_T[:, k:k + 1] # Populate the lower left, column by column. ramp_cov[:, k:ngroups, k] = prev_fit_T[:, k:k + 1] # Give saturated pixels a very high high variance (hence a low weight) ramp_cov[:, k, k] += saturated_data[k, :] del prev_fit_T # iden is 2-D, but it can broadcast to 4-D. This is used to add terms to # the diagonal of the covariance matrix. iden = np.identity(ngroups) rn3d = readnoise.reshape((nz, 1, 1)) ramp_cov += (iden * rn3d**2) # prev_slope_data must be non-negative. flags = prev_slope_data < 0. prev_slope_data[flags] = 1. # The resulting fit parameters are # (xT @ ramp_cov^-1 @ x)^-1 @ [xT @ ramp_cov^-1 @ y] # = [y-intercept, slope, cr_amplitude_1, cr_amplitude_2, ...] # where @ means matrix multiplication. 
# shape of xT is (nz, 2 + num_cr, ngroups) xT = np.transpose(x, (0, 2, 1)) # shape of `ramp_invcov` is (nz, ngroups, ngroups) iden = iden.reshape((1, ngroups, ngroups)) ramp_invcov = la.solve(ramp_cov, iden) del iden # temp1 = xT @ ramp_invcov # np.einsum use is equivalent to matrix multiplication # shape of temp1 is (nz, 2 + num_cr, ngroups) temp1 = np.einsum('...ij,...jk->...ik', xT, ramp_invcov) # temp_var = xT @ ramp_invcov @ x # shape of temp_var is (nz, 2 + num_cr, 2 + num_cr) temp_var = np.einsum('...ij,...jk->...ik', temp1, x) # `fitparam_cov` is an array of nz covariance matrices. # fitparam_cov = (xT @ ramp_invcov @ x)^-1 # shape of fitparam_covar is (nz, 2 + num_cr, 2 + num_cr) I_2 = np.eye(2 + num_cr).reshape((1, 2 + num_cr, 2 + num_cr)) try: # inverse of temp_var fitparam_cov = la.solve(temp_var, I_2) except la.LinAlgError: # find the pixel with the singular matrix for z in range(nz): try: la.solve(temp_var[z], I_2) except la.LinAlgError as msg2: log.warning("singular matrix, z = %d" % z) raise la.LinAlgError(msg2) del I_2 # [xT @ ramp_invcov @ y] # shape of temp2 is (nz, 2 + num_cr, 1) temp2 = np.einsum('...ij,...jk->...ik', temp1, y) # shape of fitparam is (nz, 2 + num_cr, 1) fitparam = np.einsum('...ij,...jk->...ik', fitparam_cov, temp2) r_shape = fitparam.shape fitparam2d = fitparam.reshape((r_shape[0], r_shape[1])) del fitparam # shape of both result2d and variances is (nz, 2 + num_cr) fitparam_uncs = fitparam_cov.diagonal(axis1=1, axis2=2).copy() return (fitparam2d, fitparam_uncs) ''' <file_sep>/setup.py #!/usr/bin/env python3 from setuptools import setup setup(use_scm_version="src/stcal/_version.py") <file_sep>/docs/conf.py from pathlib import Path import os import sys from configparser import ConfigParser from datetime import datetime import importlib import sphinx import stsci_rtd_theme def setup(app): try: app.add_css_file("stsci.css") except AttributeError: app.add_stylesheet("stsci.css") REPO_ROOT = Path(__file__).parent.parent # Modules 
that automodapi will document need to be available # in the path: sys.path.insert(0, str(REPO_ROOT/"src"/"stcal")) # Read the package's setup.cfg so that we can use relevant # values here: conf = ConfigParser() conf.read(REPO_ROOT/"setup.cfg") setup_metadata = dict(conf.items("metadata")) project = setup_metadata["name"] author = setup_metadata["author"] copyright = f"{datetime.now().year}, {author}" package = importlib.import_module(setup_metadata["name"]) version = package.__version__.split("-", 1)[0] release = package.__version__ extensions = [ "sphinx_automodapi.automodapi", "numpydoc", ] autosummary_generate = True numpydoc_show_class_members = False autoclass_content = "both" html_theme = "stsci_rtd_theme" html_theme_options = { "collapse_navigation": True } html_theme_path = [stsci_rtd_theme.get_html_theme_path()] html_domain_indices = True html_sidebars = {"**": ["globaltoc.html", "relations.html", "searchbox.html"]} html_use_index = True <file_sep>/tox.ini [tox] envlist= py38,style,bandit [testenv] usedevelop= true extras= test passenv = TOXENV CI CODECOV_* HOME CRDS_* commands= !cov: pytest {posargs} cov: pytest --cov-report=xml --cov=src/stcal --cov-config=setup.cfg {posargs} [testenv:style] deps= flake8 commands= flake8 --count [testenv:bandit] deps= bandit commands= bandit -r -ll src [testenv:build-docs] extras= docs commands= sphinx-build -W docs/source build/docs [testenv:jwst-cov] deps= jwst[test] @ git+https://github.com/spacetelescope/jwst pytest-xdist commands= pytest -n auto --cov-report=xml --cov={toxinidir}/src/stcal --ignore-glob=timeconversion --ignore-glob=associations --pyargs {posargs:jwst} [testenv:romancal-cov] deps= romancal[test] @ git+https://github.com/spacetelescope/romancal commands= pytest --cov-report=xml --cov={toxinidir}/src/stcal --pyargs romancal <file_sep>/README.md # stcal [![Documentation Status](https://readthedocs.org/projects/stcal/badge/?version=latest)](http://stcal.readthedocs.io/en/latest/?badge=latest) 
[![CI](https://github.com/spacetelescope/stcal/actions/workflows/ci.yml/badge.svg)](https://github.com/spacetelescope/stcal/actions/workflows/ci.yml) [![codecov](https://codecov.io/gh/spacetelescope/stcal/branch/main/graph/badge.svg?token=C1LO00W9CZ)](https://codecov.io/gh/spacetelescope/stcal) STScI Calibration algorithms and tools. <file_sep>/src/stcal/ramp_fitting/ramp_fit_class.py class RampData: def __init__(self): """ Creates an internal ramp fit class. """ # Arrays from the data model self.data = None self.err = None self.groupdq = None self.pixeldq = None self.int_times = None # Meta information self.instrument_name = None self.frame_time = None self.group_time = None self.groupgap = None self.nframes = None self.drop_frames1 = None # Data quality flags self.flags_do_not_use = None self.flags_jump_det = None self.flags_saturated = None self.flags_no_gain_val = None self.flags_unreliable_slope = None def set_arrays(self, model): """ Set the arrays needed for ramp fitting. Sets the following arrays: data : 4-D array containing the pixel information. It has dimensions (nintegrations, ngroups, nrows, ncols) err : 4-D array containing the error information. It has dimensions (nintegrations, ngroups, nrows, ncols) groupdq :4-D array containing the data quality flags. It has dimensions (nintegrations, ngroups, nrows, ncols) pixeldq : 4-D array containing the pixel data quality information. It has dimensions (nintegrations, ngroups, nrows, ncols) int_times : list Time information for each integration (only JWST). Parameters ---------- model : Data model JWST or Roman Ramp Model """ # Get arrays from the data model self.data = model.data self.err = model.err self.groupdq = model.groupdq self.pixeldq = model.pixeldq if hasattr(model, 'int_times'): self.int_times = model.int_times def set_meta(self, model): """ Set the meta information needed for ramp fitting. name: The instrument name. frame_time: The time to read one frame. group_time: The time to read one group. 
groupgap: The number of frames that are not included in the group average nframes: The number of frames that are included in the group average drop_frames1: The number of frames dropped at the beginning of every integration. May not be used in some pipelines, so is defaulted to NoneType. Parameters ---------- model : Data model JWST or ROman Ramp Model """ # Get meta information self.instrument_name = model.meta.instrument.name self.frame_time = model.meta.exposure.frame_time self.group_time = model.meta.exposure.group_time self.groupgap = model.meta.exposure.groupgap self.nframes = model.meta.exposure.nframes # May not be available for all pipelines, so is defaulted to NoneType. if hasattr(model, 'drop_frames1'): self.drop_frames1 = model.exposure.drop_frames1 def set_dqflags(self, dqflags): """ Set the data quality flags needed for ramp fitting. Parameter --------- dqflags : dict A dictionary with specific key words needed for processing. """ # Get data quality flags self.flags_do_not_use = dqflags["DO_NOT_USE"] self.flags_jump_det = dqflags["JUMP_DET"] self.flags_saturated = dqflags["SATURATED"] self.flags_no_gain_val = dqflags["NO_GAIN_VALUE"] self.flags_unreliable_slope = dqflags["UNRELIABLE_SLOPE"]
016da594cc654831d955c15629771ef64af18bbf
[ "Markdown", "Python", "reStructuredText", "INI" ]
16
Python
eslavich/stcal
c42cd59d1e432c64d7b0a5384712d6e6c5ab4059
6e0b42b47b1062e3666ec90fe49005979448f6c2
refs/heads/master
<repo_name>jeffgoeken/jeffs_stuff<file_sep>/app/controllers/notes_controller.rb class NotesController < ApplicationController def create @entry = Entry.find(params[:entry_id]) @note = @entry.notes.create(note_params) redirect_to entry_path(@entry) end private def note_params params.require(:note).permit(:Initials, :Body) end end <file_sep>/app/models/entry.rb class Entry < ActiveRecord::Base has_many :notes end <file_sep>/app/views/entries/index.json.jbuilder json.array!(@entries) do |entry| json.extract! entry, :id, :date, :code, :escalated, :summary, :status, :created_by json.url entry_url(entry, format: :json) end
7a6eccf9848d3d027bbdd4b6ccd9b7fbfba1db63
[ "Ruby" ]
3
Ruby
jeffgoeken/jeffs_stuff
fc6d8b12c5c4e8bd37c55f0ebdae0d6d19818c66
03ae31fff351861c5b4473d1f5259c1e15c3d99c
refs/heads/master
<file_sep>#include <iostream> #include "common.h" using namespace std; extern int main_gm(string ip, u_int16_t port, const ZZ &psk, int lambda); int main_m(string ip, u_int16_t port, string id, const ZZ &psk); int main(int argc, char *argv[]) { // Console logger with color // usage https://github.com/gabime/spdlog auto Log = stdout_color_mt("console"); Log->info("Program started"); int oc; /*选项字符 */ char *ip = nullptr; char *name = nullptr; string psk = ""; string log_level; bool type = false; while ((oc = getopt(argc, argv, "gmhi:n:p:l:")) != -1) { switch (oc) { case 'g': type = true; break; case 'm': type = false; break; case 'i': ip = optarg; break; case 'n': name = optarg; break; case 'p': psk = optarg; break; case 'l': log_level = optarg; break; case 'h': cout << "usage: {-h|-m} [-i <ip>] [-n <id>] -p <PSK> [-l <log_level>]" << endl; return 0; default: cout << "usage: {-h|-m} [-i <ip>] [-n <id>] -p <PSK> [-l <log_level>]" << endl; break; } } if (psk == "") { Log->critical("Wrong usage: no psk"); return -1; } set_level(level::debug); if (log_level == "debug") set_level(level::debug); if (log_level == "info") set_level(level::info); if (log_level == "warn") set_level(level::warn); if (log_level == "err") set_level(level::err); if (log_level == "critical") set_level(level::critical); ZZ _psk = conv<ZZ>(atoi(psk.c_str())); if (type) {//GM main_gm("0.0.0.0", 9999, _psk, 64); } else { if (!ip) { Log->critical("Wrong usage: no ip"); return -1; } if (!name) { Log->critical("Wrong usage: no id"); return -1; } main_m(ip, 9999, name, _psk); } return 0; }
b52e6e057a600bfbc24756430f42ef6b9f1a086a
[ "C++" ]
1
C++
GroupCommuTeam/GroupCommu
a6fbef8efccd372ed07ed030820bcf03302579c9
bdfbb974b0e0885eb002f8403d22a9f03bcab539
refs/heads/master
<file_sep>/* * generated by Xtext */ package org.xtext.example.mydsl.serializer; import com.google.inject.Inject; import java.util.List; import org.eclipse.emf.ecore.EObject; import org.eclipse.xtext.IGrammarAccess; import org.eclipse.xtext.RuleCall; import org.eclipse.xtext.nodemodel.INode; import org.eclipse.xtext.serializer.analysis.GrammarAlias.AbstractElementAlias; import org.eclipse.xtext.serializer.analysis.GrammarAlias.AlternativeAlias; import org.eclipse.xtext.serializer.analysis.GrammarAlias.TokenAlias; import org.eclipse.xtext.serializer.analysis.ISyntacticSequencerPDAProvider.ISynNavigable; import org.eclipse.xtext.serializer.analysis.ISyntacticSequencerPDAProvider.ISynTransition; import org.eclipse.xtext.serializer.sequencer.AbstractSyntacticSequencer; import org.xtext.example.mydsl.services.TextualVerdulerGrammarAccess; @SuppressWarnings("all") public class TextualVerdulerSyntacticSequencer extends AbstractSyntacticSequencer { protected TextualVerdulerGrammarAccess grammarAccess; protected AbstractElementAlias match_LapsoTiempoLiteral_HoraKeyword_1_0_or_HorasKeyword_1_1; protected AbstractElementAlias match_LapsoTiempoNumerico_HoraKeyword_1_1_0_or_HorasKeyword_1_1_1; protected AbstractElementAlias match_MedidaPesoKilo_KiloKeyword_1_0_or_KilosKeyword_1_1; protected AbstractElementAlias match_MontoDinero_PesitoKeyword_1_2_or_PesitosKeyword_1_3_or_PesoKeyword_1_0_or_PesosKeyword_1_1; protected AbstractElementAlias match_PesoMagnitudFija_UnKeyword_1_0_q; protected AbstractElementAlias match_PesoMagnitudFija_UnKeyword_2_0_q; protected AbstractElementAlias match_Producto_LasKeyword_0_1_or_LasKeyword_0_3_or_LosKeyword_0_0_or_LosKeyword_0_2; protected AbstractElementAlias match_TareaRevisionProductos_LasKeyword_1_0_0_or_LosKeyword_1_0_1; protected AbstractElementAlias match_TareaRevisionProductos_LasKeyword_2_0_or_LosKeyword_2_1; @Inject protected void init(IGrammarAccess access) { grammarAccess = (TextualVerdulerGrammarAccess) access; 
match_LapsoTiempoLiteral_HoraKeyword_1_0_or_HorasKeyword_1_1 = new AlternativeAlias(false, false, new TokenAlias(false, false, grammarAccess.getLapsoTiempoLiteralAccess().getHoraKeyword_1_0()), new TokenAlias(false, false, grammarAccess.getLapsoTiempoLiteralAccess().getHorasKeyword_1_1())); match_LapsoTiempoNumerico_HoraKeyword_1_1_0_or_HorasKeyword_1_1_1 = new AlternativeAlias(false, false, new TokenAlias(false, false, grammarAccess.getLapsoTiempoNumericoAccess().getHoraKeyword_1_1_0()), new TokenAlias(false, false, grammarAccess.getLapsoTiempoNumericoAccess().getHorasKeyword_1_1_1())); match_MedidaPesoKilo_KiloKeyword_1_0_or_KilosKeyword_1_1 = new AlternativeAlias(false, false, new TokenAlias(false, false, grammarAccess.getMedidaPesoKiloAccess().getKiloKeyword_1_0()), new TokenAlias(false, false, grammarAccess.getMedidaPesoKiloAccess().getKilosKeyword_1_1())); match_MontoDinero_PesitoKeyword_1_2_or_PesitosKeyword_1_3_or_PesoKeyword_1_0_or_PesosKeyword_1_1 = new AlternativeAlias(false, false, new TokenAlias(false, false, grammarAccess.getMontoDineroAccess().getPesitoKeyword_1_2()), new TokenAlias(false, false, grammarAccess.getMontoDineroAccess().getPesitosKeyword_1_3()), new TokenAlias(false, false, grammarAccess.getMontoDineroAccess().getPesoKeyword_1_0()), new TokenAlias(false, false, grammarAccess.getMontoDineroAccess().getPesosKeyword_1_1())); match_PesoMagnitudFija_UnKeyword_1_0_q = new TokenAlias(false, true, grammarAccess.getPesoMagnitudFijaAccess().getUnKeyword_1_0()); match_PesoMagnitudFija_UnKeyword_2_0_q = new TokenAlias(false, true, grammarAccess.getPesoMagnitudFijaAccess().getUnKeyword_2_0()); match_Producto_LasKeyword_0_1_or_LasKeyword_0_3_or_LosKeyword_0_0_or_LosKeyword_0_2 = new AlternativeAlias(false, false, new TokenAlias(false, false, grammarAccess.getProductoAccess().getLasKeyword_0_1()), new TokenAlias(false, false, grammarAccess.getProductoAccess().getLasKeyword_0_3()), new TokenAlias(false, false, 
grammarAccess.getProductoAccess().getLosKeyword_0_0()), new TokenAlias(false, false, grammarAccess.getProductoAccess().getLosKeyword_0_2())); match_TareaRevisionProductos_LasKeyword_1_0_0_or_LosKeyword_1_0_1 = new AlternativeAlias(false, false, new TokenAlias(false, false, grammarAccess.getTareaRevisionProductosAccess().getLasKeyword_1_0_0()), new TokenAlias(false, false, grammarAccess.getTareaRevisionProductosAccess().getLosKeyword_1_0_1())); match_TareaRevisionProductos_LasKeyword_2_0_or_LosKeyword_2_1 = new AlternativeAlias(false, false, new TokenAlias(false, false, grammarAccess.getTareaRevisionProductosAccess().getLasKeyword_2_0()), new TokenAlias(false, false, grammarAccess.getTareaRevisionProductosAccess().getLosKeyword_2_1())); } @Override protected String getUnassignedRuleCallToken(EObject semanticObject, RuleCall ruleCall, INode node) { return ""; } @Override protected void emitUnassignedTokens(EObject semanticObject, ISynTransition transition, INode fromNode, INode toNode) { if (transition.getAmbiguousSyntaxes().isEmpty()) return; List<INode> transitionNodes = collectNodes(fromNode, toNode); for (AbstractElementAlias syntax : transition.getAmbiguousSyntaxes()) { List<INode> syntaxNodes = getNodesFor(transitionNodes, syntax); if(match_LapsoTiempoLiteral_HoraKeyword_1_0_or_HorasKeyword_1_1.equals(syntax)) emit_LapsoTiempoLiteral_HoraKeyword_1_0_or_HorasKeyword_1_1(semanticObject, getLastNavigableState(), syntaxNodes); else if(match_LapsoTiempoNumerico_HoraKeyword_1_1_0_or_HorasKeyword_1_1_1.equals(syntax)) emit_LapsoTiempoNumerico_HoraKeyword_1_1_0_or_HorasKeyword_1_1_1(semanticObject, getLastNavigableState(), syntaxNodes); else if(match_MedidaPesoKilo_KiloKeyword_1_0_or_KilosKeyword_1_1.equals(syntax)) emit_MedidaPesoKilo_KiloKeyword_1_0_or_KilosKeyword_1_1(semanticObject, getLastNavigableState(), syntaxNodes); else if(match_MontoDinero_PesitoKeyword_1_2_or_PesitosKeyword_1_3_or_PesoKeyword_1_0_or_PesosKeyword_1_1.equals(syntax)) 
emit_MontoDinero_PesitoKeyword_1_2_or_PesitosKeyword_1_3_or_PesoKeyword_1_0_or_PesosKeyword_1_1(semanticObject, getLastNavigableState(), syntaxNodes); else if(match_PesoMagnitudFija_UnKeyword_1_0_q.equals(syntax)) emit_PesoMagnitudFija_UnKeyword_1_0_q(semanticObject, getLastNavigableState(), syntaxNodes); else if(match_PesoMagnitudFija_UnKeyword_2_0_q.equals(syntax)) emit_PesoMagnitudFija_UnKeyword_2_0_q(semanticObject, getLastNavigableState(), syntaxNodes); else if(match_Producto_LasKeyword_0_1_or_LasKeyword_0_3_or_LosKeyword_0_0_or_LosKeyword_0_2.equals(syntax)) emit_Producto_LasKeyword_0_1_or_LasKeyword_0_3_or_LosKeyword_0_0_or_LosKeyword_0_2(semanticObject, getLastNavigableState(), syntaxNodes); else if(match_TareaRevisionProductos_LasKeyword_1_0_0_or_LosKeyword_1_0_1.equals(syntax)) emit_TareaRevisionProductos_LasKeyword_1_0_0_or_LosKeyword_1_0_1(semanticObject, getLastNavigableState(), syntaxNodes); else if(match_TareaRevisionProductos_LasKeyword_2_0_or_LosKeyword_2_1.equals(syntax)) emit_TareaRevisionProductos_LasKeyword_2_0_or_LosKeyword_2_1(semanticObject, getLastNavigableState(), syntaxNodes); else acceptNodes(getLastNavigableState(), syntaxNodes); } } /** * Ambiguous syntax: * 'hora' | 'horas' * * This ambiguous syntax occurs at: * horas=HorasLiteral (ambiguity) 'y' fraccionHora=FraccionHoraLiteral * horas=HorasLiteral (ambiguity) (rule end) */ protected void emit_LapsoTiempoLiteral_HoraKeyword_1_0_or_HorasKeyword_1_1(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } /** * Ambiguous syntax: * 'hora' | 'horas' * * This ambiguous syntax occurs at: * horas=INT (ambiguity) 'y' minutos=INT * horas=INT (ambiguity) (rule end) */ protected void emit_LapsoTiempoNumerico_HoraKeyword_1_1_0_or_HorasKeyword_1_1_1(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } /** * Ambiguous syntax: * 'kilo' | 'kilos' * * This ambiguous syntax occurs at: * (rule start) 
(ambiguity) (rule start) */ protected void emit_MedidaPesoKilo_KiloKeyword_1_0_or_KilosKeyword_1_1(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } /** * Ambiguous syntax: * 'peso' | 'pesos' | 'pesito' | 'pesitos' * * This ambiguous syntax occurs at: * precio=INT (ambiguity) (rule end) */ protected void emit_MontoDinero_PesitoKeyword_1_2_or_PesitosKeyword_1_3_or_PesoKeyword_1_0_or_PesosKeyword_1_1(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } /** * Ambiguous syntax: * 'un'? * * This ambiguous syntax occurs at: * (rule start) (ambiguity) pesaCuartoKilo?='cuarto kilo' */ protected void emit_PesoMagnitudFija_UnKeyword_1_0_q(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } /** * Ambiguous syntax: * 'un'? * * This ambiguous syntax occurs at: * (rule start) (ambiguity) pesaUnKilo?='kilo' */ protected void emit_PesoMagnitudFija_UnKeyword_2_0_q(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } /** * Ambiguous syntax: * 'Los' | 'Las' | 'los' | 'las' * * This ambiguous syntax occurs at: * (rule start) (ambiguity) name=ID */ protected void emit_Producto_LasKeyword_0_1_or_LasKeyword_0_3_or_LosKeyword_0_0_or_LosKeyword_0_2(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } /** * Ambiguous syntax: * 'las' | 'los' * * This ambiguous syntax occurs at: * (rule start) 'se revisaron' (ambiguity) productosRevisados+=[Producto|ID] * productosRevisados+=[Producto|ID] ',' (ambiguity) productosRevisados+=[Producto|ID] */ protected void emit_TareaRevisionProductos_LasKeyword_1_0_0_or_LosKeyword_1_0_1(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } /** * Ambiguous syntax: * 'las' | 'los' * * This ambiguous syntax occurs at: * (rule start) 'se revisaron' 
(ambiguity) productosRevisados+=[Producto|ID] * productosRevisados+=[Producto|ID] ',' (ambiguity) productosRevisados+=[Producto|ID] */ protected void emit_TareaRevisionProductos_LasKeyword_2_0_or_LosKeyword_2_1(EObject semanticObject, ISynNavigable transition, List<INode> nodes) { acceptNodes(transition, nodes); } } <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler.impl; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.impl.MinimalEObjectImpl; import org.xtext.example.mydsl.textualVerduler.ProductoConPrecio; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Producto Con Precio</b></em>'. * <!-- end-user-doc --> * * @generated */ public class ProductoConPrecioImpl extends MinimalEObjectImpl.Container implements ProductoConPrecio { /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ProductoConPrecioImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.PRODUCTO_CON_PRECIO; } } //ProductoConPrecioImpl <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler.impl; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.xtext.example.mydsl.textualVerduler.ClienteConDeuda; import org.xtext.example.mydsl.textualVerduler.MontoDinero; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Cliente Con Deuda</b></em>'. 
* <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.ClienteConDeudaImpl#getMontoDeuda <em>Monto Deuda</em>}</li> * </ul> * * @generated */ public class ClienteConDeudaImpl extends ClienteImpl implements ClienteConDeuda { /** * The cached value of the '{@link #getMontoDeuda() <em>Monto Deuda</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMontoDeuda() * @generated * @ordered */ protected MontoDinero montoDeuda; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ClienteConDeudaImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.CLIENTE_CON_DEUDA; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public MontoDinero getMontoDeuda() { return montoDeuda; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetMontoDeuda(MontoDinero newMontoDeuda, NotificationChain msgs) { MontoDinero oldMontoDeuda = montoDeuda; montoDeuda = newMontoDeuda; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA, oldMontoDeuda, newMontoDeuda); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setMontoDeuda(MontoDinero newMontoDeuda) { if (newMontoDeuda != montoDeuda) { NotificationChain msgs = null; if (montoDeuda != null) msgs = ((InternalEObject)montoDeuda).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA, null, msgs); if (newMontoDeuda != null) msgs = ((InternalEObject)newMontoDeuda).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - 
TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA, null, msgs); msgs = basicSetMontoDeuda(newMontoDeuda, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA, newMontoDeuda, newMontoDeuda)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA: return basicSetMontoDeuda(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA: return getMontoDeuda(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA: setMontoDeuda((MontoDinero)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA: setMontoDeuda((MontoDinero)null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case TextualVerdulerPackage.CLIENTE_CON_DEUDA__MONTO_DEUDA: return montoDeuda != null; } return super.eIsSet(featureID); } } //ClienteConDeudaImpl <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler.impl; import org.eclipse.emf.common.notify.Notification; import 
org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.xtext.example.mydsl.textualVerduler.PesoMagnitudFija; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Peso Magnitud Fija</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.PesoMagnitudFijaImpl#isPesaMedioKilo <em>Pesa Medio Kilo</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.PesoMagnitudFijaImpl#isPesaCuartoKilo <em>Pesa Cuarto Kilo</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.PesoMagnitudFijaImpl#isPesaUnKilo <em>Pesa Un Kilo</em>}</li> * </ul> * * @generated */ public class PesoMagnitudFijaImpl extends PesoImpl implements PesoMagnitudFija { /** * The default value of the '{@link #isPesaMedioKilo() <em>Pesa Medio Kilo</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isPesaMedioKilo() * @generated * @ordered */ protected static final boolean PESA_MEDIO_KILO_EDEFAULT = false; /** * The cached value of the '{@link #isPesaMedioKilo() <em>Pesa Medio Kilo</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isPesaMedioKilo() * @generated * @ordered */ protected boolean pesaMedioKilo = PESA_MEDIO_KILO_EDEFAULT; /** * The default value of the '{@link #isPesaCuartoKilo() <em>Pesa Cuarto Kilo</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isPesaCuartoKilo() * @generated * @ordered */ protected static final boolean PESA_CUARTO_KILO_EDEFAULT = false; /** * The cached value of the '{@link #isPesaCuartoKilo() <em>Pesa Cuarto Kilo</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isPesaCuartoKilo() * @generated * @ordered */ protected boolean pesaCuartoKilo = PESA_CUARTO_KILO_EDEFAULT; /** * The default value of the '{@link #isPesaUnKilo() <em>Pesa Un Kilo</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isPesaUnKilo() * @generated * @ordered */ protected static final boolean PESA_UN_KILO_EDEFAULT = false; /** * The cached value of the '{@link #isPesaUnKilo() <em>Pesa Un Kilo</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #isPesaUnKilo() * @generated * @ordered */ protected boolean pesaUnKilo = PESA_UN_KILO_EDEFAULT; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected PesoMagnitudFijaImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.PESO_MAGNITUD_FIJA; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public boolean isPesaMedioKilo() { return pesaMedioKilo; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setPesaMedioKilo(boolean newPesaMedioKilo) { boolean oldPesaMedioKilo = pesaMedioKilo; pesaMedioKilo = newPesaMedioKilo; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_MEDIO_KILO, oldPesaMedioKilo, pesaMedioKilo)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public boolean isPesaCuartoKilo() { return pesaCuartoKilo; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setPesaCuartoKilo(boolean newPesaCuartoKilo) { boolean oldPesaCuartoKilo = pesaCuartoKilo; pesaCuartoKilo = newPesaCuartoKilo; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_CUARTO_KILO, oldPesaCuartoKilo, pesaCuartoKilo)); } /** * <!-- 
begin-user-doc --> * <!-- end-user-doc --> * @generated */ public boolean isPesaUnKilo() { return pesaUnKilo; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setPesaUnKilo(boolean newPesaUnKilo) { boolean oldPesaUnKilo = pesaUnKilo; pesaUnKilo = newPesaUnKilo; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_UN_KILO, oldPesaUnKilo, pesaUnKilo)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_MEDIO_KILO: return isPesaMedioKilo(); case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_CUARTO_KILO: return isPesaCuartoKilo(); case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_UN_KILO: return isPesaUnKilo(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_MEDIO_KILO: setPesaMedioKilo((Boolean)newValue); return; case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_CUARTO_KILO: setPesaCuartoKilo((Boolean)newValue); return; case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_UN_KILO: setPesaUnKilo((Boolean)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_MEDIO_KILO: setPesaMedioKilo(PESA_MEDIO_KILO_EDEFAULT); return; case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_CUARTO_KILO: setPesaCuartoKilo(PESA_CUARTO_KILO_EDEFAULT); return; case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_UN_KILO: setPesaUnKilo(PESA_UN_KILO_EDEFAULT); return; } super.eUnset(featureID); } /** * <!-- 
begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_MEDIO_KILO: return pesaMedioKilo != PESA_MEDIO_KILO_EDEFAULT; case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_CUARTO_KILO: return pesaCuartoKilo != PESA_CUARTO_KILO_EDEFAULT; case TextualVerdulerPackage.PESO_MAGNITUD_FIJA__PESA_UN_KILO: return pesaUnKilo != PESA_UN_KILO_EDEFAULT; } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (pesaMedioKilo: "); result.append(pesaMedioKilo); result.append(", pesaCuartoKilo: "); result.append(pesaCuartoKilo); result.append(", pesaUnKilo: "); result.append(pesaUnKilo); result.append(')'); return result.toString(); } } //PesoMagnitudFijaImpl <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler.impl; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.impl.MinimalEObjectImpl; import org.xtext.example.mydsl.textualVerduler.Horario; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Horario</b></em>'. 
* <!-- end-user-doc --> * * @generated */ public class HorarioImpl extends MinimalEObjectImpl.Container implements Horario { /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected HorarioImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.HORARIO; } } //HorarioImpl <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EObject; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Venta</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.Venta#getComprador <em>Comprador</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.Venta#getItemsVendidos <em>Items Vendidos</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.Venta#getTotalRedondeado <em>Total Redondeado</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.Venta#getTotalPagado <em>Total Pagado</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.Venta#getTotalDebiendo <em>Total Debiendo</em>}</li> * </ul> * * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getVenta() * @model * @generated */ public interface Venta extends EObject { /** * Returns the value of the '<em><b>Comprador</b></em>' reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Comprador</em>' reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Comprador</em>' reference. 
* @see #setComprador(Cliente) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getVenta_Comprador() * @model * @generated */ Cliente getComprador(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.Venta#getComprador <em>Comprador</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Comprador</em>' reference. * @see #getComprador() * @generated */ void setComprador(Cliente value); /** * Returns the value of the '<em><b>Items Vendidos</b></em>' containment reference list. * The list contents are of type {@link org.xtext.example.mydsl.textualVerduler.ItemVenta}. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Items Vendidos</em>' containment reference list isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Items Vendidos</em>' containment reference list. * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getVenta_ItemsVendidos() * @model containment="true" * @generated */ EList<ItemVenta> getItemsVendidos(); /** * Returns the value of the '<em><b>Total Redondeado</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Total Redondeado</em>' containment reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Total Redondeado</em>' containment reference. * @see #setTotalRedondeado(MontoDinero) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getVenta_TotalRedondeado() * @model containment="true" * @generated */ MontoDinero getTotalRedondeado(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.Venta#getTotalRedondeado <em>Total Redondeado</em>}' containment reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Total Redondeado</em>' containment reference. * @see #getTotalRedondeado() * @generated */ void setTotalRedondeado(MontoDinero value); /** * Returns the value of the '<em><b>Total Pagado</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Total Pagado</em>' containment reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Total Pagado</em>' containment reference. * @see #setTotalPagado(MontoDinero) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getVenta_TotalPagado() * @model containment="true" * @generated */ MontoDinero getTotalPagado(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.Venta#getTotalPagado <em>Total Pagado</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Total Pagado</em>' containment reference. * @see #getTotalPagado() * @generated */ void setTotalPagado(MontoDinero value); /** * Returns the value of the '<em><b>Total Debiendo</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Total Debiendo</em>' containment reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Total Debiendo</em>' containment reference. * @see #setTotalDebiendo(MontoDinero) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getVenta_TotalDebiendo() * @model containment="true" * @generated */ MontoDinero getTotalDebiendo(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.Venta#getTotalDebiendo <em>Total Debiendo</em>}' containment reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Total Debiendo</em>' containment reference. * @see #getTotalDebiendo() * @generated */ void setTotalDebiendo(MontoDinero value); } // Venta <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler.impl; import java.util.Collection; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.common.util.EList; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.impl.MinimalEObjectImpl; import org.eclipse.emf.ecore.util.EObjectContainmentEList; import org.eclipse.emf.ecore.util.InternalEList; import org.xtext.example.mydsl.textualVerduler.Cliente; import org.xtext.example.mydsl.textualVerduler.ItemVenta; import org.xtext.example.mydsl.textualVerduler.MontoDinero; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; import org.xtext.example.mydsl.textualVerduler.Venta; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Venta</b></em>'. 
* <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.VentaImpl#getComprador <em>Comprador</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.VentaImpl#getItemsVendidos <em>Items Vendidos</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.VentaImpl#getTotalRedondeado <em>Total Redondeado</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.VentaImpl#getTotalPagado <em>Total Pagado</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.VentaImpl#getTotalDebiendo <em>Total Debiendo</em>}</li> * </ul> * * @generated */ public class VentaImpl extends MinimalEObjectImpl.Container implements Venta { /** * The cached value of the '{@link #getComprador() <em>Comprador</em>}' reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getComprador() * @generated * @ordered */ protected Cliente comprador; /** * The cached value of the '{@link #getItemsVendidos() <em>Items Vendidos</em>}' containment reference list. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getItemsVendidos() * @generated * @ordered */ protected EList<ItemVenta> itemsVendidos; /** * The cached value of the '{@link #getTotalRedondeado() <em>Total Redondeado</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getTotalRedondeado() * @generated * @ordered */ protected MontoDinero totalRedondeado; /** * The cached value of the '{@link #getTotalPagado() <em>Total Pagado</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getTotalPagado() * @generated * @ordered */ protected MontoDinero totalPagado; /** * The cached value of the '{@link #getTotalDebiendo() <em>Total Debiendo</em>}' containment reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getTotalDebiendo() * @generated * @ordered */ protected MontoDinero totalDebiendo; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected VentaImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.VENTA; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Cliente getComprador() { if (comprador != null && comprador.eIsProxy()) { InternalEObject oldComprador = (InternalEObject)comprador; comprador = (Cliente)eResolveProxy(oldComprador); if (comprador != oldComprador) { if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.RESOLVE, TextualVerdulerPackage.VENTA__COMPRADOR, oldComprador, comprador)); } } return comprador; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Cliente basicGetComprador() { return comprador; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setComprador(Cliente newComprador) { Cliente oldComprador = comprador; comprador = newComprador; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.VENTA__COMPRADOR, oldComprador, comprador)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public EList<ItemVenta> getItemsVendidos() { if (itemsVendidos == null) { itemsVendidos = new EObjectContainmentEList<ItemVenta>(ItemVenta.class, this, TextualVerdulerPackage.VENTA__ITEMS_VENDIDOS); } return itemsVendidos; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public MontoDinero getTotalRedondeado() { return totalRedondeado; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetTotalRedondeado(MontoDinero newTotalRedondeado, NotificationChain msgs) { MontoDinero oldTotalRedondeado = totalRedondeado; 
totalRedondeado = newTotalRedondeado;
// Tail of basicSetTotalRedondeado(MontoDinero, NotificationChain): the new value is already
// stored; queue a SET notification on the chain when any adapter requires it.
if (eNotificationRequired()) {
	ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO, oldTotalRedondeado, newTotalRedondeado);
	if (msgs == null) msgs = notification; else msgs.add(notification);
}
return msgs;
}

/**
 * <!-- begin-user-doc -->
 * Replaces the contained 'totalRedondeado' (rounded total) of this Venta:
 * detaches the old contained MontoDinero, attaches the new one, and dispatches
 * the resulting notification chain. When the value is identity-unchanged it
 * still emits a "touch" SET notification if listeners are registered.
 * <!-- end-user-doc -->
 * @generated
 */
public void setTotalRedondeado(MontoDinero newTotalRedondeado) {
	if (newTotalRedondeado != totalRedondeado) {
		NotificationChain msgs = null;
		if (totalRedondeado != null)
			msgs = ((InternalEObject)totalRedondeado).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO, null, msgs);
		if (newTotalRedondeado != null)
			msgs = ((InternalEObject)newTotalRedondeado).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO, null, msgs);
		msgs = basicSetTotalRedondeado(newTotalRedondeado, msgs);
		if (msgs != null) msgs.dispatch();
	}
	else if (eNotificationRequired())
		eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO, newTotalRedondeado, newTotalRedondeado));
}

/**
 * <!-- begin-user-doc -->
 * Returns the contained 'totalPagado' (amount paid), or null when unset.
 * <!-- end-user-doc -->
 * @generated
 */
public MontoDinero getTotalPagado() {
	return totalPagado;
}

/**
 * <!-- begin-user-doc -->
 * Stores 'totalPagado' without inverse handshaking and appends the SET
 * notification to the given chain; used by setTotalPagado and eInverseRemove.
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetTotalPagado(MontoDinero newTotalPagado, NotificationChain msgs) {
	MontoDinero oldTotalPagado = totalPagado;
	totalPagado = newTotalPagado;
	if (eNotificationRequired()) {
		ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.VENTA__TOTAL_PAGADO, oldTotalPagado, newTotalPagado);
		if (msgs == null) msgs = notification; else msgs.add(notification);
	}
	return msgs;
}

/**
 * <!-- begin-user-doc -->
 * Replaces the contained 'totalPagado' value: detaches the old containment,
 * attaches the new one, then dispatches any queued notifications.
 * <!-- end-user-doc -->
 * @generated
 */
public void setTotalPagado(MontoDinero newTotalPagado) {
	if (newTotalPagado != totalPagado) {
		NotificationChain msgs =
		null;
		if (totalPagado != null)
			msgs = ((InternalEObject)totalPagado).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.VENTA__TOTAL_PAGADO, null, msgs);
		if (newTotalPagado != null)
			msgs = ((InternalEObject)newTotalPagado).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.VENTA__TOTAL_PAGADO, null, msgs);
		msgs = basicSetTotalPagado(newTotalPagado, msgs);
		if (msgs != null) msgs.dispatch();
	}
	else if (eNotificationRequired())
		eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.VENTA__TOTAL_PAGADO, newTotalPagado, newTotalPagado));
}

/**
 * <!-- begin-user-doc -->
 * Returns the contained 'totalDebiendo' (amount still owed), or null when unset.
 * <!-- end-user-doc -->
 * @generated
 */
public MontoDinero getTotalDebiendo() {
	return totalDebiendo;
}

/**
 * <!-- begin-user-doc -->
 * Stores 'totalDebiendo' without inverse handshaking and appends the SET
 * notification to the given chain.
 * <!-- end-user-doc -->
 * @generated
 */
public NotificationChain basicSetTotalDebiendo(MontoDinero newTotalDebiendo, NotificationChain msgs) {
	MontoDinero oldTotalDebiendo = totalDebiendo;
	totalDebiendo = newTotalDebiendo;
	if (eNotificationRequired()) {
		ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO, oldTotalDebiendo, newTotalDebiendo);
		if (msgs == null) msgs = notification; else msgs.add(notification);
	}
	return msgs;
}

/**
 * <!-- begin-user-doc -->
 * Replaces the contained 'totalDebiendo' value and dispatches notifications.
 * <!-- end-user-doc -->
 * @generated
 */
public void setTotalDebiendo(MontoDinero newTotalDebiendo) {
	if (newTotalDebiendo != totalDebiendo) {
		NotificationChain msgs = null;
		if (totalDebiendo != null)
			msgs = ((InternalEObject)totalDebiendo).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO, null, msgs);
		if (newTotalDebiendo != null)
			msgs = ((InternalEObject)newTotalDebiendo).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO, null, msgs);
		msgs = basicSetTotalDebiendo(newTotalDebiendo, msgs);
		if (msgs != null) msgs.dispatch();
	}
	else if (eNotificationRequired())
		eNotify(new ENotificationImpl(this, Notification.SET,
TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO, newTotalDebiendo, newTotalDebiendo));
}

/**
 * <!-- begin-user-doc -->
 * Routes containment removal of a child object to the matching basicSetX /
 * basicRemove helper for the affected feature.
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
	switch (featureID) {
		case TextualVerdulerPackage.VENTA__ITEMS_VENDIDOS:
			return ((InternalEList<?>)getItemsVendidos()).basicRemove(otherEnd, msgs);
		case TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO:
			return basicSetTotalRedondeado(null, msgs);
		case TextualVerdulerPackage.VENTA__TOTAL_PAGADO:
			return basicSetTotalPagado(null, msgs);
		case TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO:
			return basicSetTotalDebiendo(null, msgs);
	}
	return super.eInverseRemove(otherEnd, featureID, msgs);
}

/**
 * <!-- begin-user-doc -->
 * Reflective feature read. For the cross-referenced 'comprador', proxies are
 * resolved only when 'resolve' is true (basicGetComprador skips resolution).
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
	switch (featureID) {
		case TextualVerdulerPackage.VENTA__COMPRADOR:
			if (resolve) return getComprador();
			return basicGetComprador();
		case TextualVerdulerPackage.VENTA__ITEMS_VENDIDOS:
			return getItemsVendidos();
		case TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO:
			return getTotalRedondeado();
		case TextualVerdulerPackage.VENTA__TOTAL_PAGADO:
			return getTotalPagado();
		case TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO:
			return getTotalDebiendo();
	}
	return super.eGet(featureID, resolve, coreType);
}

/**
 * <!-- begin-user-doc -->
 * Reflective feature write; the many-valued 'itemsVendidos' is cleared and
 * refilled from the supplied collection.
 * <!-- end-user-doc -->
 * @generated
 */
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
	switch (featureID) {
		case TextualVerdulerPackage.VENTA__COMPRADOR:
			setComprador((Cliente)newValue);
			return;
		case TextualVerdulerPackage.VENTA__ITEMS_VENDIDOS:
			getItemsVendidos().clear();
			getItemsVendidos().addAll((Collection<? extends ItemVenta>)newValue);
			return;
		case TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO:
			setTotalRedondeado((MontoDinero)newValue);
			return;
		case TextualVerdulerPackage.VENTA__TOTAL_PAGADO:
			setTotalPagado((MontoDinero)newValue);
			return;
		case TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO:
			setTotalDebiendo((MontoDinero)newValue);
			return;
	}
	super.eSet(featureID, newValue);
}

/**
 * <!-- begin-user-doc -->
 * Reflective unset: restores each feature to its default (null / empty list).
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public void eUnset(int featureID) {
	switch (featureID) {
		case TextualVerdulerPackage.VENTA__COMPRADOR:
			setComprador((Cliente)null);
			return;
		case TextualVerdulerPackage.VENTA__ITEMS_VENDIDOS:
			getItemsVendidos().clear();
			return;
		case TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO:
			setTotalRedondeado((MontoDinero)null);
			return;
		case TextualVerdulerPackage.VENTA__TOTAL_PAGADO:
			setTotalPagado((MontoDinero)null);
			return;
		case TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO:
			setTotalDebiendo((MontoDinero)null);
			return;
	}
	super.eUnset(featureID);
}

/**
 * <!-- begin-user-doc -->
 * Reflective "is set" check: a feature counts as set when it differs from its
 * default (non-null reference / non-empty list).
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public boolean eIsSet(int featureID) {
	switch (featureID) {
		case TextualVerdulerPackage.VENTA__COMPRADOR:
			return comprador != null;
		case TextualVerdulerPackage.VENTA__ITEMS_VENDIDOS:
			return itemsVendidos != null && !itemsVendidos.isEmpty();
		case TextualVerdulerPackage.VENTA__TOTAL_REDONDEADO:
			return totalRedondeado != null;
		case TextualVerdulerPackage.VENTA__TOTAL_PAGADO:
			return totalPagado != null;
		case TextualVerdulerPackage.VENTA__TOTAL_DEBIENDO:
			return totalDebiendo != null;
	}
	return super.eIsSet(featureID);
}

} //VentaImpl
<file_sep>/*
 * generated by Xtext
 */
package org.xtext.example.mydsl.services;

import com.google.inject.Singleton;
import com.google.inject.Inject;
import java.util.List;
import org.eclipse.xtext.*;
import org.eclipse.xtext.service.GrammarProvider;
import org.eclipse.xtext.service.AbstractElementFinder.*;
import org.eclipse.xtext.common.services.TerminalsGrammarAccess;

@Singleton
// Xtext-generated grammar access: one inner element-finder class per parser rule,
// exposing the rule's AST elements (keywords, assignments, rule calls) by position.
// Generated code - do not hand-edit logic; regenerate from the .xtext grammar.
public class TextualVerdulerGrammarAccess extends AbstractGrammarElementFinder {

	// Elements of the root rule: Verduleria: (clientes+=Cliente | productos+=ProductoConPrecio | ventas+=Venta | tareas+=Tarea)*;
	public class VerduleriaElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Verduleria");
		private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
		private final Assignment cClientesAssignment_0 = (Assignment)cAlternatives.eContents().get(0);
		private final RuleCall cClientesClienteParserRuleCall_0_0 = (RuleCall)cClientesAssignment_0.eContents().get(0);
		private final Assignment cProductosAssignment_1 = (Assignment)cAlternatives.eContents().get(1);
		private final RuleCall cProductosProductoConPrecioParserRuleCall_1_0 = (RuleCall)cProductosAssignment_1.eContents().get(0);
		private final Assignment cVentasAssignment_2 = (Assignment)cAlternatives.eContents().get(2);
		private final RuleCall cVentasVentaParserRuleCall_2_0 = (RuleCall)cVentasAssignment_2.eContents().get(0);
		private final Assignment cTareasAssignment_3 = (Assignment)cAlternatives.eContents().get(3);
		private final RuleCall cTareasTareaParserRuleCall_3_0 = (RuleCall)cTareasAssignment_3.eContents().get(0);

		//Verduleria:
		//	(clientes+=Cliente | productos+=ProductoConPrecio | ventas+=Venta | tareas+=Tarea)*;
		@Override public ParserRule getRule() { return rule; }

		//(clientes+=Cliente | productos+=ProductoConPrecio | ventas+=Venta | tareas+=Tarea)*
		public Alternatives getAlternatives() { return cAlternatives; }

		//clientes+=Cliente
		public Assignment getClientesAssignment_0() { return cClientesAssignment_0; }

		//Cliente
		public RuleCall getClientesClienteParserRuleCall_0_0() { return cClientesClienteParserRuleCall_0_0; }

		//productos+=ProductoConPrecio
		public Assignment getProductosAssignment_1() { return cProductosAssignment_1; }

		//ProductoConPrecio
		public RuleCall getProductosProductoConPrecioParserRuleCall_1_0() { return cProductosProductoConPrecioParserRuleCall_1_0; }

		//ventas+=Venta
		public Assignment getVentasAssignment_2() {
			return cVentasAssignment_2;
		}

		//Venta
		public RuleCall getVentasVentaParserRuleCall_2_0() { return cVentasVentaParserRuleCall_2_0; }

		//tareas+=Tarea
		public Assignment getTareasAssignment_3() { return cTareasAssignment_3; }

		//Tarea
		public RuleCall getTareasTareaParserRuleCall_3_0() { return cTareasTareaParserRuleCall_3_0; }
	}

	// Elements of: Cliente: ClienteConDeuda | ClienteAlDia | ClienteConCredito;
	public class ClienteElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Cliente");
		private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
		private final RuleCall cClienteConDeudaParserRuleCall_0 = (RuleCall)cAlternatives.eContents().get(0);
		private final RuleCall cClienteAlDiaParserRuleCall_1 = (RuleCall)cAlternatives.eContents().get(1);
		private final RuleCall cClienteConCreditoParserRuleCall_2 = (RuleCall)cAlternatives.eContents().get(2);

		//Cliente:
		//	ClienteConDeuda | ClienteAlDia | ClienteConCredito;
		@Override public ParserRule getRule() { return rule; }

		//ClienteConDeuda | ClienteAlDia | ClienteConCredito
		public Alternatives getAlternatives() { return cAlternatives; }

		//ClienteConDeuda
		public RuleCall getClienteConDeudaParserRuleCall_0() { return cClienteConDeudaParserRuleCall_0; }

		//ClienteAlDia
		public RuleCall getClienteAlDiaParserRuleCall_1() { return cClienteAlDiaParserRuleCall_1; }

		//ClienteConCredito
		public RuleCall getClienteConCreditoParserRuleCall_2() { return cClienteConCreditoParserRuleCall_2; }
	}

	// Elements of: ClienteConDeuda: "Cliente" name=ID ", debe" montoDeuda=MontoDinero;
	public class ClienteConDeudaElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "ClienteConDeuda");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Keyword cClienteKeyword_0 = (Keyword)cGroup.eContents().get(0);
		private final Assignment cNameAssignment_1 = (Assignment)cGroup.eContents().get(1);
		private final RuleCall cNameIDTerminalRuleCall_1_0 = (RuleCall)cNameAssignment_1.eContents().get(0);
		private final Keyword
cDebeKeyword_2 = (Keyword)cGroup.eContents().get(2);
		private final Assignment cMontoDeudaAssignment_3 = (Assignment)cGroup.eContents().get(3);
		private final RuleCall cMontoDeudaMontoDineroParserRuleCall_3_0 = (RuleCall)cMontoDeudaAssignment_3.eContents().get(0);

		//ClienteConDeuda:
		//	"Cliente" name=ID ", debe" montoDeuda=MontoDinero;
		@Override public ParserRule getRule() { return rule; }

		//"Cliente" name=ID ", debe" montoDeuda=MontoDinero
		public Group getGroup() { return cGroup; }

		//"Cliente"
		public Keyword getClienteKeyword_0() { return cClienteKeyword_0; }

		//name=ID
		public Assignment getNameAssignment_1() { return cNameAssignment_1; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_1_0() { return cNameIDTerminalRuleCall_1_0; }

		//", debe"
		public Keyword getDebeKeyword_2() { return cDebeKeyword_2; }

		//montoDeuda=MontoDinero
		public Assignment getMontoDeudaAssignment_3() { return cMontoDeudaAssignment_3; }

		//MontoDinero
		public RuleCall getMontoDeudaMontoDineroParserRuleCall_3_0() { return cMontoDeudaMontoDineroParserRuleCall_3_0; }
	}

	// Elements of: ClienteAlDia: "Cliente" name=ID "al dia.";
	public class ClienteAlDiaElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "ClienteAlDia");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Keyword cClienteKeyword_0 = (Keyword)cGroup.eContents().get(0);
		private final Assignment cNameAssignment_1 = (Assignment)cGroup.eContents().get(1);
		private final RuleCall cNameIDTerminalRuleCall_1_0 = (RuleCall)cNameAssignment_1.eContents().get(0);
		private final Keyword cAlDiaKeyword_2 = (Keyword)cGroup.eContents().get(2);

		//ClienteAlDia:
		//	"Cliente" name=ID "al dia.";
		@Override public ParserRule getRule() { return rule; }

		//"Cliente" name=ID "al dia."
		public Group getGroup() { return cGroup; }

		//"Cliente"
		public Keyword getClienteKeyword_0() { return cClienteKeyword_0; }

		//name=ID
		public Assignment getNameAssignment_1() { return cNameAssignment_1; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_1_0() { return cNameIDTerminalRuleCall_1_0; }

		//"al dia."
		public Keyword getAlDiaKeyword_2() { return cAlDiaKeyword_2; }
	}

	// Elements of: ClienteConCredito: "Cliente" name=ID ", tiene credito" montoCredito=MontoDinero;
	public class ClienteConCreditoElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "ClienteConCredito");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Keyword cClienteKeyword_0 = (Keyword)cGroup.eContents().get(0);
		private final Assignment cNameAssignment_1 = (Assignment)cGroup.eContents().get(1);
		private final RuleCall cNameIDTerminalRuleCall_1_0 = (RuleCall)cNameAssignment_1.eContents().get(0);
		private final Keyword cTieneCreditoKeyword_2 = (Keyword)cGroup.eContents().get(2);
		private final Assignment cMontoCreditoAssignment_3 = (Assignment)cGroup.eContents().get(3);
		private final RuleCall cMontoCreditoMontoDineroParserRuleCall_3_0 = (RuleCall)cMontoCreditoAssignment_3.eContents().get(0);

		//ClienteConCredito:
		//	"Cliente" name=ID ", tiene credito" montoCredito=MontoDinero;
		@Override public ParserRule getRule() { return rule; }

		//"Cliente" name=ID ", tiene credito" montoCredito=MontoDinero
		public Group getGroup() { return cGroup; }

		//"Cliente"
		public Keyword getClienteKeyword_0() { return cClienteKeyword_0; }

		//name=ID
		public Assignment getNameAssignment_1() { return cNameAssignment_1; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_1_0() { return cNameIDTerminalRuleCall_1_0; }

		//", tiene credito"
		public Keyword getTieneCreditoKeyword_2() { return cTieneCreditoKeyword_2; }

		//montoCredito=MontoDinero
		public Assignment getMontoCreditoAssignment_3() { return cMontoCreditoAssignment_3; }

		//MontoDinero
		public RuleCall getMontoCreditoMontoDineroParserRuleCall_3_0() { return
cMontoCreditoMontoDineroParserRuleCall_3_0; }
	}

	// Elements of: ProductoConPrecio: Producto "salen" precio=PrecioPorPeso ".";
	public class ProductoConPrecioElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "ProductoConPrecio");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final RuleCall cProductoParserRuleCall_0 = (RuleCall)cGroup.eContents().get(0);
		private final Keyword cSalenKeyword_1 = (Keyword)cGroup.eContents().get(1);
		private final Assignment cPrecioAssignment_2 = (Assignment)cGroup.eContents().get(2);
		private final RuleCall cPrecioPrecioPorPesoParserRuleCall_2_0 = (RuleCall)cPrecioAssignment_2.eContents().get(0);
		private final Keyword cFullStopKeyword_3 = (Keyword)cGroup.eContents().get(3);

		//ProductoConPrecio:
		//	Producto "salen" precio=PrecioPorPeso ".";
		@Override public ParserRule getRule() { return rule; }

		//Producto "salen" precio=PrecioPorPeso "."
		public Group getGroup() { return cGroup; }

		//Producto
		public RuleCall getProductoParserRuleCall_0() { return cProductoParserRuleCall_0; }

		//"salen"
		public Keyword getSalenKeyword_1() { return cSalenKeyword_1; }

		//precio=PrecioPorPeso
		public Assignment getPrecioAssignment_2() { return cPrecioAssignment_2; }

		//PrecioPorPeso
		public RuleCall getPrecioPrecioPorPesoParserRuleCall_2_0() { return cPrecioPrecioPorPesoParserRuleCall_2_0; }

		//"."
		public Keyword getFullStopKeyword_3() { return cFullStopKeyword_3; }
	}

	// Elements of: Producto: ("Los" | "Las" | "los" | "las") name=ID;
	public class ProductoElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Producto");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Alternatives cAlternatives_0 = (Alternatives)cGroup.eContents().get(0);
		private final Keyword cLosKeyword_0_0 = (Keyword)cAlternatives_0.eContents().get(0);
		private final Keyword cLasKeyword_0_1 = (Keyword)cAlternatives_0.eContents().get(1);
		private final Keyword cLosKeyword_0_2 = (Keyword)cAlternatives_0.eContents().get(2);
		private final Keyword cLasKeyword_0_3 = (Keyword)cAlternatives_0.eContents().get(3);
		private final Assignment cNameAssignment_1 = (Assignment)cGroup.eContents().get(1);
		private final RuleCall cNameIDTerminalRuleCall_1_0 = (RuleCall)cNameAssignment_1.eContents().get(0);

		//Producto:
		//	("Los" | "Las" | "los" | "las") name=ID;
		@Override public ParserRule getRule() { return rule; }

		//("Los" | "Las" | "los" | "las") name=ID
		public Group getGroup() { return cGroup; }

		//"Los" | "Las" | "los" | "las"
		public Alternatives getAlternatives_0() { return cAlternatives_0; }

		//"Los"
		public Keyword getLosKeyword_0_0() { return cLosKeyword_0_0; }

		//"Las"
		public Keyword getLasKeyword_0_1() { return cLasKeyword_0_1; }

		//"los"
		public Keyword getLosKeyword_0_2() { return cLosKeyword_0_2; }

		//"las"
		public Keyword getLasKeyword_0_3() { return cLasKeyword_0_3; }

		//name=ID
		public Assignment getNameAssignment_1() { return cNameAssignment_1; }

		//ID
		public RuleCall getNameIDTerminalRuleCall_1_0() { return cNameIDTerminalRuleCall_1_0; }
	}

	// Elements of: PrecioPorPeso: precio=MontoDinero ("los" pesaje=PesoMagnitudVariable | "el" pesaje=PesoMagnitudFija);
	public class PrecioPorPesoElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "PrecioPorPeso");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Assignment cPrecioAssignment_0 = (Assignment)cGroup.eContents().get(0);
private final RuleCall cPrecioMontoDineroParserRuleCall_0_0 = (RuleCall)cPrecioAssignment_0.eContents().get(0);
		private final Alternatives cAlternatives_1 = (Alternatives)cGroup.eContents().get(1);
		private final Group cGroup_1_0 = (Group)cAlternatives_1.eContents().get(0);
		private final Keyword cLosKeyword_1_0_0 = (Keyword)cGroup_1_0.eContents().get(0);
		private final Assignment cPesajeAssignment_1_0_1 = (Assignment)cGroup_1_0.eContents().get(1);
		private final RuleCall cPesajePesoMagnitudVariableParserRuleCall_1_0_1_0 = (RuleCall)cPesajeAssignment_1_0_1.eContents().get(0);
		private final Group cGroup_1_1 = (Group)cAlternatives_1.eContents().get(1);
		private final Keyword cElKeyword_1_1_0 = (Keyword)cGroup_1_1.eContents().get(0);
		private final Assignment cPesajeAssignment_1_1_1 = (Assignment)cGroup_1_1.eContents().get(1);
		private final RuleCall cPesajePesoMagnitudFijaParserRuleCall_1_1_1_0 = (RuleCall)cPesajeAssignment_1_1_1.eContents().get(0);

		//PrecioPorPeso:
		//	precio=MontoDinero ("los" pesaje=PesoMagnitudVariable | "el" pesaje=PesoMagnitudFija);
		@Override public ParserRule getRule() { return rule; }

		//precio=MontoDinero ("los" pesaje=PesoMagnitudVariable | "el" pesaje=PesoMagnitudFija)
		public Group getGroup() { return cGroup; }

		//precio=MontoDinero
		public Assignment getPrecioAssignment_0() { return cPrecioAssignment_0; }

		//MontoDinero
		public RuleCall getPrecioMontoDineroParserRuleCall_0_0() { return cPrecioMontoDineroParserRuleCall_0_0; }

		//"los" pesaje=PesoMagnitudVariable | "el" pesaje=PesoMagnitudFija
		public Alternatives getAlternatives_1() { return cAlternatives_1; }

		//"los" pesaje=PesoMagnitudVariable
		public Group getGroup_1_0() { return cGroup_1_0; }

		//"los"
		public Keyword getLosKeyword_1_0_0() { return cLosKeyword_1_0_0; }

		//pesaje=PesoMagnitudVariable
		public Assignment getPesajeAssignment_1_0_1() { return cPesajeAssignment_1_0_1; }

		//PesoMagnitudVariable
		public RuleCall getPesajePesoMagnitudVariableParserRuleCall_1_0_1_0() { return
		cPesajePesoMagnitudVariableParserRuleCall_1_0_1_0; }

		//"el" pesaje=PesoMagnitudFija
		public Group getGroup_1_1() { return cGroup_1_1; }

		//"el"
		public Keyword getElKeyword_1_1_0() { return cElKeyword_1_1_0; }

		//pesaje=PesoMagnitudFija
		public Assignment getPesajeAssignment_1_1_1() { return cPesajeAssignment_1_1_1; }

		//PesoMagnitudFija
		public RuleCall getPesajePesoMagnitudFijaParserRuleCall_1_1_1_0() { return cPesajePesoMagnitudFijaParserRuleCall_1_1_1_0; }
	}

	// Elements of: MontoDinero: precio=INT ("peso" | "pesos" | "pesito" | "pesitos");
	// (Grammar note, translated from the Spanish remarks echoed below: several spellings
	// of the currency word are accepted, both for "un peso" and for colloquial usage.)
	public class MontoDineroElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "MontoDinero");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Assignment cPrecioAssignment_0 = (Assignment)cGroup.eContents().get(0);
		private final RuleCall cPrecioINTTerminalRuleCall_0_0 = (RuleCall)cPrecioAssignment_0.eContents().get(0);
		private final Alternatives cAlternatives_1 = (Alternatives)cGroup.eContents().get(1);
		private final Keyword cPesoKeyword_1_0 = (Keyword)cAlternatives_1.eContents().get(0);
		private final Keyword cPesosKeyword_1_1 = (Keyword)cAlternatives_1.eContents().get(1);
		private final Keyword cPesitoKeyword_1_2 = (Keyword)cAlternatives_1.eContents().get(2);
		private final Keyword cPesitosKeyword_1_3 = (Keyword)cAlternatives_1.eContents().get(3);

		//MontoDinero:
		//	// se aceptan las diferentes alternativas para nombrar a la moneda tanto para contemplar la posibilidad del "un peso"
		//	// como para adaptarse a la jerga verdulera y así abarcar un mayor número de potenciales clientes.
		//	precio=INT ("peso" | "pesos" | "pesito" | "pesitos");
		@Override public ParserRule getRule() { return rule; }

		//// se aceptan las diferentes alternativas para nombrar a la moneda tanto para contemplar la posibilidad del "un peso"
		//// como para adaptarse a la jerga verdulera y así abarcar un mayor número de potenciales clientes.
		//precio=INT ("peso" | "pesos" | "pesito" | "pesitos")
		public Group getGroup() { return cGroup; }

		//precio=INT
		public Assignment getPrecioAssignment_0() { return cPrecioAssignment_0; }

		//INT
		public RuleCall getPrecioINTTerminalRuleCall_0_0() { return cPrecioINTTerminalRuleCall_0_0; }

		//"peso" | "pesos" | "pesito" | "pesitos"
		public Alternatives getAlternatives_1() { return cAlternatives_1; }

		//"peso"
		public Keyword getPesoKeyword_1_0() { return cPesoKeyword_1_0; }

		//"pesos"
		public Keyword getPesosKeyword_1_1() { return cPesosKeyword_1_1; }

		//"pesito"
		public Keyword getPesitoKeyword_1_2() { return cPesitoKeyword_1_2; }

		//"pesitos"
		public Keyword getPesitosKeyword_1_3() { return cPesitosKeyword_1_3; }
	}

	// Elements of: Peso: PesoMagnitudVariable | PesoMagnitudFija;
	public class PesoElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Peso");
		private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
		private final RuleCall cPesoMagnitudVariableParserRuleCall_0 = (RuleCall)cAlternatives.eContents().get(0);
		private final RuleCall cPesoMagnitudFijaParserRuleCall_1 = (RuleCall)cAlternatives.eContents().get(1);

		//Peso:
		//	PesoMagnitudVariable | PesoMagnitudFija;
		@Override public ParserRule getRule() { return rule; }

		//PesoMagnitudVariable | PesoMagnitudFija
		public Alternatives getAlternatives() { return cAlternatives; }

		//PesoMagnitudVariable
		public RuleCall getPesoMagnitudVariableParserRuleCall_0() { return cPesoMagnitudVariableParserRuleCall_0; }

		//PesoMagnitudFija
		public RuleCall getPesoMagnitudFijaParserRuleCall_1() { return cPesoMagnitudFijaParserRuleCall_1; }
	}

	// Elements of: PesoMagnitudVariable: magnitud=INT medidaPeso=MedidaPeso;
	public class PesoMagnitudVariableElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule)
GrammarUtil.findRuleForName(getGrammar(), "PesoMagnitudVariable");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Assignment cMagnitudAssignment_0 = (Assignment)cGroup.eContents().get(0);
		private final RuleCall cMagnitudINTTerminalRuleCall_0_0 = (RuleCall)cMagnitudAssignment_0.eContents().get(0);
		private final Assignment cMedidaPesoAssignment_1 = (Assignment)cGroup.eContents().get(1);
		private final RuleCall cMedidaPesoMedidaPesoParserRuleCall_1_0 = (RuleCall)cMedidaPesoAssignment_1.eContents().get(0);

		//PesoMagnitudVariable:
		//	magnitud=INT medidaPeso=MedidaPeso;
		@Override public ParserRule getRule() { return rule; }

		//magnitud=INT medidaPeso=MedidaPeso
		public Group getGroup() { return cGroup; }

		//magnitud=INT
		public Assignment getMagnitudAssignment_0() { return cMagnitudAssignment_0; }

		//INT
		public RuleCall getMagnitudINTTerminalRuleCall_0_0() { return cMagnitudINTTerminalRuleCall_0_0; }

		//medidaPeso=MedidaPeso
		public Assignment getMedidaPesoAssignment_1() { return cMedidaPesoAssignment_1; }

		//MedidaPeso
		public RuleCall getMedidaPesoMedidaPesoParserRuleCall_1_0() { return cMedidaPesoMedidaPesoParserRuleCall_1_0; }
	}

	// Elements of: PesoMagnitudFija: pesaMedioKilo?="medio kilo" | "un"? pesaCuartoKilo?="cuarto kilo" | "un"? pesaUnKilo?="kilo";
	public class PesoMagnitudFijaElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "PesoMagnitudFija");
		private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
		private final Assignment cPesaMedioKiloAssignment_0 = (Assignment)cAlternatives.eContents().get(0);
		private final Keyword cPesaMedioKiloMedioKiloKeyword_0_0 = (Keyword)cPesaMedioKiloAssignment_0.eContents().get(0);
		private final Group cGroup_1 = (Group)cAlternatives.eContents().get(1);
		private final Keyword cUnKeyword_1_0 = (Keyword)cGroup_1.eContents().get(0);
		private final Assignment cPesaCuartoKiloAssignment_1_1 = (Assignment)cGroup_1.eContents().get(1);
		private final Keyword cPesaCuartoKiloCuartoKiloKeyword_1_1_0 =
		(Keyword)cPesaCuartoKiloAssignment_1_1.eContents().get(0);
		private final Group cGroup_2 = (Group)cAlternatives.eContents().get(2);
		private final Keyword cUnKeyword_2_0 = (Keyword)cGroup_2.eContents().get(0);
		private final Assignment cPesaUnKiloAssignment_2_1 = (Assignment)cGroup_2.eContents().get(1);
		private final Keyword cPesaUnKiloKiloKeyword_2_1_0 = (Keyword)cPesaUnKiloAssignment_2_1.eContents().get(0);

		//PesoMagnitudFija:
		//	pesaMedioKilo?="medio kilo" | "un"? pesaCuartoKilo?="cuarto kilo" | "un"? pesaUnKilo?="kilo";
		@Override public ParserRule getRule() { return rule; }

		//pesaMedioKilo?="medio kilo" | "un"? pesaCuartoKilo?="cuarto kilo" | "un"? pesaUnKilo?="kilo"
		public Alternatives getAlternatives() { return cAlternatives; }

		//pesaMedioKilo?="medio kilo"
		public Assignment getPesaMedioKiloAssignment_0() { return cPesaMedioKiloAssignment_0; }

		//"medio kilo"
		public Keyword getPesaMedioKiloMedioKiloKeyword_0_0() { return cPesaMedioKiloMedioKiloKeyword_0_0; }

		//"un"? pesaCuartoKilo?="cuarto kilo"
		public Group getGroup_1() { return cGroup_1; }

		//"un"?
		public Keyword getUnKeyword_1_0() { return cUnKeyword_1_0; }

		//pesaCuartoKilo?="cuarto kilo"
		public Assignment getPesaCuartoKiloAssignment_1_1() { return cPesaCuartoKiloAssignment_1_1; }

		//"cuarto kilo"
		public Keyword getPesaCuartoKiloCuartoKiloKeyword_1_1_0() { return cPesaCuartoKiloCuartoKiloKeyword_1_1_0; }

		//"un"? pesaUnKilo?="kilo"
		public Group getGroup_2() { return cGroup_2; }

		//"un"?
		public Keyword getUnKeyword_2_0() { return cUnKeyword_2_0; }

		//pesaUnKilo?="kilo"
		public Assignment getPesaUnKiloAssignment_2_1() { return cPesaUnKiloAssignment_2_1; }

		//"kilo"
		public Keyword getPesaUnKiloKiloKeyword_2_1_0() { return cPesaUnKiloKiloKeyword_2_1_0; }
	}

	// Elements of: MedidaPeso: MedidaPesoKilo | MedidaPesoGramos;
	public class MedidaPesoElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "MedidaPeso");
		private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
		private final RuleCall cMedidaPesoKiloParserRuleCall_0 = (RuleCall)cAlternatives.eContents().get(0);
		private final RuleCall cMedidaPesoGramosParserRuleCall_1 = (RuleCall)cAlternatives.eContents().get(1);

		//MedidaPeso:
		//	MedidaPesoKilo | MedidaPesoGramos;
		@Override public ParserRule getRule() { return rule; }

		//MedidaPesoKilo | MedidaPesoGramos
		public Alternatives getAlternatives() { return cAlternatives; }

		//MedidaPesoKilo
		public RuleCall getMedidaPesoKiloParserRuleCall_0() { return cMedidaPesoKiloParserRuleCall_0; }

		//MedidaPesoGramos
		public RuleCall getMedidaPesoGramosParserRuleCall_1() { return cMedidaPesoGramosParserRuleCall_1; }
	}

	// Elements of: MedidaPesoGramos: {MedidaPesoGramos} "gramos";
	public class MedidaPesoGramosElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "MedidaPesoGramos");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Action cMedidaPesoGramosAction_0 = (Action)cGroup.eContents().get(0);
		private final Keyword cGramosKeyword_1 = (Keyword)cGroup.eContents().get(1);

		//MedidaPesoGramos:
		//	{MedidaPesoGramos} "gramos";
		@Override public ParserRule getRule() { return rule; }

		//{MedidaPesoGramos} "gramos"
		public Group getGroup() { return cGroup; }

		//{MedidaPesoGramos}
		public Action getMedidaPesoGramosAction_0() { return cMedidaPesoGramosAction_0; }

		//"gramos"
		public Keyword getGramosKeyword_1() { return cGramosKeyword_1; }
	}

	// Elements of: MedidaPesoKilo: {MedidaPesoKilo} ("kilo" | "kilos");
	public class MedidaPesoKiloElements extends
AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "MedidaPesoKilo");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Action cMedidaPesoKiloAction_0 = (Action)cGroup.eContents().get(0);
		private final Alternatives cAlternatives_1 = (Alternatives)cGroup.eContents().get(1);
		private final Keyword cKiloKeyword_1_0 = (Keyword)cAlternatives_1.eContents().get(0);
		private final Keyword cKilosKeyword_1_1 = (Keyword)cAlternatives_1.eContents().get(1);

		//MedidaPesoKilo:
		//	{MedidaPesoKilo} ("kilo" | "kilos");
		@Override public ParserRule getRule() { return rule; }

		//{MedidaPesoKilo} ("kilo" | "kilos")
		public Group getGroup() { return cGroup; }

		//{MedidaPesoKilo}
		public Action getMedidaPesoKiloAction_0() { return cMedidaPesoKiloAction_0; }

		//"kilo" | "kilos"
		public Alternatives getAlternatives_1() { return cAlternatives_1; }

		//"kilo"
		public Keyword getKiloKeyword_1_0() { return cKiloKeyword_1_0; }

		//"kilos"
		public Keyword getKilosKeyword_1_1() { return cKilosKeyword_1_1; }
	}

	// Elements of the Venta rule: comprador=[Cliente] "compra" itemsVendidos+=ItemVenta
	// ("," itemsVendidos+=ItemVenta)* ("," "se redondea a" totalRedondeado=MontoDinero)?
	// ("," "paga" totalPagado=MontoDinero)? ("," "queda debiendo" totalDebiendo=MontoDinero)? "."
	// The Spanish remarks echoed below come from the .xtext grammar author (translated:
	// the mandatory comma after the item list lets the optional tail groups each carry a
	// leading comma; variable names were made more declarative; the owed amount is optional).
	public class VentaElements extends AbstractParserRuleElementFinder {
		private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Venta");
		private final Group cGroup = (Group)rule.eContents().get(1);
		private final Assignment cCompradorAssignment_0 = (Assignment)cGroup.eContents().get(0);
		private final CrossReference cCompradorClienteCrossReference_0_0 = (CrossReference)cCompradorAssignment_0.eContents().get(0);
		private final RuleCall cCompradorClienteIDTerminalRuleCall_0_0_1 = (RuleCall)cCompradorClienteCrossReference_0_0.eContents().get(1);
		private final Keyword cCompraKeyword_1 = (Keyword)cGroup.eContents().get(1);
		private final Assignment cItemsVendidosAssignment_2 = (Assignment)cGroup.eContents().get(2);
		private final RuleCall cItemsVendidosItemVentaParserRuleCall_2_0 = (RuleCall)cItemsVendidosAssignment_2.eContents().get(0);
		private final Group cGroup_3 =
		(Group)cGroup.eContents().get(3);
		private final Keyword cCommaKeyword_3_0 = (Keyword)cGroup_3.eContents().get(0);
		private final Assignment cItemsVendidosAssignment_3_1 = (Assignment)cGroup_3.eContents().get(1);
		private final RuleCall cItemsVendidosItemVentaParserRuleCall_3_1_0 = (RuleCall)cItemsVendidosAssignment_3_1.eContents().get(0);
		private final Group cGroup_4 = (Group)cGroup.eContents().get(4);
		private final Keyword cCommaKeyword_4_0 = (Keyword)cGroup_4.eContents().get(0);
		private final Group cGroup_4_1 = (Group)cGroup_4.eContents().get(1);
		private final Keyword cSeRedondeaAKeyword_4_1_0 = (Keyword)cGroup_4_1.eContents().get(0);
		private final Assignment cTotalRedondeadoAssignment_4_1_1 = (Assignment)cGroup_4_1.eContents().get(1);
		private final RuleCall cTotalRedondeadoMontoDineroParserRuleCall_4_1_1_0 = (RuleCall)cTotalRedondeadoAssignment_4_1_1.eContents().get(0);
		private final Group cGroup_5 = (Group)cGroup.eContents().get(5);
		private final Keyword cCommaKeyword_5_0 = (Keyword)cGroup_5.eContents().get(0);
		private final Group cGroup_5_1 = (Group)cGroup_5.eContents().get(1);
		private final Keyword cPagaKeyword_5_1_0 = (Keyword)cGroup_5_1.eContents().get(0);
		private final Assignment cTotalPagadoAssignment_5_1_1 = (Assignment)cGroup_5_1.eContents().get(1);
		private final RuleCall cTotalPagadoMontoDineroParserRuleCall_5_1_1_0 = (RuleCall)cTotalPagadoAssignment_5_1_1.eContents().get(0);
		private final Group cGroup_6 = (Group)cGroup.eContents().get(6);
		private final Keyword cCommaKeyword_6_0 = (Keyword)cGroup_6.eContents().get(0);
		private final Group cGroup_6_1 = (Group)cGroup_6.eContents().get(1);
		private final Keyword cQuedaDebiendoKeyword_6_1_0 = (Keyword)cGroup_6_1.eContents().get(0);
		private final Assignment cTotalDebiendoAssignment_6_1_1 = (Assignment)cGroup_6_1.eContents().get(1);
		private final RuleCall cTotalDebiendoMontoDineroParserRuleCall_6_1_1_0 = (RuleCall)cTotalDebiendoAssignment_6_1_1.eContents().get(0);
		private final Keyword cFullStopKeyword_7 =
		(Keyword)cGroup.eContents().get(7);

		//// Modificada
		//Venta:
		//	comprador=[Cliente] "compra"
		//	// hack para que meter la coma obligatoria despues de los items opcionales así las siguientes lineas que son opcionales (rodondea y paga)
		//	// pueden llevar la coma adelante y no hay que andar haciendo chanchadas peores.
		//	itemsVendidos+=ItemVenta ("," itemsVendidos+=ItemVenta)*
		//	// cambie los nombres de las variables precio1,2,3 por nombres mas declarativos
		//	("," ("se redondea a" totalRedondeado=MontoDinero))?
		//	// saque el precio exacto que no se neceitaba
		//	("," ("paga" totalPagado=MontoDinero))? ("," ("queda debiendo" totalDebiendo=MontoDinero))?
		//	// puse el monto que debe como opcional
		//	".";
		@Override public ParserRule getRule() { return rule; }

		//comprador=[Cliente] "compra"
		//// hack para que meter la coma obligatoria despues de los items opcionales así las siguientes lineas que son opcionales (rodondea y paga)
		//// pueden llevar la coma adelante y no hay que andar haciendo chanchadas peores.
		//itemsVendidos+=ItemVenta ("," itemsVendidos+=ItemVenta)*
		//// cambie los nombres de las variables precio1,2,3 por nombres mas declarativos
		//("," ("se redondea a" totalRedondeado=MontoDinero))?
		//// saque el precio exacto que no se neceitaba
		//("," ("paga" totalPagado=MontoDinero))? ("," ("queda debiendo" totalDebiendo=MontoDinero))?
		//// puse el monto que debe como opcional
		//"."
		public Group getGroup() { return cGroup; }

		//comprador=[Cliente]
		public Assignment getCompradorAssignment_0() { return cCompradorAssignment_0; }

		//[Cliente]
		public CrossReference getCompradorClienteCrossReference_0_0() { return cCompradorClienteCrossReference_0_0; }

		//ID
		public RuleCall getCompradorClienteIDTerminalRuleCall_0_0_1() { return cCompradorClienteIDTerminalRuleCall_0_0_1; }

		//"compra"
		public Keyword getCompraKeyword_1() { return cCompraKeyword_1; }

		//itemsVendidos+=ItemVenta
		public Assignment getItemsVendidosAssignment_2() { return cItemsVendidosAssignment_2; }

		//ItemVenta
		public RuleCall getItemsVendidosItemVentaParserRuleCall_2_0() { return cItemsVendidosItemVentaParserRuleCall_2_0; }

		//("," itemsVendidos+=ItemVenta)*
		public Group getGroup_3() { return cGroup_3; }

		//","
		public Keyword getCommaKeyword_3_0() { return cCommaKeyword_3_0; }

		//itemsVendidos+=ItemVenta
		public Assignment getItemsVendidosAssignment_3_1() { return cItemsVendidosAssignment_3_1; }

		//ItemVenta
		public RuleCall getItemsVendidosItemVentaParserRuleCall_3_1_0() { return cItemsVendidosItemVentaParserRuleCall_3_1_0; }

		//("," ("se redondea a" totalRedondeado=MontoDinero))?
		public Group getGroup_4() { return cGroup_4; }

		//","
		public Keyword getCommaKeyword_4_0() { return cCommaKeyword_4_0; }

		//"se redondea a" totalRedondeado=MontoDinero
		public Group getGroup_4_1() { return cGroup_4_1; }

		//"se redondea a"
		public Keyword getSeRedondeaAKeyword_4_1_0() { return cSeRedondeaAKeyword_4_1_0; }

		//totalRedondeado=MontoDinero
		public Assignment getTotalRedondeadoAssignment_4_1_1() { return cTotalRedondeadoAssignment_4_1_1; }

		//MontoDinero
		public RuleCall getTotalRedondeadoMontoDineroParserRuleCall_4_1_1_0() { return cTotalRedondeadoMontoDineroParserRuleCall_4_1_1_0; }

		//("," ("paga" totalPagado=MontoDinero))?
		public Group getGroup_5() { return cGroup_5; }

		//","
		public Keyword getCommaKeyword_5_0() { return cCommaKeyword_5_0; }

		//"paga" totalPagado=MontoDinero
		public Group getGroup_5_1() { return cGroup_5_1; }

		//"paga"
		public Keyword getPagaKeyword_5_1_0() { return cPagaKeyword_5_1_0; }

		//totalPagado=MontoDinero
		public Assignment getTotalPagadoAssignment_5_1_1() { return cTotalPagadoAssignment_5_1_1; }

		//MontoDinero
		public RuleCall getTotalPagadoMontoDineroParserRuleCall_5_1_1_0() { return cTotalPagadoMontoDineroParserRuleCall_5_1_1_0; }

		//("," ("queda debiendo" totalDebiendo=MontoDinero))?
		public Group getGroup_6() { return cGroup_6; }

		//","
		public Keyword getCommaKeyword_6_0() { return cCommaKeyword_6_0; }

		//"queda debiendo" totalDebiendo=MontoDinero
		public Group getGroup_6_1() { return cGroup_6_1; }

		//"queda debiendo"
		public Keyword getQuedaDebiendoKeyword_6_1_0() { return cQuedaDebiendoKeyword_6_1_0; }

		//totalDebiendo=MontoDinero
		public Assignment getTotalDebiendoAssignment_6_1_1() { return cTotalDebiendoAssignment_6_1_1; }

		//MontoDinero
		public RuleCall getTotalDebiendoMontoDineroParserRuleCall_6_1_1_0() { return cTotalDebiendoMontoDineroParserRuleCall_6_1_1_0; }

		//"."
// NOTE(review): this appears to be Xtext-generated grammar-access code
// (AbstractParserRuleElementFinder / GrammarUtil / positional eContents() pattern)
// -- confirm against the project's .xtext grammar and regenerate rather than
// hand-editing. Field initialization order mirrors the positional
// eContents().get(n) indices of the grammar model and must not be reordered.
public Keyword getFullStopKeyword_7() { return cFullStopKeyword_7; }
}

// Element accessors for parser rule "ItemVenta" (rule text is mirrored in the
// generated //-comments below each accessor).
public class ItemVentaElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "ItemVenta");
    private final Group cGroup = (Group)rule.eContents().get(1);
    private final Assignment cCantidadAssignment_0 = (Assignment)cGroup.eContents().get(0);
    private final RuleCall cCantidadPesoParserRuleCall_0_0 = (RuleCall)cCantidadAssignment_0.eContents().get(0);
    private final Keyword cDeKeyword_1 = (Keyword)cGroup.eContents().get(1);
    private final Assignment cProductoAssignment_2 = (Assignment)cGroup.eContents().get(2);
    private final CrossReference cProductoProductoCrossReference_2_0 = (CrossReference)cProductoAssignment_2.eContents().get(0);
    private final RuleCall cProductoProductoIDTerminalRuleCall_2_0_1 = (RuleCall)cProductoProductoCrossReference_2_0.eContents().get(1);

    //ItemVenta:
    //	cantidad=Peso "de" producto=[Producto];
    @Override
    public ParserRule getRule() { return rule; }

    //cantidad=Peso "de" producto=[Producto]
    public Group getGroup() { return cGroup; }

    //cantidad=Peso
    public Assignment getCantidadAssignment_0() { return cCantidadAssignment_0; }

    //Peso
    public RuleCall getCantidadPesoParserRuleCall_0_0() { return cCantidadPesoParserRuleCall_0_0; }

    //"de"
    public Keyword getDeKeyword_1() { return cDeKeyword_1; }

    //producto=[Producto]
    public Assignment getProductoAssignment_2() { return cProductoAssignment_2; }

    //[Producto]
    public CrossReference getProductoProductoCrossReference_2_0() { return cProductoProductoCrossReference_2_0; }

    //ID
    public RuleCall getProductoProductoIDTerminalRuleCall_2_0_1() { return cProductoProductoIDTerminalRuleCall_2_0_1; }
}

// Element accessors for parser rule "Tarea".
public class TareaElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Tarea");
    private final Group cGroup = (Group)rule.eContents().get(1);
    private final Keyword cALasKeyword_0 = (Keyword)cGroup.eContents().get(0);
    private final Assignment cHorarioAssignment_1 = (Assignment)cGroup.eContents().get(1);
    private final RuleCall cHorarioHorarioParserRuleCall_1_0 = (RuleCall)cHorarioAssignment_1.eContents().get(0);
    private final Assignment cTareaRealizadaAssignment_2 = (Assignment)cGroup.eContents().get(2);
    private final RuleCall cTareaRealizadaTipoTareaParserRuleCall_2_0 = (RuleCall)cTareaRealizadaAssignment_2.eContents().get(0);
    private final Keyword cTomoKeyword_3 = (Keyword)cGroup.eContents().get(3);
    private final Assignment cDuracionAssignment_4 = (Assignment)cGroup.eContents().get(4);
    private final RuleCall cDuracionLapsoTiempoParserRuleCall_4_0 = (RuleCall)cDuracionAssignment_4.eContents().get(0);
    private final Keyword cFullStopKeyword_5 = (Keyword)cGroup.eContents().get(5);

    //Tarea:
    //	"A las" horario=Horario tareaRealizada=TipoTarea ", tomo" duracion=LapsoTiempo ".";
    @Override
    public ParserRule getRule() { return rule; }

    //"A las" horario=Horario tareaRealizada=TipoTarea ", tomo" duracion=LapsoTiempo "."
    public Group getGroup() { return cGroup; }

    //"A las"
    public Keyword getALasKeyword_0() { return cALasKeyword_0; }

    //horario=Horario
    public Assignment getHorarioAssignment_1() { return cHorarioAssignment_1; }

    //Horario
    public RuleCall getHorarioHorarioParserRuleCall_1_0() { return cHorarioHorarioParserRuleCall_1_0; }

    //tareaRealizada=TipoTarea
    public Assignment getTareaRealizadaAssignment_2() { return cTareaRealizadaAssignment_2; }

    //TipoTarea
    public RuleCall getTareaRealizadaTipoTareaParserRuleCall_2_0() { return cTareaRealizadaTipoTareaParserRuleCall_2_0; }

    //", tomo"
    public Keyword getTomoKeyword_3() { return cTomoKeyword_3; }

    //duracion=LapsoTiempo
    public Assignment getDuracionAssignment_4() { return cDuracionAssignment_4; }

    //LapsoTiempo
    public RuleCall getDuracionLapsoTiempoParserRuleCall_4_0() { return cDuracionLapsoTiempoParserRuleCall_4_0; }

    //"."
    public Keyword getFullStopKeyword_5() { return cFullStopKeyword_5; }
}

// Element accessors for parser rule "Horario" (numeric or literal time of day).
public class HorarioElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "Horario");
    private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
    private final RuleCall cHorarioNumericoParserRuleCall_0 = (RuleCall)cAlternatives.eContents().get(0);
    private final RuleCall cHorarioLiteralParserRuleCall_1 = (RuleCall)cAlternatives.eContents().get(1);

    //// Horarios
    //Horario:
    //	HorarioNumerico | HorarioLiteral;
    @Override
    public ParserRule getRule() { return rule; }

    //HorarioNumerico | HorarioLiteral
    public Alternatives getAlternatives() { return cAlternatives; }

    //HorarioNumerico
    public RuleCall getHorarioNumericoParserRuleCall_0() { return cHorarioNumericoParserRuleCall_0; }

    //HorarioLiteral
    public RuleCall getHorarioLiteralParserRuleCall_1() { return cHorarioLiteralParserRuleCall_1; }
}

// Element accessors for parser rule "HorarioLiteral" (spelled-out hour with
// optional fraction, e.g. "cinco y media").
public class HorarioLiteralElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "HorarioLiteral");
    private final Group cGroup = (Group)rule.eContents().get(1);
    private final Assignment cHoraAssignment_0 = (Assignment)cGroup.eContents().get(0);
    private final RuleCall cHoraHorasLiteralParserRuleCall_0_0 = (RuleCall)cHoraAssignment_0.eContents().get(0);
    private final Group cGroup_1 = (Group)cGroup.eContents().get(1);
    private final Keyword cYKeyword_1_0 = (Keyword)cGroup_1.eContents().get(0);
    private final Assignment cFraccionHoraAssignment_1_1 = (Assignment)cGroup_1.eContents().get(1);
    private final RuleCall cFraccionHoraFraccionHoraLiteralParserRuleCall_1_1_0 = (RuleCall)cFraccionHoraAssignment_1_1.eContents().get(0);

    //HorarioLiteral:
    //	hora=HorasLiteral ("y" fraccionHora=FraccionHoraLiteral)?;
    @Override
    public ParserRule getRule() { return rule; }

    //hora=HorasLiteral ("y" fraccionHora=FraccionHoraLiteral)?
    public Group getGroup() { return cGroup; }

    //hora=HorasLiteral
    public Assignment getHoraAssignment_0() { return cHoraAssignment_0; }

    //HorasLiteral
    public RuleCall getHoraHorasLiteralParserRuleCall_0_0() { return cHoraHorasLiteralParserRuleCall_0_0; }

    //("y" fraccionHora=FraccionHoraLiteral)?
    public Group getGroup_1() { return cGroup_1; }

    //"y"
    public Keyword getYKeyword_1_0() { return cYKeyword_1_0; }

    //fraccionHora=FraccionHoraLiteral
    public Assignment getFraccionHoraAssignment_1_1() { return cFraccionHoraAssignment_1_1; }

    //FraccionHoraLiteral
    public RuleCall getFraccionHoraFraccionHoraLiteralParserRuleCall_1_1_0() { return cFraccionHoraFraccionHoraLiteralParserRuleCall_1_1_0; }
}

// Element accessors for parser rule "HorarioNumerico" (hh or hh:mm).
public class HorarioNumericoElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "HorarioNumerico");
    private final Group cGroup = (Group)rule.eContents().get(1);
    private final Assignment cHoraAssignment_0 = (Assignment)cGroup.eContents().get(0);
    private final RuleCall cHoraINTTerminalRuleCall_0_0 = (RuleCall)cHoraAssignment_0.eContents().get(0);
    private final Group cGroup_1 = (Group)cGroup.eContents().get(1);
    private final Keyword cColonKeyword_1_0 = (Keyword)cGroup_1.eContents().get(0);
    private final Assignment cMinutosAssignment_1_1 = (Assignment)cGroup_1.eContents().get(1);
    private final RuleCall cMinutosINTTerminalRuleCall_1_1_0 = (RuleCall)cMinutosAssignment_1_1.eContents().get(0);

    //HorarioNumerico:
    //	hora=INT (":" minutos=INT)?;
    @Override
    public ParserRule getRule() { return rule; }

    //hora=INT (":" minutos=INT)?
    public Group getGroup() { return cGroup; }

    //hora=INT
    public Assignment getHoraAssignment_0() { return cHoraAssignment_0; }

    //INT
    public RuleCall getHoraINTTerminalRuleCall_0_0() { return cHoraINTTerminalRuleCall_0_0; }

    //(":" minutos=INT)?
    public Group getGroup_1() { return cGroup_1; }

    //":"
    public Keyword getColonKeyword_1_0() { return cColonKeyword_1_0; }

    //minutos=INT
    public Assignment getMinutosAssignment_1_1() { return cMinutosAssignment_1_1; }

    //INT
    public RuleCall getMinutosINTTerminalRuleCall_1_1_0() { return cMinutosINTTerminalRuleCall_1_1_0; }
}

// Element accessors for parser rule "TipoTarea" (one of the three task kinds).
// NOTE(review): Xtext-generated accessors -- keep initializer order intact.
public class TipoTareaElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "TipoTarea");
    private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
    private final RuleCall cTareaLimpiezaLocalParserRuleCall_0 = (RuleCall)cAlternatives.eContents().get(0);
    private final RuleCall cTareaArqueoCajaParserRuleCall_1 = (RuleCall)cAlternatives.eContents().get(1);
    private final RuleCall cTareaRevisionProductosParserRuleCall_2 = (RuleCall)cAlternatives.eContents().get(2);

    //// Tipo de tarea
    //TipoTarea:
    //	TareaLimpiezaLocal | TareaArqueoCaja | TareaRevisionProductos;
    @Override
    public ParserRule getRule() { return rule; }

    //TareaLimpiezaLocal | TareaArqueoCaja | TareaRevisionProductos
    public Alternatives getAlternatives() { return cAlternatives; }

    //TareaLimpiezaLocal
    public RuleCall getTareaLimpiezaLocalParserRuleCall_0() { return cTareaLimpiezaLocalParserRuleCall_0; }

    //TareaArqueoCaja
    public RuleCall getTareaArqueoCajaParserRuleCall_1() { return cTareaArqueoCajaParserRuleCall_1; }

    //TareaRevisionProductos
    public RuleCall getTareaRevisionProductosParserRuleCall_2() { return cTareaRevisionProductosParserRuleCall_2; }
}

// Element accessors for parser rule "TareaRevisionProductos"
// (comma-separated list of reviewed products, each preceded by "las"/"los").
public class TareaRevisionProductosElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "TareaRevisionProductos");
    private final Group cGroup = (Group)rule.eContents().get(1);
    private final Keyword cSeRevisaronKeyword_0 = (Keyword)cGroup.eContents().get(0);
    private final Group cGroup_1 = (Group)cGroup.eContents().get(1);
    private final Alternatives cAlternatives_1_0 = (Alternatives)cGroup_1.eContents().get(0);
    private final Keyword cLasKeyword_1_0_0 = (Keyword)cAlternatives_1_0.eContents().get(0);
    private final Keyword cLosKeyword_1_0_1 = (Keyword)cAlternatives_1_0.eContents().get(1);
    private final Assignment cProductosRevisadosAssignment_1_1 = (Assignment)cGroup_1.eContents().get(1);
    private final CrossReference cProductosRevisadosProductoCrossReference_1_1_0 = (CrossReference)cProductosRevisadosAssignment_1_1.eContents().get(0);
    private final RuleCall cProductosRevisadosProductoIDTerminalRuleCall_1_1_0_1 = (RuleCall)cProductosRevisadosProductoCrossReference_1_1_0.eContents().get(1);
    private final Keyword cCommaKeyword_1_2 = (Keyword)cGroup_1.eContents().get(2);
    private final Alternatives cAlternatives_2 = (Alternatives)cGroup.eContents().get(2);
    private final Keyword cLasKeyword_2_0 = (Keyword)cAlternatives_2.eContents().get(0);
    private final Keyword cLosKeyword_2_1 = (Keyword)cAlternatives_2.eContents().get(1);
    private final Assignment cProductosRevisadosAssignment_3 = (Assignment)cGroup.eContents().get(3);
    private final CrossReference cProductosRevisadosProductoCrossReference_3_0 = (CrossReference)cProductosRevisadosAssignment_3.eContents().get(0);
    private final RuleCall cProductosRevisadosProductoIDTerminalRuleCall_3_0_1 = (RuleCall)cProductosRevisadosProductoCrossReference_3_0.eContents().get(1);

    //TareaRevisionProductos:
    //	"se revisaron" (("las" | "los") productosRevisados+=[Producto] ",")* ("las" | "los") productosRevisados+=[Producto];
    @Override
    public ParserRule getRule() { return rule; }

    //"se revisaron" (("las" | "los") productosRevisados+=[Producto] ",")* ("las" | "los") productosRevisados+=[Producto]
    public Group getGroup() { return cGroup; }

    //"se revisaron"
    public Keyword getSeRevisaronKeyword_0() { return cSeRevisaronKeyword_0; }

    //(("las" | "los") productosRevisados+=[Producto] ",")*
    public Group getGroup_1() { return cGroup_1; }

    //"las" | "los"
    public Alternatives getAlternatives_1_0() { return cAlternatives_1_0; }

    //"las"
    public Keyword getLasKeyword_1_0_0() { return cLasKeyword_1_0_0; }

    //"los"
    public Keyword getLosKeyword_1_0_1() { return cLosKeyword_1_0_1; }

    //productosRevisados+=[Producto]
    public Assignment getProductosRevisadosAssignment_1_1() { return cProductosRevisadosAssignment_1_1; }

    //[Producto]
    public CrossReference getProductosRevisadosProductoCrossReference_1_1_0() { return cProductosRevisadosProductoCrossReference_1_1_0; }

    //ID
    public RuleCall getProductosRevisadosProductoIDTerminalRuleCall_1_1_0_1() { return cProductosRevisadosProductoIDTerminalRuleCall_1_1_0_1; }

    //","
    public Keyword getCommaKeyword_1_2() { return cCommaKeyword_1_2; }

    //"las" | "los"
    public Alternatives getAlternatives_2() { return cAlternatives_2; }

    //"las"
    public Keyword getLasKeyword_2_0() { return cLasKeyword_2_0; }

    //"los"
    public Keyword getLosKeyword_2_1() { return cLosKeyword_2_1; }

    //productosRevisados+=[Producto]
    public Assignment getProductosRevisadosAssignment_3() { return cProductosRevisadosAssignment_3; }

    //[Producto]
    public CrossReference getProductosRevisadosProductoCrossReference_3_0() { return cProductosRevisadosProductoCrossReference_3_0; }

    //ID
    public RuleCall getProductosRevisadosProductoIDTerminalRuleCall_3_0_1() { return cProductosRevisadosProductoIDTerminalRuleCall_3_0_1; }
}

// Element accessors for parser rule "TareaArqueoCaja" (keyword-only rule; the
// {TareaArqueoCaja} action instantiates the object).
public class TareaArqueoCajaElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "TareaArqueoCaja");
    private final Group cGroup = (Group)rule.eContents().get(1);
    private final Action cTareaArqueoCajaAction_0 = (Action)cGroup.eContents().get(0);
    private final Keyword cSeHizoElArqueoDeCajaKeyword_1 = (Keyword)cGroup.eContents().get(1);

    //TareaArqueoCaja:
    //	{TareaArqueoCaja} "se hizo el arqueo de caja";
    @Override
    public ParserRule getRule() { return rule; }

    //{TareaArqueoCaja} "se hizo el arqueo de caja"
    public Group getGroup() { return cGroup; }

    //{TareaArqueoCaja}
    public Action getTareaArqueoCajaAction_0() { return cTareaArqueoCajaAction_0; }

    //"se hizo el arqueo de caja"
    public Keyword getSeHizoElArqueoDeCajaKeyword_1() { return cSeHizoElArqueoDeCajaKeyword_1; }
}

// Element accessors for parser rule "TareaLimpiezaLocal" (keyword-only rule;
// the {TareaLimpiezaLocal} action instantiates the object).
public class TareaLimpiezaLocalElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "TareaLimpiezaLocal");
    private final Group cGroup = (Group)rule.eContents().get(1);
    private final Action cTareaLimpiezaLocalAction_0 = (Action)cGroup.eContents().get(0);
    private final Keyword cSeLimpioElLocalKeyword_1 = (Keyword)cGroup.eContents().get(1);

    //TareaLimpiezaLocal:
    //	{TareaLimpiezaLocal} "se limpio el local";
    @Override
    public ParserRule getRule() { return rule; }

    //{TareaLimpiezaLocal} "se limpio el local"
    public Group getGroup() { return cGroup; }

    //{TareaLimpiezaLocal}
    public Action getTareaLimpiezaLocalAction_0() { return cTareaLimpiezaLocalAction_0; }

    //"se limpio el local"
    public Keyword getSeLimpioElLocalKeyword_1() { return cSeLimpioElLocalKeyword_1; }
}

// Element accessors for parser rule "LapsoTiempo" (literal or numeric duration).
public class LapsoTiempoElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "LapsoTiempo");
    private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
    private final RuleCall cLapsoTiempoLiteralParserRuleCall_0 = (RuleCall)cAlternatives.eContents().get(0);
    private final RuleCall cLapsoTiempoNumericoParserRuleCall_1 = (RuleCall)cAlternatives.eContents().get(1);

    //// Duracion
    //LapsoTiempo:
    //	LapsoTiempoLiteral | LapsoTiempoNumerico;
    @Override
    public ParserRule getRule() { return rule; }

    //LapsoTiempoLiteral | LapsoTiempoNumerico
    public Alternatives getAlternatives() { return cAlternatives; }

    //LapsoTiempoLiteral
    public RuleCall getLapsoTiempoLiteralParserRuleCall_0() { return cLapsoTiempoLiteralParserRuleCall_0; }

    //LapsoTiempoNumerico
    public RuleCall getLapsoTiempoNumericoParserRuleCall_1() { return
    cLapsoTiempoNumericoParserRuleCall_1; }
}

// Element accessors for parser rule "LapsoTiempoNumerico"
// (either "N minutos" or "N hora(s)" with an optional "y N minutos" tail).
// NOTE(review): Xtext-generated accessors -- keep initializer order intact.
public class LapsoTiempoNumericoElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "LapsoTiempoNumerico");
    private final Alternatives cAlternatives = (Alternatives)rule.eContents().get(1);
    private final Group cGroup_0 = (Group)cAlternatives.eContents().get(0);
    private final Assignment cMinutosAssignment_0_0 = (Assignment)cGroup_0.eContents().get(0);
    private final RuleCall cMinutosINTTerminalRuleCall_0_0_0 = (RuleCall)cMinutosAssignment_0_0.eContents().get(0);
    private final Keyword cMinutosKeyword_0_1 = (Keyword)cGroup_0.eContents().get(1);
    private final Group cGroup_1 = (Group)cAlternatives.eContents().get(1);
    private final Assignment cHorasAssignment_1_0 = (Assignment)cGroup_1.eContents().get(0);
    private final RuleCall cHorasINTTerminalRuleCall_1_0_0 = (RuleCall)cHorasAssignment_1_0.eContents().get(0);
    private final Alternatives cAlternatives_1_1 = (Alternatives)cGroup_1.eContents().get(1);
    private final Keyword cHoraKeyword_1_1_0 = (Keyword)cAlternatives_1_1.eContents().get(0);
    private final Keyword cHorasKeyword_1_1_1 = (Keyword)cAlternatives_1_1.eContents().get(1);
    private final Group cGroup_1_2 = (Group)cGroup_1.eContents().get(2);
    private final Keyword cYKeyword_1_2_0 = (Keyword)cGroup_1_2.eContents().get(0);
    private final Assignment cMinutosAssignment_1_2_1 = (Assignment)cGroup_1_2.eContents().get(1);
    private final RuleCall cMinutosINTTerminalRuleCall_1_2_1_0 = (RuleCall)cMinutosAssignment_1_2_1.eContents().get(0);
    private final Keyword cMinutosKeyword_1_2_2 = (Keyword)cGroup_1_2.eContents().get(2);

    //LapsoTiempoNumerico:
    //	minutos=INT "minutos" | horas=INT ("hora" | "horas") ("y" minutos=INT "minutos")?;
    @Override
    public ParserRule getRule() { return rule; }

    //minutos=INT "minutos" | horas=INT ("hora" | "horas") ("y" minutos=INT "minutos")?
    public Alternatives getAlternatives() { return cAlternatives; }

    //minutos=INT "minutos"
    public Group getGroup_0() { return cGroup_0; }

    //minutos=INT
    public Assignment getMinutosAssignment_0_0() { return cMinutosAssignment_0_0; }

    //INT
    public RuleCall getMinutosINTTerminalRuleCall_0_0_0() { return cMinutosINTTerminalRuleCall_0_0_0; }

    //"minutos"
    public Keyword getMinutosKeyword_0_1() { return cMinutosKeyword_0_1; }

    //horas=INT ("hora" | "horas") ("y" minutos=INT "minutos")?
    public Group getGroup_1() { return cGroup_1; }

    //horas=INT
    public Assignment getHorasAssignment_1_0() { return cHorasAssignment_1_0; }

    //INT
    public RuleCall getHorasINTTerminalRuleCall_1_0_0() { return cHorasINTTerminalRuleCall_1_0_0; }

    //"hora" | "horas"
    public Alternatives getAlternatives_1_1() { return cAlternatives_1_1; }

    //"hora"
    public Keyword getHoraKeyword_1_1_0() { return cHoraKeyword_1_1_0; }

    //"horas"
    public Keyword getHorasKeyword_1_1_1() { return cHorasKeyword_1_1_1; }

    //("y" minutos=INT "minutos")?
    public Group getGroup_1_2() { return cGroup_1_2; }

    //"y"
    public Keyword getYKeyword_1_2_0() { return cYKeyword_1_2_0; }

    //minutos=INT
    public Assignment getMinutosAssignment_1_2_1() { return cMinutosAssignment_1_2_1; }

    //INT
    public RuleCall getMinutosINTTerminalRuleCall_1_2_1_0() { return cMinutosINTTerminalRuleCall_1_2_1_0; }

    //"minutos"
    public Keyword getMinutosKeyword_1_2_2() { return cMinutosKeyword_1_2_2; }
}

// Element accessors for parser rule "LapsoTiempoLiteral"
// (spelled-out hour count with optional literal fraction).
public class LapsoTiempoLiteralElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "LapsoTiempoLiteral");
    private final Group cGroup = (Group)rule.eContents().get(1);
    private final Assignment cHorasAssignment_0 = (Assignment)cGroup.eContents().get(0);
    private final RuleCall cHorasHorasLiteralParserRuleCall_0_0 = (RuleCall)cHorasAssignment_0.eContents().get(0);
    private final Alternatives cAlternatives_1 = (Alternatives)cGroup.eContents().get(1);
    private final Keyword cHoraKeyword_1_0 = (Keyword)cAlternatives_1.eContents().get(0);
    private final Keyword cHorasKeyword_1_1 = (Keyword)cAlternatives_1.eContents().get(1);
    private final Group cGroup_2 = (Group)cGroup.eContents().get(2);
    private final Keyword cYKeyword_2_0 = (Keyword)cGroup_2.eContents().get(0);
    private final Assignment cFraccionHoraAssignment_2_1 = (Assignment)cGroup_2.eContents().get(1);
    private final RuleCall cFraccionHoraFraccionHoraLiteralParserRuleCall_2_1_0 = (RuleCall)cFraccionHoraAssignment_2_1.eContents().get(0);

    //LapsoTiempoLiteral:
    //	horas=HorasLiteral ("hora" | "horas") ("y" fraccionHora=FraccionHoraLiteral)?;
    @Override
    public ParserRule getRule() { return rule; }

    //horas=HorasLiteral ("hora" | "horas") ("y" fraccionHora=FraccionHoraLiteral)?
    public Group getGroup() { return cGroup; }

    //horas=HorasLiteral
    public Assignment getHorasAssignment_0() { return cHorasAssignment_0; }

    //HorasLiteral
    public RuleCall getHorasHorasLiteralParserRuleCall_0_0() { return cHorasHorasLiteralParserRuleCall_0_0; }

    //"hora" | "horas"
    public Alternatives getAlternatives_1() { return cAlternatives_1; }

    //"hora"
    public Keyword getHoraKeyword_1_0() { return cHoraKeyword_1_0; }

    //"horas"
    public Keyword getHorasKeyword_1_1() { return cHorasKeyword_1_1; }

    //("y" fraccionHora=FraccionHoraLiteral)?
    public Group getGroup_2() { return cGroup_2; }

    //"y"
    public Keyword getYKeyword_2_0() { return cYKeyword_2_0; }

    //fraccionHora=FraccionHoraLiteral
    public Assignment getFraccionHoraAssignment_2_1() { return cFraccionHoraAssignment_2_1; }

    //FraccionHoraLiteral
    public RuleCall getFraccionHoraFraccionHoraLiteralParserRuleCall_2_1_0() { return cFraccionHoraFraccionHoraLiteralParserRuleCall_2_1_0; }
}

// Element accessors for parser rule "HorasLiteral" (spelled-out hours 1..12).
public class HorasLiteralElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "HorasLiteral");
    private final Assignment cLiteralAssignment = (Assignment)rule.eContents().get(1);
    private final Alternatives cLiteralAlternatives_0 = (Alternatives)cLiteralAssignment.eContents().get(0);
    private final Keyword cLiteralUnaKeyword_0_0 = (Keyword)cLiteralAlternatives_0.eContents().get(0);
    private final Keyword cLiteralDosKeyword_0_1 = (Keyword)cLiteralAlternatives_0.eContents().get(1);
    private final Keyword cLiteralTresKeyword_0_2 = (Keyword)cLiteralAlternatives_0.eContents().get(2);
    private final Keyword cLiteralCuatroKeyword_0_3 = (Keyword)cLiteralAlternatives_0.eContents().get(3);
    private final Keyword cLiteralCincoKeyword_0_4 = (Keyword)cLiteralAlternatives_0.eContents().get(4);
    private final Keyword cLiteralSeisKeyword_0_5 = (Keyword)cLiteralAlternatives_0.eContents().get(5);
    private final Keyword cLiteralSieteKeyword_0_6 = (Keyword)cLiteralAlternatives_0.eContents().get(6);
    private final Keyword cLiteralOchoKeyword_0_7 = (Keyword)cLiteralAlternatives_0.eContents().get(7);
    private final Keyword cLiteralNueveKeyword_0_8 = (Keyword)cLiteralAlternatives_0.eContents().get(8);
    private final Keyword cLiteralDiezKeyword_0_9 = (Keyword)cLiteralAlternatives_0.eContents().get(9);
    private final Keyword cLiteralOnceKeyword_0_10 = (Keyword)cLiteralAlternatives_0.eContents().get(10);
    private final Keyword cLiteralDoceKeyword_0_11 = (Keyword)cLiteralAlternatives_0.eContents().get(11);

    //// componentes horarios y de tiempo
    //HorasLiteral:
    //	literal=("una" | "dos" | "tres" | "cuatro" | "cinco" | "seis" | "siete" | "ocho" | "nueve" | "diez" | "once" |
    //	"doce");
    @Override
    public ParserRule getRule() { return rule; }

    //literal=("una" | "dos" | "tres" | "cuatro" | "cinco" | "seis" | "siete" | "ocho" | "nueve" | "diez" | "once" | "doce")
    public Assignment getLiteralAssignment() { return cLiteralAssignment; }

    //"una" | "dos" | "tres" | "cuatro" | "cinco" | "seis" | "siete" | "ocho" | "nueve" | "diez" | "once" | "doce"
    public Alternatives getLiteralAlternatives_0() { return cLiteralAlternatives_0; }

    //"una"
    public Keyword getLiteralUnaKeyword_0_0() { return cLiteralUnaKeyword_0_0; }

    //"dos"
    public Keyword getLiteralDosKeyword_0_1() { return cLiteralDosKeyword_0_1; }

    //"tres"
    public Keyword getLiteralTresKeyword_0_2() { return cLiteralTresKeyword_0_2; }

    //"cuatro"
    public Keyword getLiteralCuatroKeyword_0_3() { return cLiteralCuatroKeyword_0_3; }

    //"cinco"
    public Keyword getLiteralCincoKeyword_0_4() { return cLiteralCincoKeyword_0_4; }

    //"seis"
    public Keyword getLiteralSeisKeyword_0_5() { return cLiteralSeisKeyword_0_5; }

    //"siete"
    public Keyword getLiteralSieteKeyword_0_6() { return cLiteralSieteKeyword_0_6; }

    //"ocho"
    public Keyword getLiteralOchoKeyword_0_7() { return cLiteralOchoKeyword_0_7; }

    //"nueve"
    public Keyword getLiteralNueveKeyword_0_8() { return cLiteralNueveKeyword_0_8; }

    //"diez"
    public Keyword getLiteralDiezKeyword_0_9() { return cLiteralDiezKeyword_0_9; }

    //"once"
    public Keyword getLiteralOnceKeyword_0_10() { return cLiteralOnceKeyword_0_10; }

    //"doce"
    public Keyword getLiteralDoceKeyword_0_11() { return cLiteralDoceKeyword_0_11; }
}

// Element accessors for parser rule "FraccionHoraLiteral" ("cuarto" | "media").
public class FraccionHoraLiteralElements extends AbstractParserRuleElementFinder {
    private final ParserRule rule = (ParserRule) GrammarUtil.findRuleForName(getGrammar(), "FraccionHoraLiteral");
    private final Assignment cLiteralAssignment = (Assignment)rule.eContents().get(1);
    private final Alternatives cLiteralAlternatives_0 =
    (Alternatives)cLiteralAssignment.eContents().get(0);
    private final Keyword cLiteralCuartoKeyword_0_0 = (Keyword)cLiteralAlternatives_0.eContents().get(0);
    private final Keyword cLiteralMediaKeyword_0_1 = (Keyword)cLiteralAlternatives_0.eContents().get(1);

    // The block below is a sample model that was commented out in the grammar
    // source; Xtext mirrors it here verbatim. Kept as-is.
    /// *
    //Cliente Jose al dia.
    //Cliente Juan, debe 20 pesitos
    //Cliente Roman, tiene credito 10 pesos
    //Cliente Ana, tiene credito 10 pesos
    //
    //
    //Las papas salen 10 pesos el kilo.
    //Las cerezas salen 20 pesitos los 200 gramos.
    //Los tomates salen 15 pesos el kilo.
    //las batatas salen 25 pesos el kilo.
    //
    //Ana compra un kilo de papas,
    // paga 20 pesos.
    //
    //Ana compra un kilo de papas,
    // paga 20 pesos.
    //
    //Ana compra un kilo de papas,
    // queda debiendo 40 pesos.
    //
    //Ana compra un kilo de papas,
    // paga 20 pesos.
    //
    //Jose compra un kilo de papas,
    // paga 200 pesos.
    //
    //Jose compra un kilo de papas,
    // queda debiendo 210 pesos.
    //
    //Jose compra un kilo de papas,
    // paga 50 pesos.
    //
    //Roman compra un kilo de papas.
    //
    //Roman compra 200 gramos de cerezas,
    // un kilo de tomates.
    //
    //Roman compra un kilo de papas,
    // paga 440 pesos.
    //
    //Roman compra un kilo de papas,
    // se redondea a 100 pesos,
    // queda debiendo 440 pesos.
    //
    //Juan compra un kilo de papas,
    // paga 40 pesos.
    //
    //Juan compra 100 gramos de cerezas,
    // queda debiendo 500 pesitos.
    //
    //Juan compra un kilo de papas,
    // paga 510 pesos.
    //
    //A las cinco y media se revisaron las cerezas, tomo 5 minutos.
    //A las diez se hizo el arqueo de caja , tomo una hora.
    // * / FraccionHoraLiteral:
    //	literal=("cuarto" | "media");
    @Override
    public ParserRule getRule() { return rule; }

    //literal=("cuarto" | "media")
    public Assignment getLiteralAssignment() { return cLiteralAssignment; }

    //"cuarto" | "media"
    public Alternatives getLiteralAlternatives_0() { return cLiteralAlternatives_0; }

    //"cuarto"
    public Keyword getLiteralCuartoKeyword_0_0() { return cLiteralCuartoKeyword_0_0; }

    //"media"
    public Keyword getLiteralMediaKeyword_0_1() { return cLiteralMediaKeyword_0_1; }
}

// One element-finder instance per parser rule of the grammar, created once in
// the injected constructor below and exposed through the getXxxAccess methods.
private final VerduleriaElements pVerduleria;
private final ClienteElements pCliente;
private final ClienteConDeudaElements pClienteConDeuda;
private final ClienteAlDiaElements pClienteAlDia;
private final ClienteConCreditoElements pClienteConCredito;
private final ProductoConPrecioElements pProductoConPrecio;
private final ProductoElements pProducto;
private final PrecioPorPesoElements pPrecioPorPeso;
private final MontoDineroElements pMontoDinero;
private final PesoElements pPeso;
private final PesoMagnitudVariableElements pPesoMagnitudVariable;
private final PesoMagnitudFijaElements pPesoMagnitudFija;
private final MedidaPesoElements pMedidaPeso;
private final MedidaPesoGramosElements pMedidaPesoGramos;
private final MedidaPesoKiloElements pMedidaPesoKilo;
private final VentaElements pVenta;
private final ItemVentaElements pItemVenta;
private final TareaElements pTarea;
private final HorarioElements pHorario;
private final HorarioLiteralElements pHorarioLiteral;
private final HorarioNumericoElements pHorarioNumerico;
private final TipoTareaElements pTipoTarea;
private final TareaRevisionProductosElements pTareaRevisionProductos;
private final TareaArqueoCajaElements pTareaArqueoCaja;
private final TareaLimpiezaLocalElements pTareaLimpiezaLocal;
private final LapsoTiempoElements pLapsoTiempo;
private final LapsoTiempoNumericoElements pLapsoTiempoNumerico;
private final LapsoTiempoLiteralElements pLapsoTiempoLiteral;
private final HorasLiteralElements pHorasLiteral;
private final FraccionHoraLiteralElements pFraccionHoraLiteral;

private final Grammar grammar;

private final TerminalsGrammarAccess gaTerminals;

// Guice-injected constructor: resolves the grammar model once, keeps the
// delegate Terminals grammar access, and instantiates every rule's
// element finder.
@Inject
public TextualVerdulerGrammarAccess(GrammarProvider grammarProvider, TerminalsGrammarAccess gaTerminals) {
    this.grammar = internalFindGrammar(grammarProvider);
    this.gaTerminals = gaTerminals;
    this.pVerduleria = new VerduleriaElements();
    this.pCliente = new ClienteElements();
    this.pClienteConDeuda = new ClienteConDeudaElements();
    this.pClienteAlDia = new ClienteAlDiaElements();
    this.pClienteConCredito = new ClienteConCreditoElements();
    this.pProductoConPrecio = new ProductoConPrecioElements();
    this.pProducto = new ProductoElements();
    this.pPrecioPorPeso = new PrecioPorPesoElements();
    this.pMontoDinero = new MontoDineroElements();
    this.pPeso = new PesoElements();
    this.pPesoMagnitudVariable = new PesoMagnitudVariableElements();
    this.pPesoMagnitudFija = new PesoMagnitudFijaElements();
    this.pMedidaPeso = new MedidaPesoElements();
    this.pMedidaPesoGramos = new MedidaPesoGramosElements();
    this.pMedidaPesoKilo = new MedidaPesoKiloElements();
    this.pVenta = new VentaElements();
    this.pItemVenta = new ItemVentaElements();
    this.pTarea = new TareaElements();
    this.pHorario = new HorarioElements();
    this.pHorarioLiteral = new HorarioLiteralElements();
    this.pHorarioNumerico = new HorarioNumericoElements();
    this.pTipoTarea = new TipoTareaElements();
    this.pTareaRevisionProductos = new TareaRevisionProductosElements();
    this.pTareaArqueoCaja = new TareaArqueoCajaElements();
    this.pTareaLimpiezaLocal = new TareaLimpiezaLocalElements();
    this.pLapsoTiempo = new LapsoTiempoElements();
    this.pLapsoTiempoNumerico = new LapsoTiempoNumericoElements();
    this.pLapsoTiempoLiteral = new LapsoTiempoLiteralElements();
    this.pHorasLiteral = new HorasLiteralElements();
    this.pFraccionHoraLiteral = new FraccionHoraLiteralElements();
}

protected Grammar internalFindGrammar(GrammarProvider grammarProvider) {
    Grammar grammar =
	// NOTE(review): generated Xtext artifact — rule/terminal accessors of TextualVerdulerGrammarAccess.
	// Do not hand-edit; regenerate from the grammar. Formatting restored; Spanish prose comments
	// translated; quoted grammar fragments kept verbatim (the DSL keywords are Spanish by design).
			grammarProvider.getGrammar(this);
	while (grammar != null) {
		if ("org.xtext.example.mydsl.TextualVerduler".equals(grammar.getName())) {
			return grammar;
		}
		List<Grammar> grammars = grammar.getUsedGrammars();
		if (!grammars.isEmpty()) {
			// Follow the first used grammar only; Xtext grammars have a single parent here.
			grammar = grammars.iterator().next();
		} else {
			return null;
		}
	}
	return grammar;
}

@Override
public Grammar getGrammar() { return grammar; }

public TerminalsGrammarAccess getTerminalsGrammarAccess() { return gaTerminals; }

//Verduleria:
//	(clientes+=Cliente | productos+=ProductoConPrecio | ventas+=Venta | tareas+=Tarea)*;
public VerduleriaElements getVerduleriaAccess() { return pVerduleria; }

public ParserRule getVerduleriaRule() { return getVerduleriaAccess().getRule(); }

//Cliente:
//	ClienteConDeuda | ClienteAlDia | ClienteConCredito;
public ClienteElements getClienteAccess() { return pCliente; }

public ParserRule getClienteRule() { return getClienteAccess().getRule(); }

//ClienteConDeuda:
//	"Cliente" name=ID ", debe" montoDeuda=MontoDinero;
public ClienteConDeudaElements getClienteConDeudaAccess() { return pClienteConDeuda; }

public ParserRule getClienteConDeudaRule() { return getClienteConDeudaAccess().getRule(); }

//ClienteAlDia:
//	"Cliente" name=ID "al dia.";
public ClienteAlDiaElements getClienteAlDiaAccess() { return pClienteAlDia; }

public ParserRule getClienteAlDiaRule() { return getClienteAlDiaAccess().getRule(); }

//ClienteConCredito:
//	"Cliente" name=ID ", tiene credito" montoCredito=MontoDinero;
public ClienteConCreditoElements getClienteConCreditoAccess() { return pClienteConCredito; }

public ParserRule getClienteConCreditoRule() { return getClienteConCreditoAccess().getRule(); }

//ProductoConPrecio:
//	Producto "salen" precio=PrecioPorPeso ".";
public ProductoConPrecioElements getProductoConPrecioAccess() { return pProductoConPrecio; }

public ParserRule getProductoConPrecioRule() { return getProductoConPrecioAccess().getRule(); }

//Producto:
//	("Los" | "Las" | "los" | "las") name=ID;
public ProductoElements getProductoAccess() { return pProducto; }

public ParserRule getProductoRule() { return getProductoAccess().getRule(); }

//PrecioPorPeso:
//	precio=MontoDinero ("los" pesaje=PesoMagnitudVariable | "el" pesaje=PesoMagnitudFija);
public PrecioPorPesoElements getPrecioPorPesoAccess() { return pPrecioPorPeso; }

public ParserRule getPrecioPorPesoRule() { return getPrecioPorPesoAccess().getRule(); }

//MontoDinero:
//	// (translated) The different ways of naming the currency are accepted, both to allow "un peso"
//	// and to match greengrocer slang, so as to reach a larger number of potential customers.
//	precio=INT ("peso" | "pesos" | "pesito" | "pesitos");
public MontoDineroElements getMontoDineroAccess() { return pMontoDinero; }

public ParserRule getMontoDineroRule() { return getMontoDineroAccess().getRule(); }

//Peso:
//	PesoMagnitudVariable | PesoMagnitudFija;
public PesoElements getPesoAccess() { return pPeso; }

public ParserRule getPesoRule() { return getPesoAccess().getRule(); }

//PesoMagnitudVariable:
//	magnitud=INT medidaPeso=MedidaPeso;
public PesoMagnitudVariableElements getPesoMagnitudVariableAccess() { return pPesoMagnitudVariable; }

public ParserRule getPesoMagnitudVariableRule() { return getPesoMagnitudVariableAccess().getRule(); }

//PesoMagnitudFija:
//	pesaMedioKilo?="medio kilo" | "un"? pesaCuartoKilo?="cuarto kilo" | "un"? pesaUnKilo?="kilo";
public PesoMagnitudFijaElements getPesoMagnitudFijaAccess() { return pPesoMagnitudFija; }

public ParserRule getPesoMagnitudFijaRule() { return getPesoMagnitudFijaAccess().getRule(); }

//MedidaPeso:
//	MedidaPesoKilo | MedidaPesoGramos;
public MedidaPesoElements getMedidaPesoAccess() { return pMedidaPeso; }

public ParserRule getMedidaPesoRule() { return getMedidaPesoAccess().getRule(); }

//MedidaPesoGramos:
//	{MedidaPesoGramos} "gramos";
public MedidaPesoGramosElements getMedidaPesoGramosAccess() { return pMedidaPesoGramos; }

public ParserRule getMedidaPesoGramosRule() { return getMedidaPesoGramosAccess().getRule(); }

//MedidaPesoKilo:
//	{MedidaPesoKilo} ("kilo" | "kilos");
public MedidaPesoKiloElements getMedidaPesoKiloAccess() { return pMedidaPesoKilo; }

public ParserRule getMedidaPesoKiloRule() { return getMedidaPesoKiloAccess().getRule(); }

//// Modified
//Venta:
//	comprador=[Cliente] "compra"
//	// (translated) hack: the mandatory comma after the item list lets the optional follow-up clauses
//	// ("se redondea", "paga") carry a leading comma, avoiding uglier workarounds.
//	itemsVendidos+=ItemVenta ("," itemsVendidos+=ItemVenta)*
//	// (translated) renamed the variables precio1/2/3 to more declarative names
//	("," ("se redondea a" totalRedondeado=MontoDinero))? // (translated) removed the exact price, it was not needed
//	("," ("paga" totalPagado=MontoDinero))? ("," ("queda debiendo" totalDebiendo=MontoDinero))?
//	// (translated) made the owed amount optional
//	".";
public VentaElements getVentaAccess() { return pVenta; }

public ParserRule getVentaRule() { return getVentaAccess().getRule(); }

//ItemVenta:
//	cantidad=Peso "de" producto=[Producto];
public ItemVentaElements getItemVentaAccess() { return pItemVenta; }

public ParserRule getItemVentaRule() { return getItemVentaAccess().getRule(); }

//Tarea:
//	"A las" horario=Horario tareaRealizada=TipoTarea ", tomo" duracion=LapsoTiempo ".";
public TareaElements getTareaAccess() { return pTarea; }

public ParserRule getTareaRule() { return getTareaAccess().getRule(); }

//// Schedules
//Horario:
//	HorarioNumerico | HorarioLiteral;
public HorarioElements getHorarioAccess() { return pHorario; }

public ParserRule getHorarioRule() { return getHorarioAccess().getRule(); }

//HorarioLiteral:
//	hora=HorasLiteral ("y" fraccionHora=FraccionHoraLiteral)?;
public HorarioLiteralElements getHorarioLiteralAccess() { return pHorarioLiteral; }

public ParserRule getHorarioLiteralRule() { return getHorarioLiteralAccess().getRule(); }

//HorarioNumerico:
//	hora=INT (":" minutos=INT)?;
public HorarioNumericoElements getHorarioNumericoAccess() { return pHorarioNumerico; }

public ParserRule getHorarioNumericoRule() { return getHorarioNumericoAccess().getRule(); }

//// Task type
//TipoTarea:
//	TareaLimpiezaLocal | TareaArqueoCaja | TareaRevisionProductos;
public TipoTareaElements getTipoTareaAccess() { return pTipoTarea; }

public ParserRule getTipoTareaRule() { return getTipoTareaAccess().getRule(); }

//TareaRevisionProductos:
//	"se revisaron" (("las" | "los") productosRevisados+=[Producto] ",")* ("las" | "los") productosRevisados+=[Producto];
public TareaRevisionProductosElements getTareaRevisionProductosAccess() { return pTareaRevisionProductos; }

public ParserRule getTareaRevisionProductosRule() { return getTareaRevisionProductosAccess().getRule(); }

//TareaArqueoCaja:
//	{TareaArqueoCaja} "se hizo el arqueo de caja";
public TareaArqueoCajaElements getTareaArqueoCajaAccess() { return pTareaArqueoCaja; }

public ParserRule getTareaArqueoCajaRule() { return getTareaArqueoCajaAccess().getRule(); }

//TareaLimpiezaLocal:
//	{TareaLimpiezaLocal} "se limpio el local";
public TareaLimpiezaLocalElements getTareaLimpiezaLocalAccess() { return pTareaLimpiezaLocal; }

public ParserRule getTareaLimpiezaLocalRule() { return getTareaLimpiezaLocalAccess().getRule(); }

//// Duration
//LapsoTiempo:
//	LapsoTiempoLiteral | LapsoTiempoNumerico;
public LapsoTiempoElements getLapsoTiempoAccess() { return pLapsoTiempo; }

public ParserRule getLapsoTiempoRule() { return getLapsoTiempoAccess().getRule(); }

//LapsoTiempoNumerico:
//	minutos=INT "minutos" | horas=INT ("hora" | "horas") ("y" minutos=INT "minutos")?;
public LapsoTiempoNumericoElements getLapsoTiempoNumericoAccess() { return pLapsoTiempoNumerico; }

public ParserRule getLapsoTiempoNumericoRule() { return getLapsoTiempoNumericoAccess().getRule(); }

//LapsoTiempoLiteral:
//	horas=HorasLiteral ("hora" | "horas") ("y" fraccionHora=FraccionHoraLiteral)?;
public LapsoTiempoLiteralElements getLapsoTiempoLiteralAccess() { return pLapsoTiempoLiteral; }

public ParserRule getLapsoTiempoLiteralRule() { return getLapsoTiempoLiteralAccess().getRule(); }

//// time and schedule components
//HorasLiteral:
//	literal=("una" | "dos" | "tres" | "cuatro" | "cinco" | "seis" | "siete" | "ocho" | "nueve" | "diez" | "once" |
//	"doce");
public HorasLiteralElements getHorasLiteralAccess() { return pHorasLiteral; }

public ParserRule getHorasLiteralRule() { return getHorasLiteralAccess().getRule(); }

// NOTE(review): the generator reproduced here (again) the long commented-out sample Verduleria
// program from the grammar source; condensed for readability — see the .xtext grammar file.
//FraccionHoraLiteral:
//	literal=("cuarto" | "media");
public FraccionHoraLiteralElements getFraccionHoraLiteralAccess() { return pFraccionHoraLiteral; }

public ParserRule getFraccionHoraLiteralRule() { return getFraccionHoraLiteralAccess().getRule(); }

// Terminal rules are inherited from the standard Terminals grammar (gaTerminals).
//terminal ID:
//	"^"? ("a".."z" | "A".."Z" | "_") ("a".."z" | "A".."Z" | "_" | "0".."9")*;
public TerminalRule getIDRule() { return gaTerminals.getIDRule(); }

//terminal INT returns ecore::EInt:
//	"0".."9"+;
public TerminalRule getINTRule() { return gaTerminals.getINTRule(); }

//terminal STRING:
//	"\"" ("\\" . / * 'b'|'t'|'n'|'f'|'r'|'u'|'"'|"'"|'\\' * / | !("\\" | "\""))* "\"" | "\'" ("\\" .
//	/ * 'b'|'t'|'n'|'f'|'r'|'u'|'"'|"'"|'\\' * / | !("\\" | "\'"))* "\'";
public TerminalRule getSTRINGRule() { return gaTerminals.getSTRINGRule(); }

//terminal ML_COMMENT:
//	"/ *"->"* /";
public TerminalRule getML_COMMENTRule() { return gaTerminals.getML_COMMENTRule(); }

//terminal SL_COMMENT:
//	"//" !("\n" | "\r")* ("\r"? "\n")?;
public TerminalRule getSL_COMMENTRule() { return gaTerminals.getSL_COMMENTRule(); }

//terminal WS:
//	(" " | "\t" | "\r" | "\n")+;
public TerminalRule getWSRule() { return gaTerminals.getWSRule(); }

//terminal ANY_OTHER:
//	.;
public TerminalRule getANY_OTHERRule() { return gaTerminals.getANY_OTHERRule(); }
}
<file_sep>/*
 * generated by Xtext
 */
package org.xtext.example.mydsl.serializer;

import com.google.inject.Inject;
import com.google.inject.Provider;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.xtext.serializer.acceptor.ISemanticSequenceAcceptor;
import org.eclipse.xtext.serializer.acceptor.SequenceFeeder;
import org.eclipse.xtext.serializer.diagnostic.ISemanticSequencerDiagnosticProvider;
import org.eclipse.xtext.serializer.diagnostic.ISerializationDiagnostic.Acceptor;
import org.eclipse.xtext.serializer.sequencer.AbstractDelegatingSemanticSequencer;
import org.eclipse.xtext.serializer.sequencer.GenericSequencer;
import org.eclipse.xtext.serializer.sequencer.ISemanticNodeProvider.INodesForEObjectProvider;
import org.eclipse.xtext.serializer.sequencer.ISemanticSequencer;
import org.eclipse.xtext.serializer.sequencer.ITransientValueService;
import org.eclipse.xtext.serializer.sequencer.ITransientValueService.ValueTransient;
import org.xtext.example.mydsl.services.TextualVerdulerGrammarAccess;
import org.xtext.example.mydsl.textualVerduler.ClienteAlDia;
import org.xtext.example.mydsl.textualVerduler.ClienteConCredito;
import org.xtext.example.mydsl.textualVerduler.ClienteConDeuda;
import org.xtext.example.mydsl.textualVerduler.FraccionHoraLiteral;
import org.xtext.example.mydsl.textualVerduler.HorarioLiteral;
import org.xtext.example.mydsl.textualVerduler.HorarioNumerico;
import org.xtext.example.mydsl.textualVerduler.HorasLiteral;
import org.xtext.example.mydsl.textualVerduler.ItemVenta;
import org.xtext.example.mydsl.textualVerduler.LapsoTiempoLiteral;
import org.xtext.example.mydsl.textualVerduler.LapsoTiempoNumerico;
import
org.xtext.example.mydsl.textualVerduler.MedidaPesoGramos;
import org.xtext.example.mydsl.textualVerduler.MedidaPesoKilo;
import org.xtext.example.mydsl.textualVerduler.MontoDinero;
import org.xtext.example.mydsl.textualVerduler.PesoMagnitudFija;
import org.xtext.example.mydsl.textualVerduler.PesoMagnitudVariable;
import org.xtext.example.mydsl.textualVerduler.PrecioPorPeso;
import org.xtext.example.mydsl.textualVerduler.Producto;
import org.xtext.example.mydsl.textualVerduler.Tarea;
import org.xtext.example.mydsl.textualVerduler.TareaArqueoCaja;
import org.xtext.example.mydsl.textualVerduler.TareaLimpiezaLocal;
import org.xtext.example.mydsl.textualVerduler.TareaRevisionProductos;
import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage;
import org.xtext.example.mydsl.textualVerduler.Venta;
import org.xtext.example.mydsl.textualVerduler.Verduleria;

/**
 * Generated Xtext semantic sequencer for the TextualVerduler language.
 * Maps each semantic model object onto the sequencing method of its grammar rule;
 * rules with a single unambiguous token layout feed values explicitly, the rest
 * delegate to the generic sequencer. Do not hand-edit; regenerate from the grammar.
 */
@SuppressWarnings("all")
public class TextualVerdulerSemanticSequencer extends AbstractDelegatingSemanticSequencer {

	@Inject
	private TextualVerdulerGrammarAccess grammarAccess;

	/**
	 * Dispatches on the EClass of {@code semanticObject}. PRODUCTO additionally
	 * dispatches on the parse context, because a Producto instance may be serialized
	 * either via the Producto rule or via the ProductoConPrecio rule.
	 */
	@Override
	public void createSequence(EObject context, EObject semanticObject) {
		if(semanticObject.eClass().getEPackage() == TextualVerdulerPackage.eINSTANCE) switch(semanticObject.eClass().getClassifierID()) {
			case TextualVerdulerPackage.CLIENTE_AL_DIA:
				sequence_ClienteAlDia(context, (ClienteAlDia) semanticObject);
				return;
			case TextualVerdulerPackage.CLIENTE_CON_CREDITO:
				sequence_ClienteConCredito(context, (ClienteConCredito) semanticObject);
				return;
			case TextualVerdulerPackage.CLIENTE_CON_DEUDA:
				sequence_ClienteConDeuda(context, (ClienteConDeuda) semanticObject);
				return;
			case TextualVerdulerPackage.FRACCION_HORA_LITERAL:
				sequence_FraccionHoraLiteral(context, (FraccionHoraLiteral) semanticObject);
				return;
			case TextualVerdulerPackage.HORARIO_LITERAL:
				sequence_HorarioLiteral(context, (HorarioLiteral) semanticObject);
				return;
			case TextualVerdulerPackage.HORARIO_NUMERICO:
				sequence_HorarioNumerico(context, (HorarioNumerico) semanticObject);
				return;
			case TextualVerdulerPackage.HORAS_LITERAL:
				sequence_HorasLiteral(context, (HorasLiteral) semanticObject);
				return;
			case TextualVerdulerPackage.ITEM_VENTA:
				sequence_ItemVenta(context, (ItemVenta) semanticObject);
				return;
			case TextualVerdulerPackage.LAPSO_TIEMPO_LITERAL:
				sequence_LapsoTiempoLiteral(context, (LapsoTiempoLiteral) semanticObject);
				return;
			case TextualVerdulerPackage.LAPSO_TIEMPO_NUMERICO:
				sequence_LapsoTiempoNumerico(context, (LapsoTiempoNumerico) semanticObject);
				return;
			case TextualVerdulerPackage.MEDIDA_PESO_GRAMOS:
				sequence_MedidaPesoGramos(context, (MedidaPesoGramos) semanticObject);
				return;
			case TextualVerdulerPackage.MEDIDA_PESO_KILO:
				sequence_MedidaPesoKilo(context, (MedidaPesoKilo) semanticObject);
				return;
			case TextualVerdulerPackage.MONTO_DINERO:
				sequence_MontoDinero(context, (MontoDinero) semanticObject);
				return;
			case TextualVerdulerPackage.PESO_MAGNITUD_FIJA:
				sequence_PesoMagnitudFija(context, (PesoMagnitudFija) semanticObject);
				return;
			case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE:
				sequence_PesoMagnitudVariable(context, (PesoMagnitudVariable) semanticObject);
				return;
			case TextualVerdulerPackage.PRECIO_POR_PESO:
				sequence_PrecioPorPeso(context, (PrecioPorPeso) semanticObject);
				return;
			case TextualVerdulerPackage.PRODUCTO:
				if(context == grammarAccess.getProductoRule()) {
					sequence_Producto(context, (Producto) semanticObject);
					return;
				}
				else if(context == grammarAccess.getProductoConPrecioRule()) {
					sequence_Producto_ProductoConPrecio(context, (Producto) semanticObject);
					return;
				}
				else break;
			case TextualVerdulerPackage.TAREA:
				sequence_Tarea(context, (Tarea) semanticObject);
				return;
			case TextualVerdulerPackage.TAREA_ARQUEO_CAJA:
				sequence_TareaArqueoCaja(context, (TareaArqueoCaja) semanticObject);
				return;
			case TextualVerdulerPackage.TAREA_LIMPIEZA_LOCAL:
				sequence_TareaLimpiezaLocal(context, (TareaLimpiezaLocal) semanticObject);
				return;
			case TextualVerdulerPackage.TAREA_REVISION_PRODUCTOS:
				sequence_TareaRevisionProductos(context, (TareaRevisionProductos) semanticObject);
				return;
			case TextualVerdulerPackage.VENTA:
				sequence_Venta(context, (Venta) semanticObject);
				return;
			case TextualVerdulerPackage.VERDULERIA:
				sequence_Verduleria(context, (Verduleria) semanticObject);
				return;
			}
		// No sequencing method matched: report an invalid context/type combination.
		if (errorAcceptor != null)
			errorAcceptor.accept(diagnosticProvider.createInvalidContextOrTypeDiagnostic(semanticObject, context));
	}

	/**
	 * Constraint:
	 *     name=ID
	 */
	protected void sequence_ClienteAlDia(EObject context, ClienteAlDia semanticObject) {
		if(errorAcceptor != null) {
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.CLIENTE__NAME) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.CLIENTE__NAME));
		}
		INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
		SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
		feeder.accept(grammarAccess.getClienteAlDiaAccess().getNameIDTerminalRuleCall_1_0(), semanticObject.getName());
		feeder.finish();
	}

	/**
	 * Constraint:
	 *     (name=ID montoCredito=MontoDinero)
	 */
	protected void sequence_ClienteConCredito(EObject context, ClienteConCredito semanticObject) {
		if(errorAcceptor != null) {
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.CLIENTE__NAME) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.CLIENTE__NAME));
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.CLIENTE_CON_CREDITO__MONTO_CREDITO) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.CLIENTE_CON_CREDITO__MONTO_CREDITO));
		}
		INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
		SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
		feeder.accept(grammarAccess.getClienteConCreditoAccess().getNameIDTerminalRuleCall_1_0(), semanticObject.getName());
		feeder.accept(grammarAccess.getClienteConCreditoAccess().getMontoCreditoMontoDineroParserRuleCall_3_0(), semanticObject.getMontoCredito());
		feeder.finish();
	}

	/**
	 * Constraint:
	 *     (name=ID montoDeuda=MontoDinero)
	 */
	protected void sequence_ClienteConDeuda(EObject context, ClienteConDeuda semanticObject) {
		if(errorAcceptor != null) {
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.CLIENTE__NAME) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.CLIENTE__NAME));
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.CLIENTE_CON_DEUDA__MONTO_DEUDA) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.CLIENTE_CON_DEUDA__MONTO_DEUDA));
		}
		INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
		SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
		feeder.accept(grammarAccess.getClienteConDeudaAccess().getNameIDTerminalRuleCall_1_0(), semanticObject.getName());
		feeder.accept(grammarAccess.getClienteConDeudaAccess().getMontoDeudaMontoDineroParserRuleCall_3_0(), semanticObject.getMontoDeuda());
		feeder.finish();
	}

	/**
	 * Constraint:
	 *     (literal='cuarto' | literal='media')
	 */
	protected void sequence_FraccionHoraLiteral(EObject context, FraccionHoraLiteral semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (hora=HorasLiteral fraccionHora=FraccionHoraLiteral?)
	 */
	protected void sequence_HorarioLiteral(EObject context, HorarioLiteral semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (hora=INT minutos=INT?)
	 */
	protected void sequence_HorarioNumerico(EObject context, HorarioNumerico semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (
	 *         literal='una' |
	 *         literal='dos' |
	 *         literal='tres' |
	 *         literal='cuatro' |
	 *         literal='cinco' |
	 *         literal='seis' |
	 *         literal='siete' |
	 *         literal='ocho' |
	 *         literal='nueve' |
	 *         literal='diez' |
	 *         literal='once' |
	 *         literal='doce'
	 *     )
	 */
	protected void sequence_HorasLiteral(EObject context, HorasLiteral semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (cantidad=Peso producto=[Producto|ID])
	 */
	protected void sequence_ItemVenta(EObject context, ItemVenta semanticObject) {
		if(errorAcceptor != null) {
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.ITEM_VENTA__CANTIDAD) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.ITEM_VENTA__CANTIDAD));
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.ITEM_VENTA__PRODUCTO) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.ITEM_VENTA__PRODUCTO));
		}
		INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
		SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
		feeder.accept(grammarAccess.getItemVentaAccess().getCantidadPesoParserRuleCall_0_0(), semanticObject.getCantidad());
		feeder.accept(grammarAccess.getItemVentaAccess().getProductoProductoIDTerminalRuleCall_2_0_1(), semanticObject.getProducto());
		feeder.finish();
	}

	/**
	 * Constraint:
	 *     (horas=HorasLiteral fraccionHora=FraccionHoraLiteral?)
	 */
	protected void sequence_LapsoTiempoLiteral(EObject context, LapsoTiempoLiteral semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (minutos=INT | (horas=INT minutos=INT?))
	 */
	protected void sequence_LapsoTiempoNumerico(EObject context, LapsoTiempoNumerico semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     {MedidaPesoGramos}
	 */
	protected void sequence_MedidaPesoGramos(EObject context, MedidaPesoGramos semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     {MedidaPesoKilo}
	 */
	protected void sequence_MedidaPesoKilo(EObject context, MedidaPesoKilo semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     precio=INT
	 */
	protected void sequence_MontoDinero(EObject context, MontoDinero semanticObject) {
		if(errorAcceptor != null) {
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.MONTO_DINERO__PRECIO) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.MONTO_DINERO__PRECIO));
		}
		INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
		SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
		feeder.accept(grammarAccess.getMontoDineroAccess().getPrecioINTTerminalRuleCall_0_0(), semanticObject.getPrecio());
		feeder.finish();
	}

	/**
	 * Constraint:
	 *     (pesaMedioKilo?='medio kilo' | pesaCuartoKilo?='cuarto kilo' | pesaUnKilo?='kilo')
	 */
	protected void sequence_PesoMagnitudFija(EObject context, PesoMagnitudFija semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (magnitud=INT medidaPeso=MedidaPeso)
	 */
	protected void sequence_PesoMagnitudVariable(EObject context, PesoMagnitudVariable semanticObject) {
		if(errorAcceptor != null) {
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.PESO_MAGNITUD_VARIABLE__MAGNITUD) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.PESO_MAGNITUD_VARIABLE__MAGNITUD));
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO));
		}
		INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
		SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
		feeder.accept(grammarAccess.getPesoMagnitudVariableAccess().getMagnitudINTTerminalRuleCall_0_0(), semanticObject.getMagnitud());
		feeder.accept(grammarAccess.getPesoMagnitudVariableAccess().getMedidaPesoMedidaPesoParserRuleCall_1_0(), semanticObject.getMedidaPeso());
		feeder.finish();
	}

	/**
	 * Constraint:
	 *     (precio=MontoDinero (pesaje=PesoMagnitudVariable | pesaje=PesoMagnitudFija))
	 */
	protected void sequence_PrecioPorPeso(EObject context, PrecioPorPeso semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     name=ID
	 */
	protected void sequence_Producto(EObject context, Producto semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (name=ID precio=PrecioPorPeso)
	 */
	protected void sequence_Producto_ProductoConPrecio(EObject context, Producto semanticObject) {
		if(errorAcceptor != null) {
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.PRODUCTO__PRECIO) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.PRODUCTO__PRECIO));
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.PRODUCTO__NAME) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.PRODUCTO__NAME));
		}
		INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
		SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
		feeder.accept(grammarAccess.getProductoAccess().getNameIDTerminalRuleCall_1_0(), semanticObject.getName());
		feeder.accept(grammarAccess.getProductoConPrecioAccess().getPrecioPrecioPorPesoParserRuleCall_2_0(), semanticObject.getPrecio());
		feeder.finish();
	}

	/**
	 * Constraint:
	 *     {TareaArqueoCaja}
	 */
	protected void sequence_TareaArqueoCaja(EObject context, TareaArqueoCaja semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     {TareaLimpiezaLocal}
	 */
	protected void sequence_TareaLimpiezaLocal(EObject context, TareaLimpiezaLocal semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (productosRevisados+=[Producto|ID]* productosRevisados+=[Producto|ID])
	 */
	protected void sequence_TareaRevisionProductos(EObject context, TareaRevisionProductos semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (horario=Horario tareaRealizada=TipoTarea duracion=LapsoTiempo)
	 */
	protected void sequence_Tarea(EObject context, Tarea semanticObject) {
		if(errorAcceptor != null) {
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.TAREA__HORARIO) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.TAREA__HORARIO));
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.TAREA__TAREA_REALIZADA) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.TAREA__TAREA_REALIZADA));
			if(transientValues.isValueTransient(semanticObject, TextualVerdulerPackage.Literals.TAREA__DURACION) == ValueTransient.YES)
				errorAcceptor.accept(diagnosticProvider.createFeatureValueMissing(semanticObject, TextualVerdulerPackage.Literals.TAREA__DURACION));
		}
		INodesForEObjectProvider nodes = createNodeProvider(semanticObject);
		SequenceFeeder feeder = createSequencerFeeder(semanticObject, nodes);
		feeder.accept(grammarAccess.getTareaAccess().getHorarioHorarioParserRuleCall_1_0(), semanticObject.getHorario());
		feeder.accept(grammarAccess.getTareaAccess().getTareaRealizadaTipoTareaParserRuleCall_2_0(), semanticObject.getTareaRealizada());
		feeder.accept(grammarAccess.getTareaAccess().getDuracionLapsoTiempoParserRuleCall_4_0(), semanticObject.getDuracion());
		feeder.finish();
	}

	/**
	 * Constraint:
	 *     (
	 *         comprador=[Cliente|ID]
	 *         itemsVendidos+=ItemVenta
	 *         itemsVendidos+=ItemVenta*
	 *         totalRedondeado=MontoDinero?
	 *         totalPagado=MontoDinero?
	 *         totalDebiendo=MontoDinero?
	 *     )
	 */
	protected void sequence_Venta(EObject context, Venta semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}

	/**
	 * Constraint:
	 *     (clientes+=Cliente | productos+=ProductoConPrecio | ventas+=Venta | tareas+=Tarea)*
	 */
	protected void sequence_Verduleria(EObject context, Verduleria semanticObject) {
		genericSequencer.createSequence(context, semanticObject);
	}
}
<file_sep>/**
 */
package org.xtext.example.mydsl.textualVerduler.impl;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage;
import org.xtext.example.mydsl.textualVerduler.TipoTarea;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Tipo Tarea</b></em>'.
* <!-- end-user-doc --> * * @generated */ public class TipoTareaImpl extends MinimalEObjectImpl.Container implements TipoTarea { /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TipoTareaImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.TIPO_TAREA; } } //TipoTareaImpl <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler.impl; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.xtext.example.mydsl.textualVerduler.PrecioPorPeso; import org.xtext.example.mydsl.textualVerduler.Producto; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Producto</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.ProductoImpl#getPrecio <em>Precio</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.ProductoImpl#getName <em>Name</em>}</li> * </ul> * * @generated */ public class ProductoImpl extends ProductoConPrecioImpl implements Producto { /** * The cached value of the '{@link #getPrecio() <em>Precio</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getPrecio() * @generated * @ordered */ protected PrecioPorPeso precio; /** * The default value of the '{@link #getName() <em>Name</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getName() * @generated * @ordered */ protected static final String NAME_EDEFAULT = null; /** * The cached value of the '{@link #getName() <em>Name</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getName() * @generated * @ordered */ protected String name = NAME_EDEFAULT; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected ProductoImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.PRODUCTO; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public PrecioPorPeso getPrecio() { return precio; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetPrecio(PrecioPorPeso newPrecio, NotificationChain msgs) { PrecioPorPeso oldPrecio = precio; precio = newPrecio; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PRODUCTO__PRECIO, oldPrecio, newPrecio); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setPrecio(PrecioPorPeso newPrecio) { if (newPrecio != precio) { NotificationChain msgs = null; if (precio != null) msgs = ((InternalEObject)precio).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.PRODUCTO__PRECIO, null, msgs); if (newPrecio != null) msgs = ((InternalEObject)newPrecio).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.PRODUCTO__PRECIO, null, msgs); msgs = basicSetPrecio(newPrecio, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PRODUCTO__PRECIO, newPrecio, newPrecio)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public String getName() { return name; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setName(String newName) { String oldName = name; name = newName; if 
(eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PRODUCTO__NAME, oldName, name)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case TextualVerdulerPackage.PRODUCTO__PRECIO: return basicSetPrecio(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case TextualVerdulerPackage.PRODUCTO__PRECIO: return getPrecio(); case TextualVerdulerPackage.PRODUCTO__NAME: return getName(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case TextualVerdulerPackage.PRODUCTO__PRECIO: setPrecio((PrecioPorPeso)newValue); return; case TextualVerdulerPackage.PRODUCTO__NAME: setName((String)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case TextualVerdulerPackage.PRODUCTO__PRECIO: setPrecio((PrecioPorPeso)null); return; case TextualVerdulerPackage.PRODUCTO__NAME: setName(NAME_EDEFAULT); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case TextualVerdulerPackage.PRODUCTO__PRECIO: return precio != null; case TextualVerdulerPackage.PRODUCTO__NAME: return NAME_EDEFAULT == null ? 
name != null : !NAME_EDEFAULT.equals(name); } return super.eIsSet(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public String toString() { if (eIsProxy()) return super.toString(); StringBuffer result = new StringBuffer(super.toString()); result.append(" (name: "); result.append(name); result.append(')'); return result.toString(); } } //ProductoImpl <file_sep># unq-obj3-s1-2015-grupo7 ## TP3 DSLs: Textual Verduler - [Enunciado](https://sites.google.com/site/programacionhm/unq/obj3-tps-2015c1)<file_sep>/** */ package org.xtext.example.mydsl.textualVerduler; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Peso Magnitud Fija</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudFija#isPesaMedioKilo <em>Pesa Medio Kilo</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudFija#isPesaCuartoKilo <em>Pesa Cuarto Kilo</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudFija#isPesaUnKilo <em>Pesa Un Kilo</em>}</li> * </ul> * * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPesoMagnitudFija() * @model * @generated */ public interface PesoMagnitudFija extends Peso { /** * Returns the value of the '<em><b>Pesa Medio Kilo</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Pesa Medio Kilo</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Pesa Medio Kilo</em>' attribute. * @see #setPesaMedioKilo(boolean) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPesoMagnitudFija_PesaMedioKilo() * @model * @generated */ boolean isPesaMedioKilo(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudFija#isPesaMedioKilo <em>Pesa Medio Kilo</em>}' attribute. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Pesa Medio Kilo</em>' attribute. * @see #isPesaMedioKilo() * @generated */ void setPesaMedioKilo(boolean value); /** * Returns the value of the '<em><b>Pesa Cuarto Kilo</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Pesa Cuarto Kilo</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Pesa Cuarto Kilo</em>' attribute. * @see #setPesaCuartoKilo(boolean) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPesoMagnitudFija_PesaCuartoKilo() * @model * @generated */ boolean isPesaCuartoKilo(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudFija#isPesaCuartoKilo <em>Pesa Cuarto Kilo</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Pesa Cuarto Kilo</em>' attribute. * @see #isPesaCuartoKilo() * @generated */ void setPesaCuartoKilo(boolean value); /** * Returns the value of the '<em><b>Pesa Un Kilo</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Pesa Un Kilo</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Pesa Un Kilo</em>' attribute. * @see #setPesaUnKilo(boolean) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPesoMagnitudFija_PesaUnKilo() * @model * @generated */ boolean isPesaUnKilo(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudFija#isPesaUnKilo <em>Pesa Un Kilo</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Pesa Un Kilo</em>' attribute. 
* @see #isPesaUnKilo() * @generated */ void setPesaUnKilo(boolean value); } // PesoMagnitudFija <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Peso Magnitud Variable</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudVariable#getMagnitud <em>Magnitud</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudVariable#getMedidaPeso <em>Medida Peso</em>}</li> * </ul> * * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPesoMagnitudVariable() * @model * @generated */ public interface PesoMagnitudVariable extends Peso { /** * Returns the value of the '<em><b>Magnitud</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Magnitud</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Magnitud</em>' attribute. * @see #setMagnitud(int) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPesoMagnitudVariable_Magnitud() * @model * @generated */ int getMagnitud(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudVariable#getMagnitud <em>Magnitud</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Magnitud</em>' attribute. * @see #getMagnitud() * @generated */ void setMagnitud(int value); /** * Returns the value of the '<em><b>Medida Peso</b></em>' containment reference. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Medida Peso</em>' containment reference isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Medida Peso</em>' containment reference. 
* @see #setMedidaPeso(MedidaPeso) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPesoMagnitudVariable_MedidaPeso() * @model containment="true" * @generated */ MedidaPeso getMedidaPeso(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.PesoMagnitudVariable#getMedidaPeso <em>Medida Peso</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Medida Peso</em>' containment reference. * @see #getMedidaPeso() * @generated */ void setMedidaPeso(MedidaPeso value); } // PesoMagnitudVariable <file_sep>#Sun Jul 05 00:22:16 ART 2015 org.eclipse.core.runtime=2 org.eclipse.platform=4.4.2.v20150204-1700 <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler.impl; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.eclipse.emf.ecore.impl.MinimalEObjectImpl; import org.xtext.example.mydsl.textualVerduler.Horario; import org.xtext.example.mydsl.textualVerduler.LapsoTiempo; import org.xtext.example.mydsl.textualVerduler.Tarea; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; import org.xtext.example.mydsl.textualVerduler.TipoTarea; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Tarea</b></em>'. 
* <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.TareaImpl#getHorario <em>Horario</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.TareaImpl#getTareaRealizada <em>Tarea Realizada</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.TareaImpl#getDuracion <em>Duracion</em>}</li> * </ul> * * @generated */ public class TareaImpl extends MinimalEObjectImpl.Container implements Tarea { /** * The cached value of the '{@link #getHorario() <em>Horario</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getHorario() * @generated * @ordered */ protected Horario horario; /** * The cached value of the '{@link #getTareaRealizada() <em>Tarea Realizada</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getTareaRealizada() * @generated * @ordered */ protected TipoTarea tareaRealizada; /** * The cached value of the '{@link #getDuracion() <em>Duracion</em>}' containment reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getDuracion() * @generated * @ordered */ protected LapsoTiempo duracion; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected TareaImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.TAREA; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public Horario getHorario() { return horario; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetHorario(Horario newHorario, NotificationChain msgs) { Horario oldHorario = horario; horario = newHorario; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.TAREA__HORARIO, oldHorario, newHorario); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setHorario(Horario newHorario) { if (newHorario != horario) { NotificationChain msgs = null; if (horario != null) msgs = ((InternalEObject)horario).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.TAREA__HORARIO, null, msgs); if (newHorario != null) msgs = ((InternalEObject)newHorario).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.TAREA__HORARIO, null, msgs); msgs = basicSetHorario(newHorario, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.TAREA__HORARIO, newHorario, newHorario)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public TipoTarea getTareaRealizada() { return tareaRealizada; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetTareaRealizada(TipoTarea newTareaRealizada, 
NotificationChain msgs) { TipoTarea oldTareaRealizada = tareaRealizada; tareaRealizada = newTareaRealizada; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.TAREA__TAREA_REALIZADA, oldTareaRealizada, newTareaRealizada); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setTareaRealizada(TipoTarea newTareaRealizada) { if (newTareaRealizada != tareaRealizada) { NotificationChain msgs = null; if (tareaRealizada != null) msgs = ((InternalEObject)tareaRealizada).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.TAREA__TAREA_REALIZADA, null, msgs); if (newTareaRealizada != null) msgs = ((InternalEObject)newTareaRealizada).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.TAREA__TAREA_REALIZADA, null, msgs); msgs = basicSetTareaRealizada(newTareaRealizada, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.TAREA__TAREA_REALIZADA, newTareaRealizada, newTareaRealizada)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public LapsoTiempo getDuracion() { return duracion; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetDuracion(LapsoTiempo newDuracion, NotificationChain msgs) { LapsoTiempo oldDuracion = duracion; duracion = newDuracion; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.TAREA__DURACION, oldDuracion, newDuracion); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setDuracion(LapsoTiempo newDuracion) { if (newDuracion != duracion) { NotificationChain 
msgs = null; if (duracion != null) msgs = ((InternalEObject)duracion).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.TAREA__DURACION, null, msgs); if (newDuracion != null) msgs = ((InternalEObject)newDuracion).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.TAREA__DURACION, null, msgs); msgs = basicSetDuracion(newDuracion, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.TAREA__DURACION, newDuracion, newDuracion)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case TextualVerdulerPackage.TAREA__HORARIO: return basicSetHorario(null, msgs); case TextualVerdulerPackage.TAREA__TAREA_REALIZADA: return basicSetTareaRealizada(null, msgs); case TextualVerdulerPackage.TAREA__DURACION: return basicSetDuracion(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case TextualVerdulerPackage.TAREA__HORARIO: return getHorario(); case TextualVerdulerPackage.TAREA__TAREA_REALIZADA: return getTareaRealizada(); case TextualVerdulerPackage.TAREA__DURACION: return getDuracion(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case TextualVerdulerPackage.TAREA__HORARIO: setHorario((Horario)newValue); return; case TextualVerdulerPackage.TAREA__TAREA_REALIZADA: setTareaRealizada((TipoTarea)newValue); return; case TextualVerdulerPackage.TAREA__DURACION: setDuracion((LapsoTiempo)newValue); return; } super.eSet(featureID, newValue); } /** * 
<!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case TextualVerdulerPackage.TAREA__HORARIO: setHorario((Horario)null); return; case TextualVerdulerPackage.TAREA__TAREA_REALIZADA: setTareaRealizada((TipoTarea)null); return; case TextualVerdulerPackage.TAREA__DURACION: setDuracion((LapsoTiempo)null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case TextualVerdulerPackage.TAREA__HORARIO: return horario != null; case TextualVerdulerPackage.TAREA__TAREA_REALIZADA: return tareaRealizada != null; case TextualVerdulerPackage.TAREA__DURACION: return duracion != null; } return super.eIsSet(featureID); } } //TareaImpl <file_sep>#Configuration File #Sun Jul 05 00:45:50 ART 2015 org.eclipse.update.reconcile=false osgi.install.area=file\:/home/damian/Descargas/eclipse/ eclipse.p2.profile=epp.package.dsl [email protected]/workspace osgi.framework=file\:/home/damian/Descargas/eclipse/plugins/org.eclipse.osgi_3.10.2.v20150203-1939.jar equinox.use.ds=true eclipse.buildId=4.4.2.M20150204-1700 osgi.bundles=reference\:file\:/home/damian/Descargas/eclipse/plugins/org.eclipse.equinox.simpleconfigurator_1.1.0.v20131217-1203.jar@1\:start org.eclipse.equinox.simpleconfigurator.configUrl=file\:/home/damian/Descargas/eclipse/ws/.metadata/.plugins/org.eclipse.pde.core/Launch Runtime Eclipse/org.eclipse.equinox.simpleconfigurator/bundles.info osgi.configuration.cascaded=false eclipse.product=org.eclipse.platform.ide osgi.splashPath=file\:/home/damian/Descargas/eclipse/plugins/org.eclipse.platform_4.4.2.v20150204-1700 eclipse.application=org.eclipse.ui.ide.workbench [email protected]/.p2 osgi.bundles.defaultStartLevel=4 osgi.framework.extensions=reference\:file\:/home/damian/Descargas/eclipse/plugins/org.eclipse.osgi.compatibility.state_1.0.1.v20140709-1414.jar <file_sep>/** */ 
package org.xtext.example.mydsl.textualVerduler.impl; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.xtext.example.mydsl.textualVerduler.MedidaPeso; import org.xtext.example.mydsl.textualVerduler.PesoMagnitudVariable; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Peso Magnitud Variable</b></em>'. * <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.PesoMagnitudVariableImpl#getMagnitud <em>Magnitud</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.PesoMagnitudVariableImpl#getMedidaPeso <em>Medida Peso</em>}</li> * </ul> * * @generated */ public class PesoMagnitudVariableImpl extends PesoImpl implements PesoMagnitudVariable { /** * The default value of the '{@link #getMagnitud() <em>Magnitud</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMagnitud() * @generated * @ordered */ protected static final int MAGNITUD_EDEFAULT = 0; /** * The cached value of the '{@link #getMagnitud() <em>Magnitud</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMagnitud() * @generated * @ordered */ protected int magnitud = MAGNITUD_EDEFAULT; /** * The cached value of the '{@link #getMedidaPeso() <em>Medida Peso</em>}' containment reference. 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getMedidaPeso() * @generated * @ordered */ protected MedidaPeso medidaPeso; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected PesoMagnitudVariableImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.PESO_MAGNITUD_VARIABLE; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public int getMagnitud() { return magnitud; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setMagnitud(int newMagnitud) { int oldMagnitud = magnitud; magnitud = newMagnitud; if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MAGNITUD, oldMagnitud, magnitud)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public MedidaPeso getMedidaPeso() { return medidaPeso; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetMedidaPeso(MedidaPeso newMedidaPeso, NotificationChain msgs) { MedidaPeso oldMedidaPeso = medidaPeso; medidaPeso = newMedidaPeso; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO, oldMedidaPeso, newMedidaPeso); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setMedidaPeso(MedidaPeso newMedidaPeso) { if (newMedidaPeso != medidaPeso) { NotificationChain msgs = null; if (medidaPeso != null) msgs = ((InternalEObject)medidaPeso).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO, null, msgs); if (newMedidaPeso != null) msgs = ((InternalEObject)newMedidaPeso).eInverseAdd(this, 
EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO, null, msgs); msgs = basicSetMedidaPeso(newMedidaPeso, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO, newMedidaPeso, newMedidaPeso)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO: return basicSetMedidaPeso(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MAGNITUD: return getMagnitud(); case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO: return getMedidaPeso(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MAGNITUD: setMagnitud((Integer)newValue); return; case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO: setMedidaPeso((MedidaPeso)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MAGNITUD: setMagnitud(MAGNITUD_EDEFAULT); return; case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO: setMedidaPeso((MedidaPeso)null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public 
boolean eIsSet(int featureID) {
	switch (featureID) {
		// 'magnitud' is an attribute: set iff it differs from its generated default.
		case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MAGNITUD:
			return magnitud != MAGNITUD_EDEFAULT;
		// 'medidaPeso' is a reference: set iff non-null.
		case TextualVerdulerPackage.PESO_MAGNITUD_VARIABLE__MEDIDA_PESO:
			return medidaPeso != null;
	}
	return super.eIsSet(featureID);
}

/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public String toString() {
	if (eIsProxy()) return super.toString();

	StringBuffer result = new StringBuffer(super.toString());
	result.append(" (magnitud: ");
	result.append(magnitud);
	result.append(')');
	return result.toString();
}

} //PesoMagnitudVariableImpl
<file_sep>/**
 */
package org.xtext.example.mydsl.textualVerduler;

/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Tarea Arqueo Caja</b></em>'.
 * <!-- end-user-doc -->
 *
 *
 * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getTareaArqueoCaja()
 * @model
 * @generated
 */
// NOTE(review): EMF-generated marker interface (no features of its own; it only tags the
// "arqueo de caja" task kind). Do not edit by hand — change the Ecore/Xtext grammar and
// regenerate; hand edits without an "@generated NOT" tag are overwritten.
public interface TareaArqueoCaja extends TipoTarea
{
} // TareaArqueoCaja
<file_sep>/**
 */
package org.xtext.example.mydsl.textualVerduler;

import org.eclipse.emf.ecore.EObject;

/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Precio Por Peso</b></em>'.
 * <!-- end-user-doc -->
 *
 * <p>
 * The following features are supported:
 * </p>
 * <ul>
 *   <li>{@link org.xtext.example.mydsl.textualVerduler.PrecioPorPeso#getPrecio <em>Precio</em>}</li>
 *   <li>{@link org.xtext.example.mydsl.textualVerduler.PrecioPorPeso#getPesaje <em>Pesaje</em>}</li>
 * </ul>
 *
 * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPrecioPorPeso()
 * @model
 * @generated
 */
// NOTE(review): EMF-generated interface pairing a money amount (precio) with a weight
// (pesaje), i.e. a price-per-weight entry. Both features are containment references,
// so the MontoDinero/Peso children are owned (and disposed) by this object.
// Do not edit by hand; regenerate from the model instead.
public interface PrecioPorPeso extends EObject
{
  /**
   * Returns the value of the '<em><b>Precio</b></em>' containment reference.
   * <!-- begin-user-doc -->
   * <p>
   * If the meaning of the '<em>Precio</em>' containment reference isn't clear,
   * there really should be more of a description here...
   * </p>
   * <!-- end-user-doc -->
   * @return the value of the '<em>Precio</em>' containment reference.
   * @see #setPrecio(MontoDinero)
   * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPrecioPorPeso_Precio()
   * @model containment="true"
   * @generated
   */
  MontoDinero getPrecio();

  /**
   * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.PrecioPorPeso#getPrecio <em>Precio</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @param value the new value of the '<em>Precio</em>' containment reference.
   * @see #getPrecio()
   * @generated
   */
  void setPrecio(MontoDinero value);

  /**
   * Returns the value of the '<em><b>Pesaje</b></em>' containment reference.
   * <!-- begin-user-doc -->
   * <p>
   * If the meaning of the '<em>Pesaje</em>' containment reference isn't clear,
   * there really should be more of a description here...
   * </p>
   * <!-- end-user-doc -->
   * @return the value of the '<em>Pesaje</em>' containment reference.
   * @see #setPesaje(Peso)
   * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getPrecioPorPeso_Pesaje()
   * @model containment="true"
   * @generated
   */
  Peso getPesaje();

  /**
   * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.PrecioPorPeso#getPesaje <em>Pesaje</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @param value the new value of the '<em>Pesaje</em>' containment reference.
   * @see #getPesaje()
   * @generated
   */
  void setPesaje(Peso value);
} // PrecioPorPeso
<file_sep>package org.xtext.example.mydsl.ui.contentassist.antlr.internal;

import java.io.InputStream;
import org.eclipse.xtext.*;
import org.eclipse.xtext.parser.*;
import org.eclipse.xtext.parser.impl.*;
import org.eclipse.emf.ecore.util.EcoreUtil;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.xtext.parser.antlr.XtextTokenStream;
import org.eclipse.xtext.parser.antlr.XtextTokenStream.HiddenTokens;
import org.eclipse.xtext.ui.editor.contentassist.antlr.internal.AbstractInternalContentAssistParser;
import org.eclipse.xtext.ui.editor.contentassist.antlr.internal.DFA;
import org.xtext.example.mydsl.services.TextualVerdulerGrammarAccess;

import org.antlr.runtime.*;
import java.util.Stack;
import java.util.List;
import java.util.ArrayList;

// NOTE(review): ANTLR 3 content-assist parser generated by Xtext from
// InternalTextualVerduler.g — do not hand-edit; rerun the MWE2 generator workflow.
// tokenNames maps token type ints to the grammar's keyword literals (Spanish-language
// "verduleria" DSL keywords); the literals below are runtime data and must not be altered.
@SuppressWarnings("all")
public class InternalTextualVerdulerParser extends AbstractInternalContentAssistParser {
    public static final String[] tokenNames = new String[] {
        "<invalid>", "<EOR>", "<DOWN>", "<UP>", "RULE_ID", "RULE_INT", "RULE_STRING", "RULE_ML_COMMENT", "RULE_SL_COMMENT", "RULE_WS", "RULE_ANY_OTHER", "'Los'", "'Las'", "'los'", "'las'", "'peso'", "'pesos'", "'pesito'", "'pesitos'", "'kilo'", "'kilos'", "'hora'", "'horas'", "'una'", "'dos'", "'tres'", "'cuatro'", "'cinco'", "'seis'", "'siete'", "'ocho'", "'nueve'", "'diez'", "'once'", "'doce'", "'cuarto'", "'media'", "'Cliente'", "', debe'", "'al dia.'", "', tiene credito'", "'salen'", "'.'", "'el'", "'un'", "'gramos'", "'compra'", "','", "'se redondea a'", "'paga'", "'queda debiendo'", "'de'", "'A las'", "', tomo'", "'y'", "':'", "'se revisaron'", "'se hizo el arqueo de caja'", "'se limpio el local'", "'minutos'", "'medio kilo'", "'cuarto kilo'"
    };
    public static final int T__50=50;
    public static final int T__19=19;
    public static final int T__15=15;
    public static final int T__59=59;
    public static final int T__16=16;
    public static final int T__17=17;
    public static final int
T__18=18; public static final int T__11=11; public static final int T__55=55; public static final int T__12=12; public static final int T__56=56; public static final int T__13=13; public static final int T__57=57; public static final int T__14=14; public static final int T__58=58; public static final int T__51=51; public static final int T__52=52; public static final int T__53=53; public static final int T__54=54; public static final int T__60=60; public static final int T__61=61; public static final int RULE_ID=4; public static final int T__26=26; public static final int T__27=27; public static final int T__28=28; public static final int RULE_INT=5; public static final int T__29=29; public static final int T__22=22; public static final int RULE_ML_COMMENT=7; public static final int T__23=23; public static final int T__24=24; public static final int T__25=25; public static final int T__20=20; public static final int T__21=21; public static final int RULE_STRING=6; public static final int RULE_SL_COMMENT=8; public static final int T__37=37; public static final int T__38=38; public static final int T__39=39; public static final int T__33=33; public static final int T__34=34; public static final int T__35=35; public static final int T__36=36; public static final int EOF=-1; public static final int T__30=30; public static final int T__31=31; public static final int T__32=32; public static final int RULE_WS=9; public static final int RULE_ANY_OTHER=10; public static final int T__48=48; public static final int T__49=49; public static final int T__44=44; public static final int T__45=45; public static final int T__46=46; public static final int T__47=47; public static final int T__40=40; public static final int T__41=41; public static final int T__42=42; public static final int T__43=43; // delegates // delegators public InternalTextualVerdulerParser(TokenStream input) { this(input, new RecognizerSharedState()); } public InternalTextualVerdulerParser(TokenStream input, 
RecognizerSharedState state) { super(input, state); } public String[] getTokenNames() { return InternalTextualVerdulerParser.tokenNames; } public String getGrammarFileName() { return "../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g"; } private TextualVerdulerGrammarAccess grammarAccess; public void setGrammarAccess(TextualVerdulerGrammarAccess grammarAccess) { this.grammarAccess = grammarAccess; } @Override protected Grammar getGrammar() { return grammarAccess.getGrammar(); } @Override protected String getValueForTokenName(String tokenName) { return tokenName; } // $ANTLR start "entryRuleVerduleria" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:60:1: entryRuleVerduleria : ruleVerduleria EOF ; public final void entryRuleVerduleria() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:61:1: ( ruleVerduleria EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:62:1: ruleVerduleria EOF { before(grammarAccess.getVerduleriaRule()); pushFollow(FOLLOW_ruleVerduleria_in_entryRuleVerduleria61); ruleVerduleria(); state._fsp--; after(grammarAccess.getVerduleriaRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleVerduleria68); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleVerduleria" // $ANTLR start "ruleVerduleria" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:69:1: ruleVerduleria : ( ( rule__Verduleria__Alternatives )* ) ; public final void ruleVerduleria() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:73:2: ( ( ( rule__Verduleria__Alternatives )* ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:74:1: ( ( rule__Verduleria__Alternatives )* ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:74:1: ( ( rule__Verduleria__Alternatives )* ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:75:1: ( rule__Verduleria__Alternatives )* { before(grammarAccess.getVerduleriaAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:76:1: ( rule__Verduleria__Alternatives )* loop1: do { int alt1=2; int LA1_0 = input.LA(1); if ( (LA1_0==RULE_ID||(LA1_0>=11 && LA1_0<=14)||LA1_0==37||LA1_0==52) ) { alt1=1; } switch (alt1) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:76:2: rule__Verduleria__Alternatives { pushFollow(FOLLOW_rule__Verduleria__Alternatives_in_ruleVerduleria94); rule__Verduleria__Alternatives(); state._fsp--; } break; default : break loop1; } } while (true); after(grammarAccess.getVerduleriaAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleVerduleria" // $ANTLR start "entryRuleCliente" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:88:1: entryRuleCliente : ruleCliente EOF ; public final void entryRuleCliente() throws RecognitionException { try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:89:1: ( ruleCliente EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:90:1: ruleCliente EOF { before(grammarAccess.getClienteRule()); pushFollow(FOLLOW_ruleCliente_in_entryRuleCliente122); ruleCliente(); state._fsp--; after(grammarAccess.getClienteRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleCliente129); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleCliente" // $ANTLR start "ruleCliente" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:97:1: ruleCliente : ( ( rule__Cliente__Alternatives ) ) ; public final void ruleCliente() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:101:2: ( ( ( rule__Cliente__Alternatives ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:102:1: ( ( rule__Cliente__Alternatives ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:102:1: ( ( rule__Cliente__Alternatives ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:103:1: ( rule__Cliente__Alternatives ) { before(grammarAccess.getClienteAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:104:1: ( rule__Cliente__Alternatives ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:104:2: rule__Cliente__Alternatives { pushFollow(FOLLOW_rule__Cliente__Alternatives_in_ruleCliente155); rule__Cliente__Alternatives(); state._fsp--; } after(grammarAccess.getClienteAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleCliente" // $ANTLR start "entryRuleClienteConDeuda" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:116:1: entryRuleClienteConDeuda : ruleClienteConDeuda EOF ; public final void entryRuleClienteConDeuda() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:117:1: ( ruleClienteConDeuda EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:118:1: ruleClienteConDeuda EOF { before(grammarAccess.getClienteConDeudaRule()); pushFollow(FOLLOW_ruleClienteConDeuda_in_entryRuleClienteConDeuda182); ruleClienteConDeuda(); state._fsp--; after(grammarAccess.getClienteConDeudaRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleClienteConDeuda189); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleClienteConDeuda" // $ANTLR start "ruleClienteConDeuda" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:125:1: ruleClienteConDeuda : ( ( rule__ClienteConDeuda__Group__0 ) ) ; public final void ruleClienteConDeuda() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:129:2: ( ( ( rule__ClienteConDeuda__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:130:1: ( ( rule__ClienteConDeuda__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:130:1: ( ( rule__ClienteConDeuda__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:131:1: ( rule__ClienteConDeuda__Group__0 ) { before(grammarAccess.getClienteConDeudaAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:132:1: ( rule__ClienteConDeuda__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:132:2: rule__ClienteConDeuda__Group__0 { pushFollow(FOLLOW_rule__ClienteConDeuda__Group__0_in_ruleClienteConDeuda215); rule__ClienteConDeuda__Group__0(); state._fsp--; } after(grammarAccess.getClienteConDeudaAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleClienteConDeuda" // $ANTLR start "entryRuleClienteAlDia" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:144:1: entryRuleClienteAlDia : ruleClienteAlDia EOF ; public final void entryRuleClienteAlDia() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:145:1: ( ruleClienteAlDia EOF ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:146:1: ruleClienteAlDia EOF { before(grammarAccess.getClienteAlDiaRule()); pushFollow(FOLLOW_ruleClienteAlDia_in_entryRuleClienteAlDia242); ruleClienteAlDia(); state._fsp--; after(grammarAccess.getClienteAlDiaRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleClienteAlDia249); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleClienteAlDia" // $ANTLR start "ruleClienteAlDia" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:153:1: ruleClienteAlDia : ( ( rule__ClienteAlDia__Group__0 ) ) ; public final void ruleClienteAlDia() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:157:2: ( ( ( rule__ClienteAlDia__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:158:1: ( ( rule__ClienteAlDia__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:158:1: ( ( rule__ClienteAlDia__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:159:1: ( rule__ClienteAlDia__Group__0 ) { before(grammarAccess.getClienteAlDiaAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:160:1: ( rule__ClienteAlDia__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:160:2: rule__ClienteAlDia__Group__0 { 
pushFollow(FOLLOW_rule__ClienteAlDia__Group__0_in_ruleClienteAlDia275); rule__ClienteAlDia__Group__0(); state._fsp--; } after(grammarAccess.getClienteAlDiaAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleClienteAlDia" // $ANTLR start "entryRuleClienteConCredito" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:172:1: entryRuleClienteConCredito : ruleClienteConCredito EOF ; public final void entryRuleClienteConCredito() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:173:1: ( ruleClienteConCredito EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:174:1: ruleClienteConCredito EOF { before(grammarAccess.getClienteConCreditoRule()); pushFollow(FOLLOW_ruleClienteConCredito_in_entryRuleClienteConCredito302); ruleClienteConCredito(); state._fsp--; after(grammarAccess.getClienteConCreditoRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleClienteConCredito309); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleClienteConCredito" // $ANTLR start "ruleClienteConCredito" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:181:1: ruleClienteConCredito : ( ( rule__ClienteConCredito__Group__0 ) ) ; public final void ruleClienteConCredito() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:185:2: ( ( ( rule__ClienteConCredito__Group__0 ) ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:186:1: ( ( rule__ClienteConCredito__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:186:1: ( ( rule__ClienteConCredito__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:187:1: ( rule__ClienteConCredito__Group__0 ) { before(grammarAccess.getClienteConCreditoAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:188:1: ( rule__ClienteConCredito__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:188:2: rule__ClienteConCredito__Group__0 { pushFollow(FOLLOW_rule__ClienteConCredito__Group__0_in_ruleClienteConCredito335); rule__ClienteConCredito__Group__0(); state._fsp--; } after(grammarAccess.getClienteConCreditoAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleClienteConCredito" // $ANTLR start "entryRuleProductoConPrecio" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:200:1: entryRuleProductoConPrecio : ruleProductoConPrecio EOF ; public final void entryRuleProductoConPrecio() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:201:1: ( ruleProductoConPrecio EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:202:1: ruleProductoConPrecio EOF { 
before(grammarAccess.getProductoConPrecioRule()); pushFollow(FOLLOW_ruleProductoConPrecio_in_entryRuleProductoConPrecio362); ruleProductoConPrecio(); state._fsp--; after(grammarAccess.getProductoConPrecioRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleProductoConPrecio369); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleProductoConPrecio" // $ANTLR start "ruleProductoConPrecio" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:209:1: ruleProductoConPrecio : ( ( rule__ProductoConPrecio__Group__0 ) ) ; public final void ruleProductoConPrecio() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:213:2: ( ( ( rule__ProductoConPrecio__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:214:1: ( ( rule__ProductoConPrecio__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:214:1: ( ( rule__ProductoConPrecio__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:215:1: ( rule__ProductoConPrecio__Group__0 ) { before(grammarAccess.getProductoConPrecioAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:216:1: ( rule__ProductoConPrecio__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:216:2: rule__ProductoConPrecio__Group__0 { 
pushFollow(FOLLOW_rule__ProductoConPrecio__Group__0_in_ruleProductoConPrecio395); rule__ProductoConPrecio__Group__0(); state._fsp--; } after(grammarAccess.getProductoConPrecioAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleProductoConPrecio" // $ANTLR start "entryRuleProducto" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:228:1: entryRuleProducto : ruleProducto EOF ; public final void entryRuleProducto() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:229:1: ( ruleProducto EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:230:1: ruleProducto EOF { before(grammarAccess.getProductoRule()); pushFollow(FOLLOW_ruleProducto_in_entryRuleProducto422); ruleProducto(); state._fsp--; after(grammarAccess.getProductoRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleProducto429); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleProducto" // $ANTLR start "ruleProducto" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:237:1: ruleProducto : ( ( rule__Producto__Group__0 ) ) ; public final void ruleProducto() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:241:2: ( ( ( rule__Producto__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:242:1: ( ( rule__Producto__Group__0 ) 
) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:242:1: ( ( rule__Producto__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:243:1: ( rule__Producto__Group__0 ) { before(grammarAccess.getProductoAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:244:1: ( rule__Producto__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:244:2: rule__Producto__Group__0 { pushFollow(FOLLOW_rule__Producto__Group__0_in_ruleProducto455); rule__Producto__Group__0(); state._fsp--; } after(grammarAccess.getProductoAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleProducto" // $ANTLR start "entryRulePrecioPorPeso" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:256:1: entryRulePrecioPorPeso : rulePrecioPorPeso EOF ; public final void entryRulePrecioPorPeso() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:257:1: ( rulePrecioPorPeso EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:258:1: rulePrecioPorPeso EOF { before(grammarAccess.getPrecioPorPesoRule()); pushFollow(FOLLOW_rulePrecioPorPeso_in_entryRulePrecioPorPeso482); rulePrecioPorPeso(); state._fsp--; after(grammarAccess.getPrecioPorPesoRule()); match(input,EOF,FOLLOW_EOF_in_entryRulePrecioPorPeso489); } } catch (RecognitionException re) { 
reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRulePrecioPorPeso" // $ANTLR start "rulePrecioPorPeso" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:265:1: rulePrecioPorPeso : ( ( rule__PrecioPorPeso__Group__0 ) ) ; public final void rulePrecioPorPeso() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:269:2: ( ( ( rule__PrecioPorPeso__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:270:1: ( ( rule__PrecioPorPeso__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:270:1: ( ( rule__PrecioPorPeso__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:271:1: ( rule__PrecioPorPeso__Group__0 ) { before(grammarAccess.getPrecioPorPesoAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:272:1: ( rule__PrecioPorPeso__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:272:2: rule__PrecioPorPeso__Group__0 { pushFollow(FOLLOW_rule__PrecioPorPeso__Group__0_in_rulePrecioPorPeso515); rule__PrecioPorPeso__Group__0(); state._fsp--; } after(grammarAccess.getPrecioPorPesoAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rulePrecioPorPeso" // $ANTLR start "entryRuleMontoDinero" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:284:1: entryRuleMontoDinero : ruleMontoDinero EOF ; public final void entryRuleMontoDinero() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:285:1: ( ruleMontoDinero EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:286:1: ruleMontoDinero EOF { before(grammarAccess.getMontoDineroRule()); pushFollow(FOLLOW_ruleMontoDinero_in_entryRuleMontoDinero542); ruleMontoDinero(); state._fsp--; after(grammarAccess.getMontoDineroRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleMontoDinero549); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleMontoDinero" // $ANTLR start "ruleMontoDinero" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:293:1: ruleMontoDinero : ( ( rule__MontoDinero__Group__0 ) ) ; public final void ruleMontoDinero() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:297:2: ( ( ( rule__MontoDinero__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:298:1: ( ( rule__MontoDinero__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:298:1: ( ( rule__MontoDinero__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:299:1: ( rule__MontoDinero__Group__0 
) { before(grammarAccess.getMontoDineroAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:300:1: ( rule__MontoDinero__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:300:2: rule__MontoDinero__Group__0 { pushFollow(FOLLOW_rule__MontoDinero__Group__0_in_ruleMontoDinero575); rule__MontoDinero__Group__0(); state._fsp--; } after(grammarAccess.getMontoDineroAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleMontoDinero" // $ANTLR start "entryRulePeso" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:312:1: entryRulePeso : rulePeso EOF ; public final void entryRulePeso() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:313:1: ( rulePeso EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:314:1: rulePeso EOF { before(grammarAccess.getPesoRule()); pushFollow(FOLLOW_rulePeso_in_entryRulePeso602); rulePeso(); state._fsp--; after(grammarAccess.getPesoRule()); match(input,EOF,FOLLOW_EOF_in_entryRulePeso609); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRulePeso" // $ANTLR start "rulePeso" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:321:1: rulePeso : ( ( rule__Peso__Alternatives ) ) ; public final void rulePeso() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:325:2: ( ( ( rule__Peso__Alternatives ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:326:1: ( ( rule__Peso__Alternatives ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:326:1: ( ( rule__Peso__Alternatives ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:327:1: ( rule__Peso__Alternatives ) { before(grammarAccess.getPesoAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:328:1: ( rule__Peso__Alternatives ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:328:2: rule__Peso__Alternatives { pushFollow(FOLLOW_rule__Peso__Alternatives_in_rulePeso635); rule__Peso__Alternatives(); state._fsp--; } after(grammarAccess.getPesoAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rulePeso" // $ANTLR start "entryRulePesoMagnitudVariable" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:340:1: entryRulePesoMagnitudVariable : rulePesoMagnitudVariable EOF ; public final void entryRulePesoMagnitudVariable() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:341:1: ( rulePesoMagnitudVariable EOF ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:342:1: rulePesoMagnitudVariable EOF { before(grammarAccess.getPesoMagnitudVariableRule()); pushFollow(FOLLOW_rulePesoMagnitudVariable_in_entryRulePesoMagnitudVariable662); rulePesoMagnitudVariable(); state._fsp--; after(grammarAccess.getPesoMagnitudVariableRule()); match(input,EOF,FOLLOW_EOF_in_entryRulePesoMagnitudVariable669); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRulePesoMagnitudVariable" // $ANTLR start "rulePesoMagnitudVariable" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:349:1: rulePesoMagnitudVariable : ( ( rule__PesoMagnitudVariable__Group__0 ) ) ; public final void rulePesoMagnitudVariable() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:353:2: ( ( ( rule__PesoMagnitudVariable__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:354:1: ( ( rule__PesoMagnitudVariable__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:354:1: ( ( rule__PesoMagnitudVariable__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:355:1: ( rule__PesoMagnitudVariable__Group__0 ) { before(grammarAccess.getPesoMagnitudVariableAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:356:1: ( rule__PesoMagnitudVariable__Group__0 ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:356:2: rule__PesoMagnitudVariable__Group__0 { pushFollow(FOLLOW_rule__PesoMagnitudVariable__Group__0_in_rulePesoMagnitudVariable695); rule__PesoMagnitudVariable__Group__0(); state._fsp--; } after(grammarAccess.getPesoMagnitudVariableAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rulePesoMagnitudVariable" // $ANTLR start "entryRulePesoMagnitudFija" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:368:1: entryRulePesoMagnitudFija : rulePesoMagnitudFija EOF ; public final void entryRulePesoMagnitudFija() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:369:1: ( rulePesoMagnitudFija EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:370:1: rulePesoMagnitudFija EOF { before(grammarAccess.getPesoMagnitudFijaRule()); pushFollow(FOLLOW_rulePesoMagnitudFija_in_entryRulePesoMagnitudFija722); rulePesoMagnitudFija(); state._fsp--; after(grammarAccess.getPesoMagnitudFijaRule()); match(input,EOF,FOLLOW_EOF_in_entryRulePesoMagnitudFija729); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRulePesoMagnitudFija" // $ANTLR start "rulePesoMagnitudFija" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:377:1: rulePesoMagnitudFija : ( ( rule__PesoMagnitudFija__Alternatives ) ) ; public final void rulePesoMagnitudFija() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:381:2: ( ( ( rule__PesoMagnitudFija__Alternatives ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:382:1: ( ( rule__PesoMagnitudFija__Alternatives ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:382:1: ( ( rule__PesoMagnitudFija__Alternatives ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:383:1: ( rule__PesoMagnitudFija__Alternatives ) { before(grammarAccess.getPesoMagnitudFijaAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:384:1: ( rule__PesoMagnitudFija__Alternatives ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:384:2: rule__PesoMagnitudFija__Alternatives { pushFollow(FOLLOW_rule__PesoMagnitudFija__Alternatives_in_rulePesoMagnitudFija755); rule__PesoMagnitudFija__Alternatives(); state._fsp--; } after(grammarAccess.getPesoMagnitudFijaAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rulePesoMagnitudFija" // $ANTLR start "entryRuleMedidaPeso" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:396:1: entryRuleMedidaPeso : ruleMedidaPeso EOF ; public final void entryRuleMedidaPeso() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:397:1: ( ruleMedidaPeso 
EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:398:1: ruleMedidaPeso EOF { before(grammarAccess.getMedidaPesoRule()); pushFollow(FOLLOW_ruleMedidaPeso_in_entryRuleMedidaPeso782); ruleMedidaPeso(); state._fsp--; after(grammarAccess.getMedidaPesoRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleMedidaPeso789); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleMedidaPeso" // $ANTLR start "ruleMedidaPeso" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:405:1: ruleMedidaPeso : ( ( rule__MedidaPeso__Alternatives ) ) ; public final void ruleMedidaPeso() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:409:2: ( ( ( rule__MedidaPeso__Alternatives ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:410:1: ( ( rule__MedidaPeso__Alternatives ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:410:1: ( ( rule__MedidaPeso__Alternatives ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:411:1: ( rule__MedidaPeso__Alternatives ) { before(grammarAccess.getMedidaPesoAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:412:1: ( rule__MedidaPeso__Alternatives ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:412:2: rule__MedidaPeso__Alternatives 
{ pushFollow(FOLLOW_rule__MedidaPeso__Alternatives_in_ruleMedidaPeso815); rule__MedidaPeso__Alternatives(); state._fsp--; } after(grammarAccess.getMedidaPesoAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleMedidaPeso" // $ANTLR start "entryRuleMedidaPesoGramos" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:424:1: entryRuleMedidaPesoGramos : ruleMedidaPesoGramos EOF ; public final void entryRuleMedidaPesoGramos() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:425:1: ( ruleMedidaPesoGramos EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:426:1: ruleMedidaPesoGramos EOF { before(grammarAccess.getMedidaPesoGramosRule()); pushFollow(FOLLOW_ruleMedidaPesoGramos_in_entryRuleMedidaPesoGramos842); ruleMedidaPesoGramos(); state._fsp--; after(grammarAccess.getMedidaPesoGramosRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleMedidaPesoGramos849); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleMedidaPesoGramos" // $ANTLR start "ruleMedidaPesoGramos" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:433:1: ruleMedidaPesoGramos : ( ( rule__MedidaPesoGramos__Group__0 ) ) ; public final void ruleMedidaPesoGramos() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:437:2: ( ( ( rule__MedidaPesoGramos__Group__0 ) ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:438:1: ( ( rule__MedidaPesoGramos__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:438:1: ( ( rule__MedidaPesoGramos__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:439:1: ( rule__MedidaPesoGramos__Group__0 ) { before(grammarAccess.getMedidaPesoGramosAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:440:1: ( rule__MedidaPesoGramos__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:440:2: rule__MedidaPesoGramos__Group__0 { pushFollow(FOLLOW_rule__MedidaPesoGramos__Group__0_in_ruleMedidaPesoGramos875); rule__MedidaPesoGramos__Group__0(); state._fsp--; } after(grammarAccess.getMedidaPesoGramosAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleMedidaPesoGramos" // $ANTLR start "entryRuleMedidaPesoKilo" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:452:1: entryRuleMedidaPesoKilo : ruleMedidaPesoKilo EOF ; public final void entryRuleMedidaPesoKilo() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:453:1: ( ruleMedidaPesoKilo EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:454:1: ruleMedidaPesoKilo EOF { 
before(grammarAccess.getMedidaPesoKiloRule()); pushFollow(FOLLOW_ruleMedidaPesoKilo_in_entryRuleMedidaPesoKilo902); ruleMedidaPesoKilo(); state._fsp--; after(grammarAccess.getMedidaPesoKiloRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleMedidaPesoKilo909); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleMedidaPesoKilo" // $ANTLR start "ruleMedidaPesoKilo" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:461:1: ruleMedidaPesoKilo : ( ( rule__MedidaPesoKilo__Group__0 ) ) ; public final void ruleMedidaPesoKilo() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:465:2: ( ( ( rule__MedidaPesoKilo__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:466:1: ( ( rule__MedidaPesoKilo__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:466:1: ( ( rule__MedidaPesoKilo__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:467:1: ( rule__MedidaPesoKilo__Group__0 ) { before(grammarAccess.getMedidaPesoKiloAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:468:1: ( rule__MedidaPesoKilo__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:468:2: rule__MedidaPesoKilo__Group__0 { pushFollow(FOLLOW_rule__MedidaPesoKilo__Group__0_in_ruleMedidaPesoKilo935); rule__MedidaPesoKilo__Group__0(); state._fsp--; } 
after(grammarAccess.getMedidaPesoKiloAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleMedidaPesoKilo" // $ANTLR start "entryRuleVenta" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:480:1: entryRuleVenta : ruleVenta EOF ; public final void entryRuleVenta() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:481:1: ( ruleVenta EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:482:1: ruleVenta EOF { before(grammarAccess.getVentaRule()); pushFollow(FOLLOW_ruleVenta_in_entryRuleVenta962); ruleVenta(); state._fsp--; after(grammarAccess.getVentaRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleVenta969); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleVenta" // $ANTLR start "ruleVenta" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:489:1: ruleVenta : ( ( rule__Venta__Group__0 ) ) ; public final void ruleVenta() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:493:2: ( ( ( rule__Venta__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:494:1: ( ( rule__Venta__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:494:1: ( ( rule__Venta__Group__0 ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:495:1: ( rule__Venta__Group__0 ) { before(grammarAccess.getVentaAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:496:1: ( rule__Venta__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:496:2: rule__Venta__Group__0 { pushFollow(FOLLOW_rule__Venta__Group__0_in_ruleVenta995); rule__Venta__Group__0(); state._fsp--; } after(grammarAccess.getVentaAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleVenta" // $ANTLR start "entryRuleItemVenta" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:508:1: entryRuleItemVenta : ruleItemVenta EOF ; public final void entryRuleItemVenta() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:509:1: ( ruleItemVenta EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:510:1: ruleItemVenta EOF { before(grammarAccess.getItemVentaRule()); pushFollow(FOLLOW_ruleItemVenta_in_entryRuleItemVenta1022); ruleItemVenta(); state._fsp--; after(grammarAccess.getItemVentaRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleItemVenta1029); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleItemVenta" // $ANTLR start "ruleItemVenta" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:517:1: ruleItemVenta : ( ( rule__ItemVenta__Group__0 ) ) ; public final void ruleItemVenta() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:521:2: ( ( ( rule__ItemVenta__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:522:1: ( ( rule__ItemVenta__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:522:1: ( ( rule__ItemVenta__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:523:1: ( rule__ItemVenta__Group__0 ) { before(grammarAccess.getItemVentaAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:524:1: ( rule__ItemVenta__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:524:2: rule__ItemVenta__Group__0 { pushFollow(FOLLOW_rule__ItemVenta__Group__0_in_ruleItemVenta1055); rule__ItemVenta__Group__0(); state._fsp--; } after(grammarAccess.getItemVentaAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleItemVenta" // $ANTLR start "entryRuleTarea" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:536:1: entryRuleTarea : ruleTarea EOF ; public final void entryRuleTarea() throws RecognitionException { try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:537:1: ( ruleTarea EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:538:1: ruleTarea EOF { before(grammarAccess.getTareaRule()); pushFollow(FOLLOW_ruleTarea_in_entryRuleTarea1082); ruleTarea(); state._fsp--; after(grammarAccess.getTareaRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleTarea1089); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleTarea" // $ANTLR start "ruleTarea" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:545:1: ruleTarea : ( ( rule__Tarea__Group__0 ) ) ; public final void ruleTarea() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:549:2: ( ( ( rule__Tarea__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:550:1: ( ( rule__Tarea__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:550:1: ( ( rule__Tarea__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:551:1: ( rule__Tarea__Group__0 ) { before(grammarAccess.getTareaAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:552:1: ( rule__Tarea__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:552:2: 
rule__Tarea__Group__0 { pushFollow(FOLLOW_rule__Tarea__Group__0_in_ruleTarea1115); rule__Tarea__Group__0(); state._fsp--; } after(grammarAccess.getTareaAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleTarea" // $ANTLR start "entryRuleHorario" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:564:1: entryRuleHorario : ruleHorario EOF ; public final void entryRuleHorario() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:565:1: ( ruleHorario EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:566:1: ruleHorario EOF { before(grammarAccess.getHorarioRule()); pushFollow(FOLLOW_ruleHorario_in_entryRuleHorario1142); ruleHorario(); state._fsp--; after(grammarAccess.getHorarioRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleHorario1149); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleHorario" // $ANTLR start "ruleHorario" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:573:1: ruleHorario : ( ( rule__Horario__Alternatives ) ) ; public final void ruleHorario() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:577:2: ( ( ( rule__Horario__Alternatives ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:578:1: ( ( rule__Horario__Alternatives ) ) { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:578:1: ( ( rule__Horario__Alternatives ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:579:1: ( rule__Horario__Alternatives ) { before(grammarAccess.getHorarioAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:580:1: ( rule__Horario__Alternatives ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:580:2: rule__Horario__Alternatives { pushFollow(FOLLOW_rule__Horario__Alternatives_in_ruleHorario1175); rule__Horario__Alternatives(); state._fsp--; } after(grammarAccess.getHorarioAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleHorario" // $ANTLR start "entryRuleHorarioLiteral" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:592:1: entryRuleHorarioLiteral : ruleHorarioLiteral EOF ; public final void entryRuleHorarioLiteral() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:593:1: ( ruleHorarioLiteral EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:594:1: ruleHorarioLiteral EOF { before(grammarAccess.getHorarioLiteralRule()); pushFollow(FOLLOW_ruleHorarioLiteral_in_entryRuleHorarioLiteral1202); ruleHorarioLiteral(); state._fsp--; after(grammarAccess.getHorarioLiteralRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleHorarioLiteral1209); } } catch 
(RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleHorarioLiteral" // $ANTLR start "ruleHorarioLiteral" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:601:1: ruleHorarioLiteral : ( ( rule__HorarioLiteral__Group__0 ) ) ; public final void ruleHorarioLiteral() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:605:2: ( ( ( rule__HorarioLiteral__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:606:1: ( ( rule__HorarioLiteral__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:606:1: ( ( rule__HorarioLiteral__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:607:1: ( rule__HorarioLiteral__Group__0 ) { before(grammarAccess.getHorarioLiteralAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:608:1: ( rule__HorarioLiteral__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:608:2: rule__HorarioLiteral__Group__0 { pushFollow(FOLLOW_rule__HorarioLiteral__Group__0_in_ruleHorarioLiteral1235); rule__HorarioLiteral__Group__0(); state._fsp--; } after(grammarAccess.getHorarioLiteralAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleHorarioLiteral" // $ANTLR start "entryRuleHorarioNumerico" 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the entry/body pair for grammar rule HorarioNumerico and the entry method for TipoTarea. Each
// ruleX() saves/restores the follow-stack size and delegates to the rule's Group/Alternatives helper.
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:620:1: entryRuleHorarioNumerico : ruleHorarioNumerico EOF ; public final void entryRuleHorarioNumerico() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:621:1: ( ruleHorarioNumerico EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:622:1: ruleHorarioNumerico EOF { before(grammarAccess.getHorarioNumericoRule()); pushFollow(FOLLOW_ruleHorarioNumerico_in_entryRuleHorarioNumerico1262); ruleHorarioNumerico(); state._fsp--; after(grammarAccess.getHorarioNumericoRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleHorarioNumerico1269); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleHorarioNumerico" // $ANTLR start "ruleHorarioNumerico" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:629:1: ruleHorarioNumerico : ( ( rule__HorarioNumerico__Group__0 ) ) ; public final void ruleHorarioNumerico() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:633:2: ( ( ( rule__HorarioNumerico__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:634:1: ( ( rule__HorarioNumerico__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:634:1: ( ( rule__HorarioNumerico__Group__0 ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:635:1: ( rule__HorarioNumerico__Group__0 ) { before(grammarAccess.getHorarioNumericoAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:636:1: ( rule__HorarioNumerico__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:636:2: rule__HorarioNumerico__Group__0 { pushFollow(FOLLOW_rule__HorarioNumerico__Group__0_in_ruleHorarioNumerico1295); rule__HorarioNumerico__Group__0(); state._fsp--; } after(grammarAccess.getHorarioNumericoAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleHorarioNumerico" // $ANTLR start "entryRuleTipoTarea" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:648:1: entryRuleTipoTarea : ruleTipoTarea EOF ; public final void entryRuleTipoTarea() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:649:1: ( ruleTipoTarea EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:650:1: ruleTipoTarea EOF { before(grammarAccess.getTipoTareaRule()); pushFollow(FOLLOW_ruleTipoTarea_in_entryRuleTipoTarea1322); ruleTipoTarea(); state._fsp--; after(grammarAccess.getTipoTareaRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleTipoTarea1329); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleTipoTarea" // $ANTLR start "ruleTipoTarea" // 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the body of ruleTipoTarea (delegates to TipoTarea's Alternatives) and the entry method plus the
// start of the body for grammar rule TareaRevisionProductos.
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:657:1: ruleTipoTarea : ( ( rule__TipoTarea__Alternatives ) ) ; public final void ruleTipoTarea() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:661:2: ( ( ( rule__TipoTarea__Alternatives ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:662:1: ( ( rule__TipoTarea__Alternatives ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:662:1: ( ( rule__TipoTarea__Alternatives ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:663:1: ( rule__TipoTarea__Alternatives ) { before(grammarAccess.getTipoTareaAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:664:1: ( rule__TipoTarea__Alternatives ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:664:2: rule__TipoTarea__Alternatives { pushFollow(FOLLOW_rule__TipoTarea__Alternatives_in_ruleTipoTarea1355); rule__TipoTarea__Alternatives(); state._fsp--; } after(grammarAccess.getTipoTareaAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleTipoTarea" // $ANTLR start "entryRuleTareaRevisionProductos" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:676:1: entryRuleTareaRevisionProductos : 
ruleTareaRevisionProductos EOF ; public final void entryRuleTareaRevisionProductos() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:677:1: ( ruleTareaRevisionProductos EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:678:1: ruleTareaRevisionProductos EOF { before(grammarAccess.getTareaRevisionProductosRule()); pushFollow(FOLLOW_ruleTareaRevisionProductos_in_entryRuleTareaRevisionProductos1382); ruleTareaRevisionProductos(); state._fsp--; after(grammarAccess.getTareaRevisionProductosRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleTareaRevisionProductos1389); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleTareaRevisionProductos" // $ANTLR start "ruleTareaRevisionProductos" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:685:1: ruleTareaRevisionProductos : ( ( rule__TareaRevisionProductos__Group__0 ) ) ; public final void ruleTareaRevisionProductos() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:689:2: ( ( ( rule__TareaRevisionProductos__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:690:1: ( ( rule__TareaRevisionProductos__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:690:1: ( ( rule__TareaRevisionProductos__Group__0 ) ) // 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the tail of ruleTareaRevisionProductos and the entry/body pair for grammar rule TareaArqueoCaja.
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:691:1: ( rule__TareaRevisionProductos__Group__0 ) { before(grammarAccess.getTareaRevisionProductosAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:692:1: ( rule__TareaRevisionProductos__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:692:2: rule__TareaRevisionProductos__Group__0 { pushFollow(FOLLOW_rule__TareaRevisionProductos__Group__0_in_ruleTareaRevisionProductos1415); rule__TareaRevisionProductos__Group__0(); state._fsp--; } after(grammarAccess.getTareaRevisionProductosAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleTareaRevisionProductos" // $ANTLR start "entryRuleTareaArqueoCaja" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:704:1: entryRuleTareaArqueoCaja : ruleTareaArqueoCaja EOF ; public final void entryRuleTareaArqueoCaja() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:705:1: ( ruleTareaArqueoCaja EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:706:1: ruleTareaArqueoCaja EOF { before(grammarAccess.getTareaArqueoCajaRule()); pushFollow(FOLLOW_ruleTareaArqueoCaja_in_entryRuleTareaArqueoCaja1442); ruleTareaArqueoCaja(); state._fsp--; after(grammarAccess.getTareaArqueoCajaRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleTareaArqueoCaja1449); } } catch (RecognitionException re) { reportError(re); recover(input,re); } 
finally { } return ; } // $ANTLR end "entryRuleTareaArqueoCaja" // $ANTLR start "ruleTareaArqueoCaja" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:713:1: ruleTareaArqueoCaja : ( ( rule__TareaArqueoCaja__Group__0 ) ) ; public final void ruleTareaArqueoCaja() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:717:2: ( ( ( rule__TareaArqueoCaja__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:718:1: ( ( rule__TareaArqueoCaja__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:718:1: ( ( rule__TareaArqueoCaja__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:719:1: ( rule__TareaArqueoCaja__Group__0 ) { before(grammarAccess.getTareaArqueoCajaAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:720:1: ( rule__TareaArqueoCaja__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:720:2: rule__TareaArqueoCaja__Group__0 { pushFollow(FOLLOW_rule__TareaArqueoCaja__Group__0_in_ruleTareaArqueoCaja1475); rule__TareaArqueoCaja__Group__0(); state._fsp--; } after(grammarAccess.getTareaArqueoCajaAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleTareaArqueoCaja" // $ANTLR start "entryRuleTareaLimpiezaLocal" // 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the entry/body pair for grammar rule TareaLimpiezaLocal and the entry method for LapsoTiempo.
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:732:1: entryRuleTareaLimpiezaLocal : ruleTareaLimpiezaLocal EOF ; public final void entryRuleTareaLimpiezaLocal() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:733:1: ( ruleTareaLimpiezaLocal EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:734:1: ruleTareaLimpiezaLocal EOF { before(grammarAccess.getTareaLimpiezaLocalRule()); pushFollow(FOLLOW_ruleTareaLimpiezaLocal_in_entryRuleTareaLimpiezaLocal1502); ruleTareaLimpiezaLocal(); state._fsp--; after(grammarAccess.getTareaLimpiezaLocalRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleTareaLimpiezaLocal1509); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleTareaLimpiezaLocal" // $ANTLR start "ruleTareaLimpiezaLocal" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:741:1: ruleTareaLimpiezaLocal : ( ( rule__TareaLimpiezaLocal__Group__0 ) ) ; public final void ruleTareaLimpiezaLocal() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:745:2: ( ( ( rule__TareaLimpiezaLocal__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:746:1: ( ( rule__TareaLimpiezaLocal__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:746:1: ( ( rule__TareaLimpiezaLocal__Group__0 ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:747:1: ( rule__TareaLimpiezaLocal__Group__0 ) { before(grammarAccess.getTareaLimpiezaLocalAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:748:1: ( rule__TareaLimpiezaLocal__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:748:2: rule__TareaLimpiezaLocal__Group__0 { pushFollow(FOLLOW_rule__TareaLimpiezaLocal__Group__0_in_ruleTareaLimpiezaLocal1535); rule__TareaLimpiezaLocal__Group__0(); state._fsp--; } after(grammarAccess.getTareaLimpiezaLocalAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleTareaLimpiezaLocal" // $ANTLR start "entryRuleLapsoTiempo" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:760:1: entryRuleLapsoTiempo : ruleLapsoTiempo EOF ; public final void entryRuleLapsoTiempo() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:761:1: ( ruleLapsoTiempo EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:762:1: ruleLapsoTiempo EOF { before(grammarAccess.getLapsoTiempoRule()); pushFollow(FOLLOW_ruleLapsoTiempo_in_entryRuleLapsoTiempo1562); ruleLapsoTiempo(); state._fsp--; after(grammarAccess.getLapsoTiempoRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleLapsoTiempo1569); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleLapsoTiempo" // $ANTLR start 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the body of ruleLapsoTiempo (delegates to its Alternatives) and the entry method plus the start of
// the body for grammar rule LapsoTiempoNumerico.
"ruleLapsoTiempo" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:769:1: ruleLapsoTiempo : ( ( rule__LapsoTiempo__Alternatives ) ) ; public final void ruleLapsoTiempo() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:773:2: ( ( ( rule__LapsoTiempo__Alternatives ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:774:1: ( ( rule__LapsoTiempo__Alternatives ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:774:1: ( ( rule__LapsoTiempo__Alternatives ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:775:1: ( rule__LapsoTiempo__Alternatives ) { before(grammarAccess.getLapsoTiempoAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:776:1: ( rule__LapsoTiempo__Alternatives ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:776:2: rule__LapsoTiempo__Alternatives { pushFollow(FOLLOW_rule__LapsoTiempo__Alternatives_in_ruleLapsoTiempo1595); rule__LapsoTiempo__Alternatives(); state._fsp--; } after(grammarAccess.getLapsoTiempoAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleLapsoTiempo" // $ANTLR start "entryRuleLapsoTiempoNumerico" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:788:1: 
entryRuleLapsoTiempoNumerico : ruleLapsoTiempoNumerico EOF ; public final void entryRuleLapsoTiempoNumerico() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:789:1: ( ruleLapsoTiempoNumerico EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:790:1: ruleLapsoTiempoNumerico EOF { before(grammarAccess.getLapsoTiempoNumericoRule()); pushFollow(FOLLOW_ruleLapsoTiempoNumerico_in_entryRuleLapsoTiempoNumerico1622); ruleLapsoTiempoNumerico(); state._fsp--; after(grammarAccess.getLapsoTiempoNumericoRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleLapsoTiempoNumerico1629); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleLapsoTiempoNumerico" // $ANTLR start "ruleLapsoTiempoNumerico" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:797:1: ruleLapsoTiempoNumerico : ( ( rule__LapsoTiempoNumerico__Alternatives ) ) ; public final void ruleLapsoTiempoNumerico() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:801:2: ( ( ( rule__LapsoTiempoNumerico__Alternatives ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:802:1: ( ( rule__LapsoTiempoNumerico__Alternatives ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:802:1: ( ( rule__LapsoTiempoNumerico__Alternatives ) ) // 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the tail of ruleLapsoTiempoNumerico and the entry/body pair for grammar rule LapsoTiempoLiteral.
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:803:1: ( rule__LapsoTiempoNumerico__Alternatives ) { before(grammarAccess.getLapsoTiempoNumericoAccess().getAlternatives()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:804:1: ( rule__LapsoTiempoNumerico__Alternatives ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:804:2: rule__LapsoTiempoNumerico__Alternatives { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Alternatives_in_ruleLapsoTiempoNumerico1655); rule__LapsoTiempoNumerico__Alternatives(); state._fsp--; } after(grammarAccess.getLapsoTiempoNumericoAccess().getAlternatives()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleLapsoTiempoNumerico" // $ANTLR start "entryRuleLapsoTiempoLiteral" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:816:1: entryRuleLapsoTiempoLiteral : ruleLapsoTiempoLiteral EOF ; public final void entryRuleLapsoTiempoLiteral() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:817:1: ( ruleLapsoTiempoLiteral EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:818:1: ruleLapsoTiempoLiteral EOF { before(grammarAccess.getLapsoTiempoLiteralRule()); pushFollow(FOLLOW_ruleLapsoTiempoLiteral_in_entryRuleLapsoTiempoLiteral1682); ruleLapsoTiempoLiteral(); state._fsp--; after(grammarAccess.getLapsoTiempoLiteralRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleLapsoTiempoLiteral1689); } } catch (RecognitionException re) 
{ reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleLapsoTiempoLiteral" // $ANTLR start "ruleLapsoTiempoLiteral" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:825:1: ruleLapsoTiempoLiteral : ( ( rule__LapsoTiempoLiteral__Group__0 ) ) ; public final void ruleLapsoTiempoLiteral() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:829:2: ( ( ( rule__LapsoTiempoLiteral__Group__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:830:1: ( ( rule__LapsoTiempoLiteral__Group__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:830:1: ( ( rule__LapsoTiempoLiteral__Group__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:831:1: ( rule__LapsoTiempoLiteral__Group__0 ) { before(grammarAccess.getLapsoTiempoLiteralAccess().getGroup()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:832:1: ( rule__LapsoTiempoLiteral__Group__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:832:2: rule__LapsoTiempoLiteral__Group__0 { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group__0_in_ruleLapsoTiempoLiteral1715); rule__LapsoTiempoLiteral__Group__0(); state._fsp--; } after(grammarAccess.getLapsoTiempoLiteralAccess().getGroup()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleLapsoTiempoLiteral" // 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the entry/body pair for grammar rule HorasLiteral (delegates to its LiteralAssignment) and the
// entry method for FraccionHoraLiteral.
$ANTLR start "entryRuleHorasLiteral" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:844:1: entryRuleHorasLiteral : ruleHorasLiteral EOF ; public final void entryRuleHorasLiteral() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:845:1: ( ruleHorasLiteral EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:846:1: ruleHorasLiteral EOF { before(grammarAccess.getHorasLiteralRule()); pushFollow(FOLLOW_ruleHorasLiteral_in_entryRuleHorasLiteral1742); ruleHorasLiteral(); state._fsp--; after(grammarAccess.getHorasLiteralRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleHorasLiteral1749); } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleHorasLiteral" // $ANTLR start "ruleHorasLiteral" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:853:1: ruleHorasLiteral : ( ( rule__HorasLiteral__LiteralAssignment ) ) ; public final void ruleHorasLiteral() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:857:2: ( ( ( rule__HorasLiteral__LiteralAssignment ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:858:1: ( ( rule__HorasLiteral__LiteralAssignment ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:858:1: ( ( rule__HorasLiteral__LiteralAssignment ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:859:1: ( rule__HorasLiteral__LiteralAssignment ) { before(grammarAccess.getHorasLiteralAccess().getLiteralAssignment()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:860:1: ( rule__HorasLiteral__LiteralAssignment ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:860:2: rule__HorasLiteral__LiteralAssignment { pushFollow(FOLLOW_rule__HorasLiteral__LiteralAssignment_in_ruleHorasLiteral1775); rule__HorasLiteral__LiteralAssignment(); state._fsp--; } after(grammarAccess.getHorasLiteralAccess().getLiteralAssignment()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleHorasLiteral" // $ANTLR start "entryRuleFraccionHoraLiteral" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:872:1: entryRuleFraccionHoraLiteral : ruleFraccionHoraLiteral EOF ; public final void entryRuleFraccionHoraLiteral() throws RecognitionException { try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:873:1: ( ruleFraccionHoraLiteral EOF ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:874:1: ruleFraccionHoraLiteral EOF { before(grammarAccess.getFraccionHoraLiteralRule()); pushFollow(FOLLOW_ruleFraccionHoraLiteral_in_entryRuleFraccionHoraLiteral1802); ruleFraccionHoraLiteral(); state._fsp--; after(grammarAccess.getFraccionHoraLiteralRule()); match(input,EOF,FOLLOW_EOF_in_entryRuleFraccionHoraLiteral1809); } } catch (RecognitionException re) { 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the body of ruleFraccionHoraLiteral and the start of the LL(*) prediction switch for
// rule__Verduleria__Alternatives (dispatches on lookahead token to Clientes/Productos/Ventas/Tareas).
reportError(re); recover(input,re); } finally { } return ; } // $ANTLR end "entryRuleFraccionHoraLiteral" // $ANTLR start "ruleFraccionHoraLiteral" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:881:1: ruleFraccionHoraLiteral : ( ( rule__FraccionHoraLiteral__LiteralAssignment ) ) ; public final void ruleFraccionHoraLiteral() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:885:2: ( ( ( rule__FraccionHoraLiteral__LiteralAssignment ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:886:1: ( ( rule__FraccionHoraLiteral__LiteralAssignment ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:886:1: ( ( rule__FraccionHoraLiteral__LiteralAssignment ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:887:1: ( rule__FraccionHoraLiteral__LiteralAssignment ) { before(grammarAccess.getFraccionHoraLiteralAccess().getLiteralAssignment()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:888:1: ( rule__FraccionHoraLiteral__LiteralAssignment ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:888:2: rule__FraccionHoraLiteral__LiteralAssignment { pushFollow(FOLLOW_rule__FraccionHoraLiteral__LiteralAssignment_in_ruleFraccionHoraLiteral1835); rule__FraccionHoraLiteral__LiteralAssignment(); state._fsp--; } after(grammarAccess.getFraccionHoraLiteralAccess().getLiteralAssignment()); } } } catch (RecognitionException re) { 
reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "ruleFraccionHoraLiteral" // $ANTLR start "rule__Verduleria__Alternatives" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:900:1: rule__Verduleria__Alternatives : ( ( ( rule__Verduleria__ClientesAssignment_0 ) ) | ( ( rule__Verduleria__ProductosAssignment_1 ) ) | ( ( rule__Verduleria__VentasAssignment_2 ) ) | ( ( rule__Verduleria__TareasAssignment_3 ) ) ); public final void rule__Verduleria__Alternatives() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:904:1: ( ( ( rule__Verduleria__ClientesAssignment_0 ) ) | ( ( rule__Verduleria__ProductosAssignment_1 ) ) | ( ( rule__Verduleria__VentasAssignment_2 ) ) | ( ( rule__Verduleria__TareasAssignment_3 ) ) ) int alt2=4; switch ( input.LA(1) ) { case 37: { alt2=1; } break; case 11: case 12: case 13: case 14: { alt2=2; } break; case RULE_ID: { alt2=3; } break; case 52: { alt2=4; } break; default: NoViableAltException nvae = new NoViableAltException("", 2, 0, input); throw nvae; } switch (alt2) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:905:1: ( ( rule__Verduleria__ClientesAssignment_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:905:1: ( ( rule__Verduleria__ClientesAssignment_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:906:1: ( rule__Verduleria__ClientesAssignment_0 ) { before(grammarAccess.getVerduleriaAccess().getClientesAssignment_0()); // 
// NOTE(review): ANTLR-generated (Xtext src-gen); regenerate instead of hand-editing. This span holds
// the four alternative branches of rule__Verduleria__Alternatives and the lookahead prediction logic
// at the head of rule__Cliente__Alternatives (ClienteConDeuda/ClienteAlDia/ClienteConCredito,
// disambiguated by tokens 38/39/40 at LA(3)); that method continues past this region.
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:907:1: ( rule__Verduleria__ClientesAssignment_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:907:2: rule__Verduleria__ClientesAssignment_0 { pushFollow(FOLLOW_rule__Verduleria__ClientesAssignment_0_in_rule__Verduleria__Alternatives1871); rule__Verduleria__ClientesAssignment_0(); state._fsp--; } after(grammarAccess.getVerduleriaAccess().getClientesAssignment_0()); } } break; case 2 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:911:6: ( ( rule__Verduleria__ProductosAssignment_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:911:6: ( ( rule__Verduleria__ProductosAssignment_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:912:1: ( rule__Verduleria__ProductosAssignment_1 ) { before(grammarAccess.getVerduleriaAccess().getProductosAssignment_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:913:1: ( rule__Verduleria__ProductosAssignment_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:913:2: rule__Verduleria__ProductosAssignment_1 { pushFollow(FOLLOW_rule__Verduleria__ProductosAssignment_1_in_rule__Verduleria__Alternatives1889); rule__Verduleria__ProductosAssignment_1(); state._fsp--; } after(grammarAccess.getVerduleriaAccess().getProductosAssignment_1()); } } break; case 3 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:917:6: ( ( 
rule__Verduleria__VentasAssignment_2 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:917:6: ( ( rule__Verduleria__VentasAssignment_2 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:918:1: ( rule__Verduleria__VentasAssignment_2 ) { before(grammarAccess.getVerduleriaAccess().getVentasAssignment_2()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:919:1: ( rule__Verduleria__VentasAssignment_2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:919:2: rule__Verduleria__VentasAssignment_2 { pushFollow(FOLLOW_rule__Verduleria__VentasAssignment_2_in_rule__Verduleria__Alternatives1907); rule__Verduleria__VentasAssignment_2(); state._fsp--; } after(grammarAccess.getVerduleriaAccess().getVentasAssignment_2()); } } break; case 4 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:923:6: ( ( rule__Verduleria__TareasAssignment_3 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:923:6: ( ( rule__Verduleria__TareasAssignment_3 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:924:1: ( rule__Verduleria__TareasAssignment_3 ) { before(grammarAccess.getVerduleriaAccess().getTareasAssignment_3()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:925:1: ( rule__Verduleria__TareasAssignment_3 ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:925:2: rule__Verduleria__TareasAssignment_3 { pushFollow(FOLLOW_rule__Verduleria__TareasAssignment_3_in_rule__Verduleria__Alternatives1925); rule__Verduleria__TareasAssignment_3(); state._fsp--; } after(grammarAccess.getVerduleriaAccess().getTareasAssignment_3()); } } break; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Verduleria__Alternatives" // $ANTLR start "rule__Cliente__Alternatives" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:934:1: rule__Cliente__Alternatives : ( ( ruleClienteConDeuda ) | ( ruleClienteAlDia ) | ( ruleClienteConCredito ) ); public final void rule__Cliente__Alternatives() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:938:1: ( ( ruleClienteConDeuda ) | ( ruleClienteAlDia ) | ( ruleClienteConCredito ) ) int alt3=3; int LA3_0 = input.LA(1); if ( (LA3_0==37) ) { int LA3_1 = input.LA(2); if ( (LA3_1==RULE_ID) ) { switch ( input.LA(3) ) { case 40: { alt3=3; } break; case 38: { alt3=1; } break; case 39: { alt3=2; } break; default: NoViableAltException nvae = new NoViableAltException("", 3, 2, input); throw nvae; } } else { NoViableAltException nvae = new NoViableAltException("", 3, 1, input); throw nvae; } } else { NoViableAltException nvae = new NoViableAltException("", 3, 0, input); throw nvae; } switch (alt3) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:939:1: ( ruleClienteConDeuda ) { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:939:1: ( ruleClienteConDeuda ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:940:1: ruleClienteConDeuda { before(grammarAccess.getClienteAccess().getClienteConDeudaParserRuleCall_0()); pushFollow(FOLLOW_ruleClienteConDeuda_in_rule__Cliente__Alternatives1958); ruleClienteConDeuda(); state._fsp--; after(grammarAccess.getClienteAccess().getClienteConDeudaParserRuleCall_0()); } } break; case 2 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:945:6: ( ruleClienteAlDia ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:945:6: ( ruleClienteAlDia ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:946:1: ruleClienteAlDia { before(grammarAccess.getClienteAccess().getClienteAlDiaParserRuleCall_1()); pushFollow(FOLLOW_ruleClienteAlDia_in_rule__Cliente__Alternatives1975); ruleClienteAlDia(); state._fsp--; after(grammarAccess.getClienteAccess().getClienteAlDiaParserRuleCall_1()); } } break; case 3 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:951:6: ( ruleClienteConCredito ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:951:6: ( ruleClienteConCredito ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:952:1: ruleClienteConCredito { before(grammarAccess.getClienteAccess().getClienteConCreditoParserRuleCall_2()); 
pushFollow(FOLLOW_ruleClienteConCredito_in_rule__Cliente__Alternatives1992); ruleClienteConCredito(); state._fsp--; after(grammarAccess.getClienteAccess().getClienteConCreditoParserRuleCall_2()); } } break; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Cliente__Alternatives" // $ANTLR start "rule__Producto__Alternatives_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:962:1: rule__Producto__Alternatives_0 : ( ( 'Los' ) | ( 'Las' ) | ( 'los' ) | ( 'las' ) ); public final void rule__Producto__Alternatives_0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:966:1: ( ( 'Los' ) | ( 'Las' ) | ( 'los' ) | ( 'las' ) ) int alt4=4; switch ( input.LA(1) ) { case 11: { alt4=1; } break; case 12: { alt4=2; } break; case 13: { alt4=3; } break; case 14: { alt4=4; } break; default: NoViableAltException nvae = new NoViableAltException("", 4, 0, input); throw nvae; } switch (alt4) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:967:1: ( 'Los' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:967:1: ( 'Los' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:968:1: 'Los' { before(grammarAccess.getProductoAccess().getLosKeyword_0_0()); match(input,11,FOLLOW_11_in_rule__Producto__Alternatives_02025); after(grammarAccess.getProductoAccess().getLosKeyword_0_0()); } } break; case 2 : // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:975:6: ( 'Las' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:975:6: ( 'Las' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:976:1: 'Las' { before(grammarAccess.getProductoAccess().getLasKeyword_0_1()); match(input,12,FOLLOW_12_in_rule__Producto__Alternatives_02045); after(grammarAccess.getProductoAccess().getLasKeyword_0_1()); } } break; case 3 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:983:6: ( 'los' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:983:6: ( 'los' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:984:1: 'los' { before(grammarAccess.getProductoAccess().getLosKeyword_0_2()); match(input,13,FOLLOW_13_in_rule__Producto__Alternatives_02065); after(grammarAccess.getProductoAccess().getLosKeyword_0_2()); } } break; case 4 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:991:6: ( 'las' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:991:6: ( 'las' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:992:1: 'las' { before(grammarAccess.getProductoAccess().getLasKeyword_0_3()); match(input,14,FOLLOW_14_in_rule__Producto__Alternatives_02085); after(grammarAccess.getProductoAccess().getLasKeyword_0_3()); } } break; } } catch (RecognitionException 
re) {
    // Standard generated error-recovery epilogue (tail of rule__Producto__Alternatives_0,
    // whose catch-clause header sits on the preceding source line).
    reportError(re);
    recover(input,re);
}
finally {
    restoreStackSize(stackSize);
}
return ;
}
// $ANTLR end "rule__Producto__Alternatives_0"


// $ANTLR start "rule__PrecioPorPeso__Alternatives_1"
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1004:1: rule__PrecioPorPeso__Alternatives_1 : ( ( ( rule__PrecioPorPeso__Group_1_0__0 ) ) | ( ( rule__PrecioPorPeso__Group_1_1__0 ) ) );
// ANTLR/Xtext-generated content-assist dispatcher: selects between the two
// alternative groups of grammar rule PrecioPorPeso using one token of lookahead.
// NOTE(review): generated code (src-gen) -- do not hand-edit; regenerate from the grammar.
public final void rule__PrecioPorPeso__Alternatives_1() throws RecognitionException {
    int stackSize = keepStackSize();

    try {
        // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1008:1: ( ( ( rule__PrecioPorPeso__Group_1_0__0 ) ) | ( ( rule__PrecioPorPeso__Group_1_1__0 ) ) )
        int alt5=2;
        int LA5_0 = input.LA(1);

        // Token 13 ('los' -- see rule__Producto__Alternatives_0) predicts group_1_0;
        // token 43 (keyword declared elsewhere in this generated file) predicts group_1_1.
        if ( (LA5_0==13) ) {
            alt5=1;
        }
        else if ( (LA5_0==43) ) {
            alt5=2;
        }
        else {
            NoViableAltException nvae =
                new NoViableAltException("", 5, 0, input);

            throw nvae;
        }
        switch (alt5) {
            case 1 :
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1009:1: ( ( rule__PrecioPorPeso__Group_1_0__0 ) )
                {
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1009:1: ( ( rule__PrecioPorPeso__Group_1_0__0 ) )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1010:1: ( rule__PrecioPorPeso__Group_1_0__0 )
                {
                // before()/after() notify the content-assist framework around the sub-rule invocation.
                before(grammarAccess.getPrecioPorPesoAccess().getGroup_1_0());
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1011:1: ( rule__PrecioPorPeso__Group_1_0__0 )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1011:2: rule__PrecioPorPeso__Group_1_0__0
                {
                pushFollow(FOLLOW_rule__PrecioPorPeso__Group_1_0__0_in_rule__PrecioPorPeso__Alternatives_12119);
                rule__PrecioPorPeso__Group_1_0__0();

                state._fsp--;

                }

                after(grammarAccess.getPrecioPorPesoAccess().getGroup_1_0());

                }

                }
                break;
            case 2 :
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1015:6: ( ( rule__PrecioPorPeso__Group_1_1__0 ) )
                {
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1015:6: ( ( rule__PrecioPorPeso__Group_1_1__0 ) )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1016:1: ( rule__PrecioPorPeso__Group_1_1__0 )
                {
                before(grammarAccess.getPrecioPorPesoAccess().getGroup_1_1());
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1017:1: ( rule__PrecioPorPeso__Group_1_1__0 )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1017:2: rule__PrecioPorPeso__Group_1_1__0
                {
                pushFollow(FOLLOW_rule__PrecioPorPeso__Group_1_1__0_in_rule__PrecioPorPeso__Alternatives_12137);
                rule__PrecioPorPeso__Group_1_1__0();

                state._fsp--;

                }

                after(grammarAccess.getPrecioPorPesoAccess().getGroup_1_1());

                }

                }
                break;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__PrecioPorPeso__Alternatives_1"


// $ANTLR start "rule__MontoDinero__Alternatives_1"
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1026:1: rule__MontoDinero__Alternatives_1 : ( ( 'peso' ) | ( 'pesos' ) | ( 'pesito' ) | ( 'pesitos' ) );
// NOTE(review): this generated file wraps mid-declaration -- the 'RecognitionException'
// of this throws-clause continues on the next source line.
public final void rule__MontoDinero__Alternatives_1() throws
RecognitionException {
    // ANTLR/Xtext-generated content-assist dispatcher: matches exactly one of the
    // currency-unit keywords of grammar rule MontoDinero -- 'peso' (token 15),
    // 'pesos' (16), 'pesito' (17), 'pesitos' (18).
    // NOTE(review): generated code (src-gen) -- do not hand-edit; regenerate from the grammar.
    int stackSize = keepStackSize();

    try {
        // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1030:1: ( ( 'peso' ) | ( 'pesos' ) | ( 'pesito' ) | ( 'pesitos' ) )
        int alt6=4;
        // One token of lookahead is enough: each keyword has its own token type.
        switch ( input.LA(1) ) {
        case 15:
            {
            alt6=1;
            }
            break;
        case 16:
            {
            alt6=2;
            }
            break;
        case 17:
            {
            alt6=3;
            }
            break;
        case 18:
            {
            alt6=4;
            }
            break;
        default:
            NoViableAltException nvae =
                new NoViableAltException("", 6, 0, input);

            throw nvae;
        }

        switch (alt6) {
            case 1 :
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1031:1: ( 'peso' )
                {
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1031:1: ( 'peso' )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1032:1: 'peso'
                {
                // before()/after() notify the content-assist framework around the keyword match.
                before(grammarAccess.getMontoDineroAccess().getPesoKeyword_1_0());
                match(input,15,FOLLOW_15_in_rule__MontoDinero__Alternatives_12171);
                after(grammarAccess.getMontoDineroAccess().getPesoKeyword_1_0());

                }

                }
                break;
            case 2 :
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1039:6: ( 'pesos' )
                {
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1039:6: ( 'pesos' )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1040:1: 'pesos'
                {
                before(grammarAccess.getMontoDineroAccess().getPesosKeyword_1_1());
                match(input,16,FOLLOW_16_in_rule__MontoDinero__Alternatives_12191);
                after(grammarAccess.getMontoDineroAccess().getPesosKeyword_1_1());

                }

                }
                break;
            case 3 :
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1047:6: ( 'pesito' )
                {
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1047:6: ( 'pesito' )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1048:1: 'pesito'
                {
                before(grammarAccess.getMontoDineroAccess().getPesitoKeyword_1_2());
                match(input,17,FOLLOW_17_in_rule__MontoDinero__Alternatives_12211);
                after(grammarAccess.getMontoDineroAccess().getPesitoKeyword_1_2());

                }

                }
                break;
            case 4 :
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1055:6: ( 'pesitos' )
                {
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1055:6: ( 'pesitos' )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1056:1: 'pesitos'
                {
                before(grammarAccess.getMontoDineroAccess().getPesitosKeyword_1_3());
                match(input,18,FOLLOW_18_in_rule__MontoDinero__Alternatives_12231);
                after(grammarAccess.getMontoDineroAccess().getPesitosKeyword_1_3());

                }

                }
                break;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__MontoDinero__Alternatives_1"


// $ANTLR start "rule__Peso__Alternatives"
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1068:1: rule__Peso__Alternatives : ( ( rulePesoMagnitudVariable ) | ( rulePesoMagnitudFija ) );
// ANTLR/Xtext-generated content-assist dispatcher for rule Peso: a leading RULE_INT
// predicts PesoMagnitudVariable; tokens 19 ('kilo'), 44, 60 and 61 (keywords declared
// elsewhere in this generated file) predict PesoMagnitudFija.
public final void rule__Peso__Alternatives() throws RecognitionException {
    int stackSize = keepStackSize();

    try {
        // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1072:1: ( ( rulePesoMagnitudVariable ) | ( rulePesoMagnitudFija ) )
        int alt7=2;
        int LA7_0 = input.LA(1);

        if ( (LA7_0==RULE_INT) ) {
            alt7=1;
        }
        else if ( (LA7_0==19||LA7_0==44||(LA7_0>=60 && LA7_0<=61)) ) {
            alt7=2;
        }
        else {
            NoViableAltException nvae =
                new NoViableAltException("", 7, 0, input);

            throw nvae;
        }
        switch (alt7) {
            case 1 :
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1073:1: ( rulePesoMagnitudVariable )
                {
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1073:1: ( rulePesoMagnitudVariable )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1074:1: rulePesoMagnitudVariable
                {
                before(grammarAccess.getPesoAccess().getPesoMagnitudVariableParserRuleCall_0());
                pushFollow(FOLLOW_rulePesoMagnitudVariable_in_rule__Peso__Alternatives2265);
                rulePesoMagnitudVariable();

                state._fsp--;

                after(grammarAccess.getPesoAccess().getPesoMagnitudVariableParserRuleCall_0());

                }

                }
                break;
            case 2 :
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1079:6: ( rulePesoMagnitudFija )
                {
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1079:6: ( rulePesoMagnitudFija )
                // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1080:1: rulePesoMagnitudFija
                {
                before(grammarAccess.getPesoAccess().getPesoMagnitudFijaParserRuleCall_1());
                pushFollow(FOLLOW_rulePesoMagnitudFija_in_rule__Peso__Alternatives2282);
                rulePesoMagnitudFija();

                state._fsp--;
after(grammarAccess.getPesoAccess().getPesoMagnitudFijaParserRuleCall_1()); } } break; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Peso__Alternatives" // $ANTLR start "rule__PesoMagnitudFija__Alternatives" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1090:1: rule__PesoMagnitudFija__Alternatives : ( ( ( rule__PesoMagnitudFija__PesaMedioKiloAssignment_0 ) ) | ( ( rule__PesoMagnitudFija__Group_1__0 ) ) | ( ( rule__PesoMagnitudFija__Group_2__0 ) ) ); public final void rule__PesoMagnitudFija__Alternatives() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1094:1: ( ( ( rule__PesoMagnitudFija__PesaMedioKiloAssignment_0 ) ) | ( ( rule__PesoMagnitudFija__Group_1__0 ) ) | ( ( rule__PesoMagnitudFija__Group_2__0 ) ) ) int alt8=3; switch ( input.LA(1) ) { case 60: { alt8=1; } break; case 44: { int LA8_2 = input.LA(2); if ( (LA8_2==61) ) { alt8=2; } else if ( (LA8_2==19) ) { alt8=3; } else { NoViableAltException nvae = new NoViableAltException("", 8, 2, input); throw nvae; } } break; case 61: { alt8=2; } break; case 19: { alt8=3; } break; default: NoViableAltException nvae = new NoViableAltException("", 8, 0, input); throw nvae; } switch (alt8) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1095:1: ( ( rule__PesoMagnitudFija__PesaMedioKiloAssignment_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1095:1: ( ( rule__PesoMagnitudFija__PesaMedioKiloAssignment_0 ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1096:1: ( rule__PesoMagnitudFija__PesaMedioKiloAssignment_0 ) { before(grammarAccess.getPesoMagnitudFijaAccess().getPesaMedioKiloAssignment_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1097:1: ( rule__PesoMagnitudFija__PesaMedioKiloAssignment_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1097:2: rule__PesoMagnitudFija__PesaMedioKiloAssignment_0 { pushFollow(FOLLOW_rule__PesoMagnitudFija__PesaMedioKiloAssignment_0_in_rule__PesoMagnitudFija__Alternatives2314); rule__PesoMagnitudFija__PesaMedioKiloAssignment_0(); state._fsp--; } after(grammarAccess.getPesoMagnitudFijaAccess().getPesaMedioKiloAssignment_0()); } } break; case 2 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1101:6: ( ( rule__PesoMagnitudFija__Group_1__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1101:6: ( ( rule__PesoMagnitudFija__Group_1__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1102:1: ( rule__PesoMagnitudFija__Group_1__0 ) { before(grammarAccess.getPesoMagnitudFijaAccess().getGroup_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1103:1: ( rule__PesoMagnitudFija__Group_1__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1103:2: rule__PesoMagnitudFija__Group_1__0 { 
pushFollow(FOLLOW_rule__PesoMagnitudFija__Group_1__0_in_rule__PesoMagnitudFija__Alternatives2332); rule__PesoMagnitudFija__Group_1__0(); state._fsp--; } after(grammarAccess.getPesoMagnitudFijaAccess().getGroup_1()); } } break; case 3 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1107:6: ( ( rule__PesoMagnitudFija__Group_2__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1107:6: ( ( rule__PesoMagnitudFija__Group_2__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1108:1: ( rule__PesoMagnitudFija__Group_2__0 ) { before(grammarAccess.getPesoMagnitudFijaAccess().getGroup_2()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1109:1: ( rule__PesoMagnitudFija__Group_2__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1109:2: rule__PesoMagnitudFija__Group_2__0 { pushFollow(FOLLOW_rule__PesoMagnitudFija__Group_2__0_in_rule__PesoMagnitudFija__Alternatives2350); rule__PesoMagnitudFija__Group_2__0(); state._fsp--; } after(grammarAccess.getPesoMagnitudFijaAccess().getGroup_2()); } } break; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Alternatives" // $ANTLR start "rule__MedidaPeso__Alternatives" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1118:1: rule__MedidaPeso__Alternatives : ( ( ruleMedidaPesoKilo ) | ( ruleMedidaPesoGramos ) ); public final void rule__MedidaPeso__Alternatives() throws RecognitionException { int stackSize = 
keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1122:1: ( ( ruleMedidaPesoKilo ) | ( ruleMedidaPesoGramos ) ) int alt9=2; int LA9_0 = input.LA(1); if ( ((LA9_0>=19 && LA9_0<=20)) ) { alt9=1; } else if ( (LA9_0==45) ) { alt9=2; } else { NoViableAltException nvae = new NoViableAltException("", 9, 0, input); throw nvae; } switch (alt9) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1123:1: ( ruleMedidaPesoKilo ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1123:1: ( ruleMedidaPesoKilo ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1124:1: ruleMedidaPesoKilo { before(grammarAccess.getMedidaPesoAccess().getMedidaPesoKiloParserRuleCall_0()); pushFollow(FOLLOW_ruleMedidaPesoKilo_in_rule__MedidaPeso__Alternatives2383); ruleMedidaPesoKilo(); state._fsp--; after(grammarAccess.getMedidaPesoAccess().getMedidaPesoKiloParserRuleCall_0()); } } break; case 2 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1129:6: ( ruleMedidaPesoGramos ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1129:6: ( ruleMedidaPesoGramos ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1130:1: ruleMedidaPesoGramos { before(grammarAccess.getMedidaPesoAccess().getMedidaPesoGramosParserRuleCall_1()); pushFollow(FOLLOW_ruleMedidaPesoGramos_in_rule__MedidaPeso__Alternatives2400); ruleMedidaPesoGramos(); state._fsp--; 
after(grammarAccess.getMedidaPesoAccess().getMedidaPesoGramosParserRuleCall_1()); } } break; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MedidaPeso__Alternatives" // $ANTLR start "rule__MedidaPesoKilo__Alternatives_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1140:1: rule__MedidaPesoKilo__Alternatives_1 : ( ( 'kilo' ) | ( 'kilos' ) ); public final void rule__MedidaPesoKilo__Alternatives_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1144:1: ( ( 'kilo' ) | ( 'kilos' ) ) int alt10=2; int LA10_0 = input.LA(1); if ( (LA10_0==19) ) { alt10=1; } else if ( (LA10_0==20) ) { alt10=2; } else { NoViableAltException nvae = new NoViableAltException("", 10, 0, input); throw nvae; } switch (alt10) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1145:1: ( 'kilo' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1145:1: ( 'kilo' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1146:1: 'kilo' { before(grammarAccess.getMedidaPesoKiloAccess().getKiloKeyword_1_0()); match(input,19,FOLLOW_19_in_rule__MedidaPesoKilo__Alternatives_12433); after(grammarAccess.getMedidaPesoKiloAccess().getKiloKeyword_1_0()); } } break; case 2 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1153:6: ( 'kilos' ) { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1153:6: ( 'kilos' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1154:1: 'kilos'
                    {
                    // Alternative 2 of MedidaPesoKilo: consume keyword token 20 ('kilos').
                     before(grammarAccess.getMedidaPesoKiloAccess().getKilosKeyword_1_1()); 
                    match(input,20,FOLLOW_20_in_rule__MedidaPesoKilo__Alternatives_12453); 
                     after(grammarAccess.getMedidaPesoKiloAccess().getKilosKeyword_1_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__MedidaPesoKilo__Alternatives_1"

    // NOTE(review): GENERATED CODE — Xtext/ANTLR content-assist parser (see the
    // src-gen grammar path in the comments). Do not hand-edit: changes will be lost
    // and the FOLLOW_* constants, token numbers and before()/after() call order must
    // stay in sync with InternalTextualVerduler.g. Regenerate via the Xtext workflow.

    // Dispatch for Horario: RULE_INT selects the numeric form; tokens 23..34
    // (the hour words, see rule__HorasLiteral__LiteralAlternatives_0) select the literal form.
    // $ANTLR start "rule__Horario__Alternatives"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1166:1: rule__Horario__Alternatives : ( ( ruleHorarioNumerico ) | ( ruleHorarioLiteral ) );
    public final void rule__Horario__Alternatives() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1170:1: ( ( ruleHorarioNumerico ) | ( ruleHorarioLiteral ) )
            int alt11=2;
            int LA11_0 = input.LA(1);

            if ( (LA11_0==RULE_INT) ) {
                alt11=1;
            }
            else if ( ((LA11_0>=23 && LA11_0<=34)) ) {
                alt11=2;
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 11, 0, input);

                throw nvae;
            }
            switch (alt11) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1171:1: ( ruleHorarioNumerico )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1171:1: ( ruleHorarioNumerico )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1172:1: ruleHorarioNumerico
                    {
                     before(grammarAccess.getHorarioAccess().getHorarioNumericoParserRuleCall_0()); 
                    pushFollow(FOLLOW_ruleHorarioNumerico_in_rule__Horario__Alternatives2487);
                    ruleHorarioNumerico();

                    state._fsp--;

                     after(grammarAccess.getHorarioAccess().getHorarioNumericoParserRuleCall_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1177:6: ( ruleHorarioLiteral )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1177:6: ( ruleHorarioLiteral )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1178:1: ruleHorarioLiteral
                    {
                     before(grammarAccess.getHorarioAccess().getHorarioLiteralParserRuleCall_1()); 
                    pushFollow(FOLLOW_ruleHorarioLiteral_in_rule__Horario__Alternatives2504);
                    ruleHorarioLiteral();

                    state._fsp--;

                     after(grammarAccess.getHorarioAccess().getHorarioLiteralParserRuleCall_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__Horario__Alternatives"

    // Dispatch on the leading keyword token: 58 -> TareaLimpiezaLocal,
    // 57 -> TareaArqueoCaja, 56 -> TareaRevisionProductos.
    // $ANTLR start "rule__TipoTarea__Alternatives"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1188:1: rule__TipoTarea__Alternatives : ( ( ruleTareaLimpiezaLocal ) | ( ruleTareaArqueoCaja ) | ( ruleTareaRevisionProductos ) );
    public final void rule__TipoTarea__Alternatives() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1192:1: ( ( ruleTareaLimpiezaLocal ) | ( ruleTareaArqueoCaja ) | ( ruleTareaRevisionProductos ) )
            int alt12=3;
            switch ( input.LA(1) ) {
            case 58:
                {
                alt12=1;
                }
                break;
            case 57:
                {
                alt12=2;
                }
                break;
            case 56:
                {
                alt12=3;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 12, 0, input);

                throw nvae;
            }

            switch (alt12) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1193:1: ( ruleTareaLimpiezaLocal )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1193:1: ( ruleTareaLimpiezaLocal )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1194:1: ruleTareaLimpiezaLocal
                    {
                     before(grammarAccess.getTipoTareaAccess().getTareaLimpiezaLocalParserRuleCall_0()); 
                    pushFollow(FOLLOW_ruleTareaLimpiezaLocal_in_rule__TipoTarea__Alternatives2536);
                    ruleTareaLimpiezaLocal();

                    state._fsp--;

                     after(grammarAccess.getTipoTareaAccess().getTareaLimpiezaLocalParserRuleCall_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1199:6: ( ruleTareaArqueoCaja )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1199:6: ( ruleTareaArqueoCaja )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1200:1: ruleTareaArqueoCaja
                    {
                     before(grammarAccess.getTipoTareaAccess().getTareaArqueoCajaParserRuleCall_1()); 
                    pushFollow(FOLLOW_ruleTareaArqueoCaja_in_rule__TipoTarea__Alternatives2553);
                    ruleTareaArqueoCaja();

                    state._fsp--;

                     after(grammarAccess.getTipoTareaAccess().getTareaArqueoCajaParserRuleCall_1()); 

                    }

                    }
                    break;
                case 3 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1205:6: ( ruleTareaRevisionProductos )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1205:6: ( ruleTareaRevisionProductos )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1206:1: ruleTareaRevisionProductos
                    {
                     before(grammarAccess.getTipoTareaAccess().getTareaRevisionProductosParserRuleCall_2()); 
                    pushFollow(FOLLOW_ruleTareaRevisionProductos_in_rule__TipoTarea__Alternatives2570);
                    ruleTareaRevisionProductos();

                    state._fsp--;

                     after(grammarAccess.getTipoTareaAccess().getTareaRevisionProductosParserRuleCall_2()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__TipoTarea__Alternatives"

    // Spanish article alternative at position 1_0: token 14 ('las') vs token 13 ('los').
    // $ANTLR start "rule__TareaRevisionProductos__Alternatives_1_0"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1216:1: rule__TareaRevisionProductos__Alternatives_1_0 : ( ( 'las' ) | ( 'los' ) );
    public final void rule__TareaRevisionProductos__Alternatives_1_0() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1220:1: ( ( 'las' ) | ( 'los' ) )
            int alt13=2;
            int LA13_0 = input.LA(1);

            if ( (LA13_0==14) ) {
                alt13=1;
            }
            else if ( (LA13_0==13) ) {
                alt13=2;
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 13, 0, input);

                throw nvae;
            }
            switch (alt13) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1221:1: ( 'las' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1221:1: ( 'las' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1222:1: 'las'
                    {
                     before(grammarAccess.getTareaRevisionProductosAccess().getLasKeyword_1_0_0()); 
                    match(input,14,FOLLOW_14_in_rule__TareaRevisionProductos__Alternatives_1_02603); 
                     after(grammarAccess.getTareaRevisionProductosAccess().getLasKeyword_1_0_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1229:6: ( 'los' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1229:6: ( 'los' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1230:1: 'los'
                    {
                     before(grammarAccess.getTareaRevisionProductosAccess().getLosKeyword_1_0_1()); 
                    match(input,13,FOLLOW_13_in_rule__TareaRevisionProductos__Alternatives_1_02623); 
                     after(grammarAccess.getTareaRevisionProductosAccess().getLosKeyword_1_0_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__TareaRevisionProductos__Alternatives_1_0"

    // Same article alternative at position 2: token 14 ('las') vs token 13 ('los').
    // $ANTLR start "rule__TareaRevisionProductos__Alternatives_2"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1242:1: rule__TareaRevisionProductos__Alternatives_2 : ( ( 'las' ) | ( 'los' ) );
    public final void rule__TareaRevisionProductos__Alternatives_2() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1246:1: ( ( 'las' ) | ( 'los' ) )
            int alt14=2;
            int LA14_0 = input.LA(1);

            if ( (LA14_0==14) ) {
                alt14=1;
            }
            else if ( (LA14_0==13) ) {
                alt14=2;
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 14, 0, input);

                throw nvae;
            }
            switch (alt14) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1247:1: ( 'las' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1247:1: ( 'las' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1248:1: 'las'
                    {
                     before(grammarAccess.getTareaRevisionProductosAccess().getLasKeyword_2_0()); 
                    match(input,14,FOLLOW_14_in_rule__TareaRevisionProductos__Alternatives_22658); 
                     after(grammarAccess.getTareaRevisionProductosAccess().getLasKeyword_2_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1255:6: ( 'los' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1255:6: ( 'los' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1256:1: 'los'
                    {
                     before(grammarAccess.getTareaRevisionProductosAccess().getLosKeyword_2_1()); 
                    match(input,13,FOLLOW_13_in_rule__TareaRevisionProductos__Alternatives_22678); 
                     after(grammarAccess.getTareaRevisionProductosAccess().getLosKeyword_2_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__TareaRevisionProductos__Alternatives_2"

    // Dispatch for LapsoTiempo: an hour word (tokens 23..34) selects the literal
    // form, RULE_INT selects the numeric form (mirror image of rule__Horario__Alternatives).
    // $ANTLR start "rule__LapsoTiempo__Alternatives"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1268:1: rule__LapsoTiempo__Alternatives : ( ( ruleLapsoTiempoLiteral ) | ( ruleLapsoTiempoNumerico ) );
    public final void rule__LapsoTiempo__Alternatives() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1272:1: ( ( ruleLapsoTiempoLiteral ) | ( ruleLapsoTiempoNumerico ) )
            int alt15=2;
            int LA15_0 = input.LA(1);

            if ( ((LA15_0>=23 && LA15_0<=34)) ) {
                alt15=1;
            }
            else if ( (LA15_0==RULE_INT) ) {
                alt15=2;
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 15, 0, input);

                throw nvae;
            }
            switch (alt15) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1273:1: ( ruleLapsoTiempoLiteral )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1273:1: ( ruleLapsoTiempoLiteral )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1274:1: ruleLapsoTiempoLiteral
                    {
                     before(grammarAccess.getLapsoTiempoAccess().getLapsoTiempoLiteralParserRuleCall_0()); 
                    pushFollow(FOLLOW_ruleLapsoTiempoLiteral_in_rule__LapsoTiempo__Alternatives2712);
                    ruleLapsoTiempoLiteral();

                    state._fsp--;

                     after(grammarAccess.getLapsoTiempoAccess().getLapsoTiempoLiteralParserRuleCall_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1279:6: ( ruleLapsoTiempoNumerico )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1279:6: ( ruleLapsoTiempoNumerico )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1280:1: ruleLapsoTiempoNumerico
                    {
                     before(grammarAccess.getLapsoTiempoAccess().getLapsoTiempoNumericoParserRuleCall_1()); 
                    pushFollow(FOLLOW_ruleLapsoTiempoNumerico_in_rule__LapsoTiempo__Alternatives2729);
                    ruleLapsoTiempoNumerico();

                    state._fsp--;

                     after(grammarAccess.getLapsoTiempoAccess().getLapsoTiempoNumericoParserRuleCall_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__LapsoTiempo__Alternatives"

    // Both groups start with RULE_INT, so this rule disambiguates on the SECOND
    // lookahead token: 59 -> group 0, 21..22 ('hora'/'horas') -> group 1.
    // $ANTLR start "rule__LapsoTiempoNumerico__Alternatives"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1290:1: rule__LapsoTiempoNumerico__Alternatives : ( ( ( rule__LapsoTiempoNumerico__Group_0__0 ) ) | ( ( rule__LapsoTiempoNumerico__Group_1__0 ) ) );
    public final void rule__LapsoTiempoNumerico__Alternatives() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1294:1: ( ( ( rule__LapsoTiempoNumerico__Group_0__0 ) ) | ( ( rule__LapsoTiempoNumerico__Group_1__0 ) ) )
            int alt16=2;
            int LA16_0 = input.LA(1);

            if ( (LA16_0==RULE_INT) ) {
                int LA16_1 = input.LA(2);

                if ( (LA16_1==59) ) {
                    alt16=1;
                }
                else if ( ((LA16_1>=21 && LA16_1<=22)) ) {
                    alt16=2;
                }
                else {
                    NoViableAltException nvae =
                        new NoViableAltException("", 16, 1, input);

                    throw nvae;
                }
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 16, 0, input);

                throw nvae;
            }
            switch (alt16) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1295:1: ( ( rule__LapsoTiempoNumerico__Group_0__0 ) )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1295:1: ( ( rule__LapsoTiempoNumerico__Group_0__0 ) )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1296:1: ( rule__LapsoTiempoNumerico__Group_0__0 )
                    {
                     before(grammarAccess.getLapsoTiempoNumericoAccess().getGroup_0()); 
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1297:1: ( rule__LapsoTiempoNumerico__Group_0__0 )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1297:2: rule__LapsoTiempoNumerico__Group_0__0
                    {
                    pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_0__0_in_rule__LapsoTiempoNumerico__Alternatives2761);
                    rule__LapsoTiempoNumerico__Group_0__0();

                    state._fsp--;

                    }

                     after(grammarAccess.getLapsoTiempoNumericoAccess().getGroup_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1301:6: ( ( rule__LapsoTiempoNumerico__Group_1__0 ) )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1301:6: ( ( rule__LapsoTiempoNumerico__Group_1__0 ) )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1302:1: ( rule__LapsoTiempoNumerico__Group_1__0 )
                    {
                     before(grammarAccess.getLapsoTiempoNumericoAccess().getGroup_1()); 
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1303:1: ( rule__LapsoTiempoNumerico__Group_1__0 )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1303:2: rule__LapsoTiempoNumerico__Group_1__0
                    {
                    pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1__0_in_rule__LapsoTiempoNumerico__Alternatives2779);
                    rule__LapsoTiempoNumerico__Group_1__0();

                    state._fsp--;

                    }

                     after(grammarAccess.getLapsoTiempoNumericoAccess().getGroup_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__LapsoTiempoNumerico__Alternatives"

    // Singular/plural keyword: token 21 'hora' vs token 22 'horas'.
    // $ANTLR start "rule__LapsoTiempoNumerico__Alternatives_1_1"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1312:1: rule__LapsoTiempoNumerico__Alternatives_1_1 : ( ( 'hora' ) | ( 'horas' ) );
    public final void rule__LapsoTiempoNumerico__Alternatives_1_1() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1316:1: ( ( 'hora' ) | ( 'horas' ) )
            int alt17=2;
            int LA17_0 = input.LA(1);

            if ( (LA17_0==21) ) {
                alt17=1;
            }
            else if ( (LA17_0==22) ) {
                alt17=2;
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 17, 0, input);

                throw nvae;
            }
            switch (alt17) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1317:1: ( 'hora' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1317:1: ( 'hora' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1318:1: 'hora'
                    {
                     before(grammarAccess.getLapsoTiempoNumericoAccess().getHoraKeyword_1_1_0()); 
                    match(input,21,FOLLOW_21_in_rule__LapsoTiempoNumerico__Alternatives_1_12813); 
                     after(grammarAccess.getLapsoTiempoNumericoAccess().getHoraKeyword_1_1_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1325:6: ( 'horas' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1325:6: ( 'horas' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1326:1: 'horas'
                    {
                     before(grammarAccess.getLapsoTiempoNumericoAccess().getHorasKeyword_1_1_1()); 
                    match(input,22,FOLLOW_22_in_rule__LapsoTiempoNumerico__Alternatives_1_12833); 
                     after(grammarAccess.getLapsoTiempoNumericoAccess().getHorasKeyword_1_1_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__LapsoTiempoNumerico__Alternatives_1_1"

    // Same singular/plural choice for the literal form: token 21 'hora' vs token 22 'horas'.
    // $ANTLR start "rule__LapsoTiempoLiteral__Alternatives_1"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1338:1: rule__LapsoTiempoLiteral__Alternatives_1 : ( ( 'hora' ) | ( 'horas' ) );
    public final void rule__LapsoTiempoLiteral__Alternatives_1() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1342:1: ( ( 'hora' ) | ( 'horas' ) )
            int alt18=2;
            int LA18_0 = input.LA(1);

            if ( (LA18_0==21) ) {
                alt18=1;
            }
            else if ( (LA18_0==22) ) {
                alt18=2;
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 18, 0, input);

                throw nvae;
            }
            switch (alt18) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1343:1: ( 'hora' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1343:1: ( 'hora' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1344:1: 'hora'
                    {
                     before(grammarAccess.getLapsoTiempoLiteralAccess().getHoraKeyword_1_0()); 
                    match(input,21,FOLLOW_21_in_rule__LapsoTiempoLiteral__Alternatives_12868); 
                     after(grammarAccess.getLapsoTiempoLiteralAccess().getHoraKeyword_1_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1351:6: ( 'horas' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1351:6: ( 'horas' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1352:1: 'horas'
                    {
                     before(grammarAccess.getLapsoTiempoLiteralAccess().getHorasKeyword_1_1()); 
                    match(input,22,FOLLOW_22_in_rule__LapsoTiempoLiteral__Alternatives_12888); 
                     after(grammarAccess.getLapsoTiempoLiteralAccess().getHorasKeyword_1_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__LapsoTiempoLiteral__Alternatives_1"

    // Spanish hour words 'una'..'doce' map one-to-one onto keyword tokens 23..34.
    // $ANTLR start "rule__HorasLiteral__LiteralAlternatives_0"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1364:1: rule__HorasLiteral__LiteralAlternatives_0 : ( ( 'una' ) | ( 'dos' ) | ( 'tres' ) | ( 'cuatro' ) | ( 'cinco' ) | ( 'seis' ) | ( 'siete' ) | ( 'ocho' ) | ( 'nueve' ) | ( 'diez' ) | ( 'once' ) | ( 'doce' ) );
    public final void rule__HorasLiteral__LiteralAlternatives_0() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1368:1: ( ( 'una' ) | ( 'dos' ) | ( 'tres' ) | ( 'cuatro' ) | ( 'cinco' ) | ( 'seis' ) | ( 'siete' ) | ( 'ocho' ) | ( 'nueve' ) | ( 'diez' ) | ( 'once' ) | ( 'doce' ) )
            int alt19=12;
            switch ( input.LA(1) ) {
            case 23:
                {
                alt19=1;
                }
                break;
            case 24:
                {
                alt19=2;
                }
                break;
            case 25:
                {
                alt19=3;
                }
                break;
            case 26:
                {
                alt19=4;
                }
                break;
            case 27:
                {
                alt19=5;
                }
                break;
            case 28:
                {
                alt19=6;
                }
                break;
            case 29:
                {
                alt19=7;
                }
                break;
            case 30:
                {
                alt19=8;
                }
                break;
            case 31:
                {
                alt19=9;
                }
                break;
            case 32:
                {
                alt19=10;
                }
                break;
            case 33:
                {
                alt19=11;
                }
                break;
            case 34:
                {
                alt19=12;
                }
                break;
            default:
                NoViableAltException nvae =
                    new NoViableAltException("", 19, 0, input);

                throw nvae;
            }

            switch (alt19) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1369:1: ( 'una' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1369:1: ( 'una' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1370:1: 'una'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralUnaKeyword_0_0()); 
                    match(input,23,FOLLOW_23_in_rule__HorasLiteral__LiteralAlternatives_02923); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralUnaKeyword_0_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1377:6: ( 'dos' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1377:6: ( 'dos' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1378:1: 'dos'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralDosKeyword_0_1()); 
                    match(input,24,FOLLOW_24_in_rule__HorasLiteral__LiteralAlternatives_02943); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralDosKeyword_0_1()); 

                    }

                    }
                    break;
                case 3 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1385:6: ( 'tres' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1385:6: ( 'tres' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1386:1: 'tres'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralTresKeyword_0_2()); 
                    match(input,25,FOLLOW_25_in_rule__HorasLiteral__LiteralAlternatives_02963); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralTresKeyword_0_2()); 

                    }

                    }
                    break;
                case 4 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1393:6: ( 'cuatro' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1393:6: ( 'cuatro' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1394:1: 'cuatro'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralCuatroKeyword_0_3()); 
                    match(input,26,FOLLOW_26_in_rule__HorasLiteral__LiteralAlternatives_02983); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralCuatroKeyword_0_3()); 

                    }

                    }
                    break;
                case 5 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1401:6: ( 'cinco' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1401:6: ( 'cinco' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1402:1: 'cinco'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralCincoKeyword_0_4()); 
                    match(input,27,FOLLOW_27_in_rule__HorasLiteral__LiteralAlternatives_03003); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralCincoKeyword_0_4()); 

                    }

                    }
                    break;
                case 6 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1409:6: ( 'seis' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1409:6: ( 'seis' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1410:1: 'seis'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralSeisKeyword_0_5()); 
                    match(input,28,FOLLOW_28_in_rule__HorasLiteral__LiteralAlternatives_03023); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralSeisKeyword_0_5()); 

                    }

                    }
                    break;
                case 7 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1417:6: ( 'siete' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1417:6: ( 'siete' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1418:1: 'siete'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralSieteKeyword_0_6()); 
                    match(input,29,FOLLOW_29_in_rule__HorasLiteral__LiteralAlternatives_03043); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralSieteKeyword_0_6()); 

                    }

                    }
                    break;
                case 8 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1425:6: ( 'ocho' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1425:6: ( 'ocho' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1426:1: 'ocho'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralOchoKeyword_0_7()); 
                    match(input,30,FOLLOW_30_in_rule__HorasLiteral__LiteralAlternatives_03063); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralOchoKeyword_0_7()); 

                    }

                    }
                    break;
                case 9 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1433:6: ( 'nueve' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1433:6: ( 'nueve' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1434:1: 'nueve'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralNueveKeyword_0_8()); 
                    match(input,31,FOLLOW_31_in_rule__HorasLiteral__LiteralAlternatives_03083); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralNueveKeyword_0_8()); 

                    }

                    }
                    break;
                case 10 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1441:6: ( 'diez' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1441:6: ( 'diez' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1442:1: 'diez'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralDiezKeyword_0_9()); 
                    match(input,32,FOLLOW_32_in_rule__HorasLiteral__LiteralAlternatives_03103); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralDiezKeyword_0_9()); 

                    }

                    }
                    break;
                case 11 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1449:6: ( 'once' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1449:6: ( 'once' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1450:1: 'once'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralOnceKeyword_0_10()); 
                    match(input,33,FOLLOW_33_in_rule__HorasLiteral__LiteralAlternatives_03123); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralOnceKeyword_0_10()); 

                    }

                    }
                    break;
                case 12 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1457:6: ( 'doce' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1457:6: ( 'doce' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1458:1: 'doce'
                    {
                     before(grammarAccess.getHorasLiteralAccess().getLiteralDoceKeyword_0_11()); 
                    match(input,34,FOLLOW_34_in_rule__HorasLiteral__LiteralAlternatives_03143); 
                     after(grammarAccess.getHorasLiteralAccess().getLiteralDoceKeyword_0_11()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__HorasLiteral__LiteralAlternatives_0"

    // Fraction-of-hour word: token 35 'cuarto' vs token 36 'media'.
    // $ANTLR start "rule__FraccionHoraLiteral__LiteralAlternatives_0"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1470:1: rule__FraccionHoraLiteral__LiteralAlternatives_0 : ( ( 'cuarto' ) | ( 'media' ) );
    public final void rule__FraccionHoraLiteral__LiteralAlternatives_0() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1474:1: ( ( 'cuarto' ) | ( 'media' ) )
            int alt20=2;
            int LA20_0 = input.LA(1);

            if ( (LA20_0==35) ) {
                alt20=1;
            }
            else if ( (LA20_0==36) ) {
                alt20=2;
            }
            else {
                NoViableAltException nvae =
                    new NoViableAltException("", 20, 0, input);

                throw nvae;
            }
            switch (alt20) {
                case 1 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1475:1: ( 'cuarto' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1475:1: ( 'cuarto' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1476:1: 'cuarto'
                    {
                     before(grammarAccess.getFraccionHoraLiteralAccess().getLiteralCuartoKeyword_0_0()); 
                    match(input,35,FOLLOW_35_in_rule__FraccionHoraLiteral__LiteralAlternatives_03178); 
                     after(grammarAccess.getFraccionHoraLiteralAccess().getLiteralCuartoKeyword_0_0()); 

                    }

                    }
                    break;
                case 2 :
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1483:6: ( 'media' )
                    {
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1483:6: ( 'media' )
                    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1484:1: 'media'
                    {
                     before(grammarAccess.getFraccionHoraLiteralAccess().getLiteralMediaKeyword_0_1()); 
                    match(input,36,FOLLOW_36_in_rule__FraccionHoraLiteral__LiteralAlternatives_03198); 
                     after(grammarAccess.getFraccionHoraLiteralAccess().getLiteralMediaKeyword_0_1()); 

                    }

                    }
                    break;

            }
        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__FraccionHoraLiteral__LiteralAlternatives_0"

    // ClienteConDeuda group step 0: the 'Cliente' keyword element, then chain to step 1.
    // $ANTLR start "rule__ClienteConDeuda__Group__0"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1498:1: rule__ClienteConDeuda__Group__0 : rule__ClienteConDeuda__Group__0__Impl rule__ClienteConDeuda__Group__1 ;
    public final void rule__ClienteConDeuda__Group__0() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1502:1: ( rule__ClienteConDeuda__Group__0__Impl rule__ClienteConDeuda__Group__1 )
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1503:2: rule__ClienteConDeuda__Group__0__Impl rule__ClienteConDeuda__Group__1
            {
            pushFollow(FOLLOW_rule__ClienteConDeuda__Group__0__Impl_in_rule__ClienteConDeuda__Group__03230);
            rule__ClienteConDeuda__Group__0__Impl();

            state._fsp--;

            pushFollow(FOLLOW_rule__ClienteConDeuda__Group__1_in_rule__ClienteConDeuda__Group__03233);
            rule__ClienteConDeuda__Group__1();

            state._fsp--;

            }

        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__ClienteConDeuda__Group__0"

    // Step 0 implementation: consume keyword token 37 ('Cliente').
    // $ANTLR start "rule__ClienteConDeuda__Group__0__Impl"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1510:1: rule__ClienteConDeuda__Group__0__Impl : ( 'Cliente' ) ;
    public final void rule__ClienteConDeuda__Group__0__Impl() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1514:1: ( ( 'Cliente' ) )
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1515:1: ( 'Cliente' )
            {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1515:1: ( 'Cliente' )
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1516:1: 'Cliente'
            {
             before(grammarAccess.getClienteConDeudaAccess().getClienteKeyword_0()); 
            match(input,37,FOLLOW_37_in_rule__ClienteConDeuda__Group__0__Impl3261); 
             after(grammarAccess.getClienteConDeudaAccess().getClienteKeyword_0()); 

            }

            }

        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__ClienteConDeuda__Group__0__Impl"

    // ClienteConDeuda group step 1: the name assignment element, then chain to step 2.
    // $ANTLR start "rule__ClienteConDeuda__Group__1"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1529:1: rule__ClienteConDeuda__Group__1 : rule__ClienteConDeuda__Group__1__Impl rule__ClienteConDeuda__Group__2 ;
    public final void rule__ClienteConDeuda__Group__1() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1533:1: ( rule__ClienteConDeuda__Group__1__Impl rule__ClienteConDeuda__Group__2 )
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1534:2: rule__ClienteConDeuda__Group__1__Impl rule__ClienteConDeuda__Group__2
            {
            pushFollow(FOLLOW_rule__ClienteConDeuda__Group__1__Impl_in_rule__ClienteConDeuda__Group__13292);
            rule__ClienteConDeuda__Group__1__Impl();

            state._fsp--;

            pushFollow(FOLLOW_rule__ClienteConDeuda__Group__2_in_rule__ClienteConDeuda__Group__13295);
            rule__ClienteConDeuda__Group__2();

            state._fsp--;

            }

        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);

        }
        return ;
    }
    // $ANTLR end "rule__ClienteConDeuda__Group__1"

    // Step 1 implementation: delegate to the name assignment (method continues past this chunk).
    // $ANTLR start "rule__ClienteConDeuda__Group__1__Impl"
    // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1541:1: rule__ClienteConDeuda__Group__1__Impl : ( ( rule__ClienteConDeuda__NameAssignment_1 ) ) ;
    public final void rule__ClienteConDeuda__Group__1__Impl() throws RecognitionException {

        int stackSize = keepStackSize();

        try {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1545:1: ( ( ( rule__ClienteConDeuda__NameAssignment_1 ) ) )
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1546:1: ( ( rule__ClienteConDeuda__NameAssignment_1 ) )
            {
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1546:1: ( ( rule__ClienteConDeuda__NameAssignment_1 ) )
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1547:1: ( rule__ClienteConDeuda__NameAssignment_1 )
            {
             before(grammarAccess.getClienteConDeudaAccess().getNameAssignment_1()); 
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1548:1: ( rule__ClienteConDeuda__NameAssignment_1 )
            // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1548:2: rule__ClienteConDeuda__NameAssignment_1
            {
            pushFollow(FOLLOW_rule__ClienteConDeuda__NameAssignment_1_in_rule__ClienteConDeuda__Group__1__Impl3322);
            rule__ClienteConDeuda__NameAssignment_1();

            state._fsp--;

            }

             after(grammarAccess.getClienteConDeudaAccess().getNameAssignment_1()); 

            }

            }

        }
        catch (RecognitionException re) {
            reportError(re);
            recover(input,re);
        }
        finally {

            restoreStackSize(stackSize);
} return ; } // $ANTLR end "rule__ClienteConDeuda__Group__1__Impl" // $ANTLR start "rule__ClienteConDeuda__Group__2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1558:1: rule__ClienteConDeuda__Group__2 : rule__ClienteConDeuda__Group__2__Impl rule__ClienteConDeuda__Group__3 ; public final void rule__ClienteConDeuda__Group__2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1562:1: ( rule__ClienteConDeuda__Group__2__Impl rule__ClienteConDeuda__Group__3 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1563:2: rule__ClienteConDeuda__Group__2__Impl rule__ClienteConDeuda__Group__3 { pushFollow(FOLLOW_rule__ClienteConDeuda__Group__2__Impl_in_rule__ClienteConDeuda__Group__23352); rule__ClienteConDeuda__Group__2__Impl(); state._fsp--; pushFollow(FOLLOW_rule__ClienteConDeuda__Group__3_in_rule__ClienteConDeuda__Group__23355); rule__ClienteConDeuda__Group__3(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConDeuda__Group__2" // $ANTLR start "rule__ClienteConDeuda__Group__2__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1570:1: rule__ClienteConDeuda__Group__2__Impl : ( ', debe' ) ; public final void rule__ClienteConDeuda__Group__2__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1574:1: ( ( ', debe' ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1575:1: ( ', debe' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1575:1: ( ', debe' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1576:1: ', debe' { before(grammarAccess.getClienteConDeudaAccess().getDebeKeyword_2()); match(input,38,FOLLOW_38_in_rule__ClienteConDeuda__Group__2__Impl3383); after(grammarAccess.getClienteConDeudaAccess().getDebeKeyword_2()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConDeuda__Group__2__Impl" // $ANTLR start "rule__ClienteConDeuda__Group__3" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1589:1: rule__ClienteConDeuda__Group__3 : rule__ClienteConDeuda__Group__3__Impl ; public final void rule__ClienteConDeuda__Group__3() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1593:1: ( rule__ClienteConDeuda__Group__3__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1594:2: rule__ClienteConDeuda__Group__3__Impl { pushFollow(FOLLOW_rule__ClienteConDeuda__Group__3__Impl_in_rule__ClienteConDeuda__Group__33414); rule__ClienteConDeuda__Group__3__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConDeuda__Group__3" // $ANTLR start "rule__ClienteConDeuda__Group__3__Impl" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1600:1: rule__ClienteConDeuda__Group__3__Impl : ( ( rule__ClienteConDeuda__MontoDeudaAssignment_3 ) ) ; public final void rule__ClienteConDeuda__Group__3__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1604:1: ( ( ( rule__ClienteConDeuda__MontoDeudaAssignment_3 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1605:1: ( ( rule__ClienteConDeuda__MontoDeudaAssignment_3 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1605:1: ( ( rule__ClienteConDeuda__MontoDeudaAssignment_3 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1606:1: ( rule__ClienteConDeuda__MontoDeudaAssignment_3 ) { before(grammarAccess.getClienteConDeudaAccess().getMontoDeudaAssignment_3()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1607:1: ( rule__ClienteConDeuda__MontoDeudaAssignment_3 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1607:2: rule__ClienteConDeuda__MontoDeudaAssignment_3 { pushFollow(FOLLOW_rule__ClienteConDeuda__MontoDeudaAssignment_3_in_rule__ClienteConDeuda__Group__3__Impl3441); rule__ClienteConDeuda__MontoDeudaAssignment_3(); state._fsp--; } after(grammarAccess.getClienteConDeudaAccess().getMontoDeudaAssignment_3()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end 
"rule__ClienteConDeuda__Group__3__Impl" // $ANTLR start "rule__ClienteAlDia__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1625:1: rule__ClienteAlDia__Group__0 : rule__ClienteAlDia__Group__0__Impl rule__ClienteAlDia__Group__1 ; public final void rule__ClienteAlDia__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1629:1: ( rule__ClienteAlDia__Group__0__Impl rule__ClienteAlDia__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1630:2: rule__ClienteAlDia__Group__0__Impl rule__ClienteAlDia__Group__1 { pushFollow(FOLLOW_rule__ClienteAlDia__Group__0__Impl_in_rule__ClienteAlDia__Group__03479); rule__ClienteAlDia__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__ClienteAlDia__Group__1_in_rule__ClienteAlDia__Group__03482); rule__ClienteAlDia__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteAlDia__Group__0" // $ANTLR start "rule__ClienteAlDia__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1637:1: rule__ClienteAlDia__Group__0__Impl : ( 'Cliente' ) ; public final void rule__ClienteAlDia__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1641:1: ( ( 'Cliente' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1642:1: ( 'Cliente' ) { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1642:1: ( 'Cliente' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1643:1: 'Cliente' { before(grammarAccess.getClienteAlDiaAccess().getClienteKeyword_0()); match(input,37,FOLLOW_37_in_rule__ClienteAlDia__Group__0__Impl3510); after(grammarAccess.getClienteAlDiaAccess().getClienteKeyword_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteAlDia__Group__0__Impl" // $ANTLR start "rule__ClienteAlDia__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1656:1: rule__ClienteAlDia__Group__1 : rule__ClienteAlDia__Group__1__Impl rule__ClienteAlDia__Group__2 ; public final void rule__ClienteAlDia__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1660:1: ( rule__ClienteAlDia__Group__1__Impl rule__ClienteAlDia__Group__2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1661:2: rule__ClienteAlDia__Group__1__Impl rule__ClienteAlDia__Group__2 { pushFollow(FOLLOW_rule__ClienteAlDia__Group__1__Impl_in_rule__ClienteAlDia__Group__13541); rule__ClienteAlDia__Group__1__Impl(); state._fsp--; pushFollow(FOLLOW_rule__ClienteAlDia__Group__2_in_rule__ClienteAlDia__Group__13544); rule__ClienteAlDia__Group__2(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteAlDia__Group__1" // $ANTLR start 
"rule__ClienteAlDia__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1668:1: rule__ClienteAlDia__Group__1__Impl : ( ( rule__ClienteAlDia__NameAssignment_1 ) ) ; public final void rule__ClienteAlDia__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1672:1: ( ( ( rule__ClienteAlDia__NameAssignment_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1673:1: ( ( rule__ClienteAlDia__NameAssignment_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1673:1: ( ( rule__ClienteAlDia__NameAssignment_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1674:1: ( rule__ClienteAlDia__NameAssignment_1 ) { before(grammarAccess.getClienteAlDiaAccess().getNameAssignment_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1675:1: ( rule__ClienteAlDia__NameAssignment_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1675:2: rule__ClienteAlDia__NameAssignment_1 { pushFollow(FOLLOW_rule__ClienteAlDia__NameAssignment_1_in_rule__ClienteAlDia__Group__1__Impl3571); rule__ClienteAlDia__NameAssignment_1(); state._fsp--; } after(grammarAccess.getClienteAlDiaAccess().getNameAssignment_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteAlDia__Group__1__Impl" // $ANTLR start 
"rule__ClienteAlDia__Group__2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1685:1: rule__ClienteAlDia__Group__2 : rule__ClienteAlDia__Group__2__Impl ; public final void rule__ClienteAlDia__Group__2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1689:1: ( rule__ClienteAlDia__Group__2__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1690:2: rule__ClienteAlDia__Group__2__Impl { pushFollow(FOLLOW_rule__ClienteAlDia__Group__2__Impl_in_rule__ClienteAlDia__Group__23601); rule__ClienteAlDia__Group__2__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteAlDia__Group__2" // $ANTLR start "rule__ClienteAlDia__Group__2__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1696:1: rule__ClienteAlDia__Group__2__Impl : ( 'al dia.' ) ; public final void rule__ClienteAlDia__Group__2__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1700:1: ( ( 'al dia.' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1701:1: ( 'al dia.' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1701:1: ( 'al dia.' 
) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1702:1: 'al dia.' { before(grammarAccess.getClienteAlDiaAccess().getAlDiaKeyword_2()); match(input,39,FOLLOW_39_in_rule__ClienteAlDia__Group__2__Impl3629); after(grammarAccess.getClienteAlDiaAccess().getAlDiaKeyword_2()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteAlDia__Group__2__Impl" // $ANTLR start "rule__ClienteConCredito__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1721:1: rule__ClienteConCredito__Group__0 : rule__ClienteConCredito__Group__0__Impl rule__ClienteConCredito__Group__1 ; public final void rule__ClienteConCredito__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1725:1: ( rule__ClienteConCredito__Group__0__Impl rule__ClienteConCredito__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1726:2: rule__ClienteConCredito__Group__0__Impl rule__ClienteConCredito__Group__1 { pushFollow(FOLLOW_rule__ClienteConCredito__Group__0__Impl_in_rule__ClienteConCredito__Group__03666); rule__ClienteConCredito__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__ClienteConCredito__Group__1_in_rule__ClienteConCredito__Group__03669); rule__ClienteConCredito__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConCredito__Group__0" // $ANTLR start "rule__ClienteConCredito__Group__0__Impl" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1733:1: rule__ClienteConCredito__Group__0__Impl : ( 'Cliente' ) ; public final void rule__ClienteConCredito__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1737:1: ( ( 'Cliente' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1738:1: ( 'Cliente' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1738:1: ( 'Cliente' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1739:1: 'Cliente' { before(grammarAccess.getClienteConCreditoAccess().getClienteKeyword_0()); match(input,37,FOLLOW_37_in_rule__ClienteConCredito__Group__0__Impl3697); after(grammarAccess.getClienteConCreditoAccess().getClienteKeyword_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConCredito__Group__0__Impl" // $ANTLR start "rule__ClienteConCredito__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1752:1: rule__ClienteConCredito__Group__1 : rule__ClienteConCredito__Group__1__Impl rule__ClienteConCredito__Group__2 ; public final void rule__ClienteConCredito__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1756:1: ( rule__ClienteConCredito__Group__1__Impl 
rule__ClienteConCredito__Group__2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1757:2: rule__ClienteConCredito__Group__1__Impl rule__ClienteConCredito__Group__2 { pushFollow(FOLLOW_rule__ClienteConCredito__Group__1__Impl_in_rule__ClienteConCredito__Group__13728); rule__ClienteConCredito__Group__1__Impl(); state._fsp--; pushFollow(FOLLOW_rule__ClienteConCredito__Group__2_in_rule__ClienteConCredito__Group__13731); rule__ClienteConCredito__Group__2(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConCredito__Group__1" // $ANTLR start "rule__ClienteConCredito__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1764:1: rule__ClienteConCredito__Group__1__Impl : ( ( rule__ClienteConCredito__NameAssignment_1 ) ) ; public final void rule__ClienteConCredito__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1768:1: ( ( ( rule__ClienteConCredito__NameAssignment_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1769:1: ( ( rule__ClienteConCredito__NameAssignment_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1769:1: ( ( rule__ClienteConCredito__NameAssignment_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1770:1: ( rule__ClienteConCredito__NameAssignment_1 ) { before(grammarAccess.getClienteConCreditoAccess().getNameAssignment_1()); 
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1771:1: ( rule__ClienteConCredito__NameAssignment_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1771:2: rule__ClienteConCredito__NameAssignment_1 { pushFollow(FOLLOW_rule__ClienteConCredito__NameAssignment_1_in_rule__ClienteConCredito__Group__1__Impl3758); rule__ClienteConCredito__NameAssignment_1(); state._fsp--; } after(grammarAccess.getClienteConCreditoAccess().getNameAssignment_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConCredito__Group__1__Impl" // $ANTLR start "rule__ClienteConCredito__Group__2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1781:1: rule__ClienteConCredito__Group__2 : rule__ClienteConCredito__Group__2__Impl rule__ClienteConCredito__Group__3 ; public final void rule__ClienteConCredito__Group__2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1785:1: ( rule__ClienteConCredito__Group__2__Impl rule__ClienteConCredito__Group__3 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1786:2: rule__ClienteConCredito__Group__2__Impl rule__ClienteConCredito__Group__3 { pushFollow(FOLLOW_rule__ClienteConCredito__Group__2__Impl_in_rule__ClienteConCredito__Group__23788); rule__ClienteConCredito__Group__2__Impl(); state._fsp--; pushFollow(FOLLOW_rule__ClienteConCredito__Group__3_in_rule__ClienteConCredito__Group__23791); rule__ClienteConCredito__Group__3(); state._fsp--; } } catch 
(RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConCredito__Group__2" // $ANTLR start "rule__ClienteConCredito__Group__2__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1793:1: rule__ClienteConCredito__Group__2__Impl : ( ', tiene credito' ) ; public final void rule__ClienteConCredito__Group__2__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1797:1: ( ( ', tiene credito' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1798:1: ( ', tiene credito' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1798:1: ( ', tiene credito' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1799:1: ', tiene credito' { before(grammarAccess.getClienteConCreditoAccess().getTieneCreditoKeyword_2()); match(input,40,FOLLOW_40_in_rule__ClienteConCredito__Group__2__Impl3819); after(grammarAccess.getClienteConCreditoAccess().getTieneCreditoKeyword_2()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConCredito__Group__2__Impl" // $ANTLR start "rule__ClienteConCredito__Group__3" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1812:1: rule__ClienteConCredito__Group__3 : rule__ClienteConCredito__Group__3__Impl ; public final void rule__ClienteConCredito__Group__3() throws RecognitionException { int 
stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1816:1: ( rule__ClienteConCredito__Group__3__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1817:2: rule__ClienteConCredito__Group__3__Impl { pushFollow(FOLLOW_rule__ClienteConCredito__Group__3__Impl_in_rule__ClienteConCredito__Group__33850); rule__ClienteConCredito__Group__3__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConCredito__Group__3" // $ANTLR start "rule__ClienteConCredito__Group__3__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1823:1: rule__ClienteConCredito__Group__3__Impl : ( ( rule__ClienteConCredito__MontoCreditoAssignment_3 ) ) ; public final void rule__ClienteConCredito__Group__3__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1827:1: ( ( ( rule__ClienteConCredito__MontoCreditoAssignment_3 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1828:1: ( ( rule__ClienteConCredito__MontoCreditoAssignment_3 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1828:1: ( ( rule__ClienteConCredito__MontoCreditoAssignment_3 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1829:1: ( rule__ClienteConCredito__MontoCreditoAssignment_3 ) { 
before(grammarAccess.getClienteConCreditoAccess().getMontoCreditoAssignment_3()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1830:1: ( rule__ClienteConCredito__MontoCreditoAssignment_3 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1830:2: rule__ClienteConCredito__MontoCreditoAssignment_3 { pushFollow(FOLLOW_rule__ClienteConCredito__MontoCreditoAssignment_3_in_rule__ClienteConCredito__Group__3__Impl3877); rule__ClienteConCredito__MontoCreditoAssignment_3(); state._fsp--; } after(grammarAccess.getClienteConCreditoAccess().getMontoCreditoAssignment_3()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ClienteConCredito__Group__3__Impl" // $ANTLR start "rule__ProductoConPrecio__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1848:1: rule__ProductoConPrecio__Group__0 : rule__ProductoConPrecio__Group__0__Impl rule__ProductoConPrecio__Group__1 ; public final void rule__ProductoConPrecio__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1852:1: ( rule__ProductoConPrecio__Group__0__Impl rule__ProductoConPrecio__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1853:2: rule__ProductoConPrecio__Group__0__Impl rule__ProductoConPrecio__Group__1 { pushFollow(FOLLOW_rule__ProductoConPrecio__Group__0__Impl_in_rule__ProductoConPrecio__Group__03915); rule__ProductoConPrecio__Group__0__Impl(); state._fsp--; 
pushFollow(FOLLOW_rule__ProductoConPrecio__Group__1_in_rule__ProductoConPrecio__Group__03918); rule__ProductoConPrecio__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ProductoConPrecio__Group__0" // $ANTLR start "rule__ProductoConPrecio__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1860:1: rule__ProductoConPrecio__Group__0__Impl : ( ruleProducto ) ; public final void rule__ProductoConPrecio__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1864:1: ( ( ruleProducto ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1865:1: ( ruleProducto ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1865:1: ( ruleProducto ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1866:1: ruleProducto { before(grammarAccess.getProductoConPrecioAccess().getProductoParserRuleCall_0()); pushFollow(FOLLOW_ruleProducto_in_rule__ProductoConPrecio__Group__0__Impl3945); ruleProducto(); state._fsp--; after(grammarAccess.getProductoConPrecioAccess().getProductoParserRuleCall_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ProductoConPrecio__Group__0__Impl" // $ANTLR start "rule__ProductoConPrecio__Group__1" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1877:1: rule__ProductoConPrecio__Group__1 : rule__ProductoConPrecio__Group__1__Impl rule__ProductoConPrecio__Group__2 ; public final void rule__ProductoConPrecio__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1881:1: ( rule__ProductoConPrecio__Group__1__Impl rule__ProductoConPrecio__Group__2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1882:2: rule__ProductoConPrecio__Group__1__Impl rule__ProductoConPrecio__Group__2 { pushFollow(FOLLOW_rule__ProductoConPrecio__Group__1__Impl_in_rule__ProductoConPrecio__Group__13974); rule__ProductoConPrecio__Group__1__Impl(); state._fsp--; pushFollow(FOLLOW_rule__ProductoConPrecio__Group__2_in_rule__ProductoConPrecio__Group__13977); rule__ProductoConPrecio__Group__2(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ProductoConPrecio__Group__1" // $ANTLR start "rule__ProductoConPrecio__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1889:1: rule__ProductoConPrecio__Group__1__Impl : ( 'salen' ) ; public final void rule__ProductoConPrecio__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1893:1: ( ( 'salen' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1894:1: ( 'salen' ) { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1894:1: ( 'salen' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1895:1: 'salen' { before(grammarAccess.getProductoConPrecioAccess().getSalenKeyword_1()); match(input,41,FOLLOW_41_in_rule__ProductoConPrecio__Group__1__Impl4005); after(grammarAccess.getProductoConPrecioAccess().getSalenKeyword_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ProductoConPrecio__Group__1__Impl" // $ANTLR start "rule__ProductoConPrecio__Group__2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1908:1: rule__ProductoConPrecio__Group__2 : rule__ProductoConPrecio__Group__2__Impl rule__ProductoConPrecio__Group__3 ; public final void rule__ProductoConPrecio__Group__2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1912:1: ( rule__ProductoConPrecio__Group__2__Impl rule__ProductoConPrecio__Group__3 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1913:2: rule__ProductoConPrecio__Group__2__Impl rule__ProductoConPrecio__Group__3 { pushFollow(FOLLOW_rule__ProductoConPrecio__Group__2__Impl_in_rule__ProductoConPrecio__Group__24036); rule__ProductoConPrecio__Group__2__Impl(); state._fsp--; pushFollow(FOLLOW_rule__ProductoConPrecio__Group__3_in_rule__ProductoConPrecio__Group__24039); rule__ProductoConPrecio__Group__3(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } 
// $ANTLR end "rule__ProductoConPrecio__Group__2" // $ANTLR start "rule__ProductoConPrecio__Group__2__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1920:1: rule__ProductoConPrecio__Group__2__Impl : ( ( rule__ProductoConPrecio__PrecioAssignment_2 ) ) ; public final void rule__ProductoConPrecio__Group__2__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1924:1: ( ( ( rule__ProductoConPrecio__PrecioAssignment_2 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1925:1: ( ( rule__ProductoConPrecio__PrecioAssignment_2 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1925:1: ( ( rule__ProductoConPrecio__PrecioAssignment_2 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1926:1: ( rule__ProductoConPrecio__PrecioAssignment_2 ) { before(grammarAccess.getProductoConPrecioAccess().getPrecioAssignment_2()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1927:1: ( rule__ProductoConPrecio__PrecioAssignment_2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1927:2: rule__ProductoConPrecio__PrecioAssignment_2 { pushFollow(FOLLOW_rule__ProductoConPrecio__PrecioAssignment_2_in_rule__ProductoConPrecio__Group__2__Impl4066); rule__ProductoConPrecio__PrecioAssignment_2(); state._fsp--; } after(grammarAccess.getProductoConPrecioAccess().getPrecioAssignment_2()); } } } catch (RecognitionException re) { 
reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ProductoConPrecio__Group__2__Impl" // $ANTLR start "rule__ProductoConPrecio__Group__3" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1937:1: rule__ProductoConPrecio__Group__3 : rule__ProductoConPrecio__Group__3__Impl ; public final void rule__ProductoConPrecio__Group__3() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1941:1: ( rule__ProductoConPrecio__Group__3__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1942:2: rule__ProductoConPrecio__Group__3__Impl { pushFollow(FOLLOW_rule__ProductoConPrecio__Group__3__Impl_in_rule__ProductoConPrecio__Group__34096); rule__ProductoConPrecio__Group__3__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ProductoConPrecio__Group__3" // $ANTLR start "rule__ProductoConPrecio__Group__3__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1948:1: rule__ProductoConPrecio__Group__3__Impl : ( '.' ) ; public final void rule__ProductoConPrecio__Group__3__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1952:1: ( ( '.' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1953:1: ( '.' 
) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1953:1: ( '.' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1954:1: '.' { before(grammarAccess.getProductoConPrecioAccess().getFullStopKeyword_3()); match(input,42,FOLLOW_42_in_rule__ProductoConPrecio__Group__3__Impl4124); after(grammarAccess.getProductoConPrecioAccess().getFullStopKeyword_3()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ProductoConPrecio__Group__3__Impl" // $ANTLR start "rule__Producto__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1975:1: rule__Producto__Group__0 : rule__Producto__Group__0__Impl rule__Producto__Group__1 ; public final void rule__Producto__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1979:1: ( rule__Producto__Group__0__Impl rule__Producto__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1980:2: rule__Producto__Group__0__Impl rule__Producto__Group__1 { pushFollow(FOLLOW_rule__Producto__Group__0__Impl_in_rule__Producto__Group__04163); rule__Producto__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Producto__Group__1_in_rule__Producto__Group__04166); rule__Producto__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Producto__Group__0" // $ANTLR start "rule__Producto__Group__0__Impl" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1987:1: rule__Producto__Group__0__Impl : ( ( rule__Producto__Alternatives_0 ) ) ; public final void rule__Producto__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1991:1: ( ( ( rule__Producto__Alternatives_0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1992:1: ( ( rule__Producto__Alternatives_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1992:1: ( ( rule__Producto__Alternatives_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1993:1: ( rule__Producto__Alternatives_0 ) { before(grammarAccess.getProductoAccess().getAlternatives_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1994:1: ( rule__Producto__Alternatives_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:1994:2: rule__Producto__Alternatives_0 { pushFollow(FOLLOW_rule__Producto__Alternatives_0_in_rule__Producto__Group__0__Impl4193); rule__Producto__Alternatives_0(); state._fsp--; } after(grammarAccess.getProductoAccess().getAlternatives_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Producto__Group__0__Impl" // $ANTLR start "rule__Producto__Group__1" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2004:1: rule__Producto__Group__1 : rule__Producto__Group__1__Impl ; public final void rule__Producto__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2008:1: ( rule__Producto__Group__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2009:2: rule__Producto__Group__1__Impl { pushFollow(FOLLOW_rule__Producto__Group__1__Impl_in_rule__Producto__Group__14223); rule__Producto__Group__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Producto__Group__1" // $ANTLR start "rule__Producto__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2015:1: rule__Producto__Group__1__Impl : ( ( rule__Producto__NameAssignment_1 ) ) ; public final void rule__Producto__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2019:1: ( ( ( rule__Producto__NameAssignment_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2020:1: ( ( rule__Producto__NameAssignment_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2020:1: ( ( rule__Producto__NameAssignment_1 ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2021:1: ( rule__Producto__NameAssignment_1 ) { before(grammarAccess.getProductoAccess().getNameAssignment_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2022:1: ( rule__Producto__NameAssignment_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2022:2: rule__Producto__NameAssignment_1 { pushFollow(FOLLOW_rule__Producto__NameAssignment_1_in_rule__Producto__Group__1__Impl4250); rule__Producto__NameAssignment_1(); state._fsp--; } after(grammarAccess.getProductoAccess().getNameAssignment_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Producto__Group__1__Impl" // $ANTLR start "rule__PrecioPorPeso__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2036:1: rule__PrecioPorPeso__Group__0 : rule__PrecioPorPeso__Group__0__Impl rule__PrecioPorPeso__Group__1 ; public final void rule__PrecioPorPeso__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2040:1: ( rule__PrecioPorPeso__Group__0__Impl rule__PrecioPorPeso__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2041:2: rule__PrecioPorPeso__Group__0__Impl rule__PrecioPorPeso__Group__1 { pushFollow(FOLLOW_rule__PrecioPorPeso__Group__0__Impl_in_rule__PrecioPorPeso__Group__04284); rule__PrecioPorPeso__Group__0__Impl(); state._fsp--; 
pushFollow(FOLLOW_rule__PrecioPorPeso__Group__1_in_rule__PrecioPorPeso__Group__04287); rule__PrecioPorPeso__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group__0" // $ANTLR start "rule__PrecioPorPeso__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2048:1: rule__PrecioPorPeso__Group__0__Impl : ( ( rule__PrecioPorPeso__PrecioAssignment_0 ) ) ; public final void rule__PrecioPorPeso__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2052:1: ( ( ( rule__PrecioPorPeso__PrecioAssignment_0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2053:1: ( ( rule__PrecioPorPeso__PrecioAssignment_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2053:1: ( ( rule__PrecioPorPeso__PrecioAssignment_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2054:1: ( rule__PrecioPorPeso__PrecioAssignment_0 ) { before(grammarAccess.getPrecioPorPesoAccess().getPrecioAssignment_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2055:1: ( rule__PrecioPorPeso__PrecioAssignment_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2055:2: rule__PrecioPorPeso__PrecioAssignment_0 { 
pushFollow(FOLLOW_rule__PrecioPorPeso__PrecioAssignment_0_in_rule__PrecioPorPeso__Group__0__Impl4314); rule__PrecioPorPeso__PrecioAssignment_0(); state._fsp--; } after(grammarAccess.getPrecioPorPesoAccess().getPrecioAssignment_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group__0__Impl" // $ANTLR start "rule__PrecioPorPeso__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2065:1: rule__PrecioPorPeso__Group__1 : rule__PrecioPorPeso__Group__1__Impl ; public final void rule__PrecioPorPeso__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2069:1: ( rule__PrecioPorPeso__Group__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2070:2: rule__PrecioPorPeso__Group__1__Impl { pushFollow(FOLLOW_rule__PrecioPorPeso__Group__1__Impl_in_rule__PrecioPorPeso__Group__14344); rule__PrecioPorPeso__Group__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group__1" // $ANTLR start "rule__PrecioPorPeso__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2076:1: rule__PrecioPorPeso__Group__1__Impl : ( ( rule__PrecioPorPeso__Alternatives_1 ) ) ; public final void rule__PrecioPorPeso__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2080:1: ( ( ( rule__PrecioPorPeso__Alternatives_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2081:1: ( ( rule__PrecioPorPeso__Alternatives_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2081:1: ( ( rule__PrecioPorPeso__Alternatives_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2082:1: ( rule__PrecioPorPeso__Alternatives_1 ) { before(grammarAccess.getPrecioPorPesoAccess().getAlternatives_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2083:1: ( rule__PrecioPorPeso__Alternatives_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2083:2: rule__PrecioPorPeso__Alternatives_1 { pushFollow(FOLLOW_rule__PrecioPorPeso__Alternatives_1_in_rule__PrecioPorPeso__Group__1__Impl4371); rule__PrecioPorPeso__Alternatives_1(); state._fsp--; } after(grammarAccess.getPrecioPorPesoAccess().getAlternatives_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group__1__Impl" // $ANTLR start "rule__PrecioPorPeso__Group_1_0__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2097:1: rule__PrecioPorPeso__Group_1_0__0 : rule__PrecioPorPeso__Group_1_0__0__Impl rule__PrecioPorPeso__Group_1_0__1 ; public final void rule__PrecioPorPeso__Group_1_0__0() throws RecognitionException { int stackSize = keepStackSize(); try { 
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2101:1: ( rule__PrecioPorPeso__Group_1_0__0__Impl rule__PrecioPorPeso__Group_1_0__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2102:2: rule__PrecioPorPeso__Group_1_0__0__Impl rule__PrecioPorPeso__Group_1_0__1 { pushFollow(FOLLOW_rule__PrecioPorPeso__Group_1_0__0__Impl_in_rule__PrecioPorPeso__Group_1_0__04405); rule__PrecioPorPeso__Group_1_0__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__PrecioPorPeso__Group_1_0__1_in_rule__PrecioPorPeso__Group_1_0__04408); rule__PrecioPorPeso__Group_1_0__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group_1_0__0" // $ANTLR start "rule__PrecioPorPeso__Group_1_0__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2109:1: rule__PrecioPorPeso__Group_1_0__0__Impl : ( 'los' ) ; public final void rule__PrecioPorPeso__Group_1_0__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2113:1: ( ( 'los' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2114:1: ( 'los' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2114:1: ( 'los' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2115:1: 'los' { before(grammarAccess.getPrecioPorPesoAccess().getLosKeyword_1_0_0()); 
match(input,13,FOLLOW_13_in_rule__PrecioPorPeso__Group_1_0__0__Impl4436); after(grammarAccess.getPrecioPorPesoAccess().getLosKeyword_1_0_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group_1_0__0__Impl" // $ANTLR start "rule__PrecioPorPeso__Group_1_0__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2128:1: rule__PrecioPorPeso__Group_1_0__1 : rule__PrecioPorPeso__Group_1_0__1__Impl ; public final void rule__PrecioPorPeso__Group_1_0__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2132:1: ( rule__PrecioPorPeso__Group_1_0__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2133:2: rule__PrecioPorPeso__Group_1_0__1__Impl { pushFollow(FOLLOW_rule__PrecioPorPeso__Group_1_0__1__Impl_in_rule__PrecioPorPeso__Group_1_0__14467); rule__PrecioPorPeso__Group_1_0__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group_1_0__1" // $ANTLR start "rule__PrecioPorPeso__Group_1_0__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2139:1: rule__PrecioPorPeso__Group_1_0__1__Impl : ( ( rule__PrecioPorPeso__PesajeAssignment_1_0_1 ) ) ; public final void rule__PrecioPorPeso__Group_1_0__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2143:1: ( ( ( 
rule__PrecioPorPeso__PesajeAssignment_1_0_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2144:1: ( ( rule__PrecioPorPeso__PesajeAssignment_1_0_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2144:1: ( ( rule__PrecioPorPeso__PesajeAssignment_1_0_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2145:1: ( rule__PrecioPorPeso__PesajeAssignment_1_0_1 ) { before(grammarAccess.getPrecioPorPesoAccess().getPesajeAssignment_1_0_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2146:1: ( rule__PrecioPorPeso__PesajeAssignment_1_0_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2146:2: rule__PrecioPorPeso__PesajeAssignment_1_0_1 { pushFollow(FOLLOW_rule__PrecioPorPeso__PesajeAssignment_1_0_1_in_rule__PrecioPorPeso__Group_1_0__1__Impl4494); rule__PrecioPorPeso__PesajeAssignment_1_0_1(); state._fsp--; } after(grammarAccess.getPrecioPorPesoAccess().getPesajeAssignment_1_0_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group_1_0__1__Impl" // $ANTLR start "rule__PrecioPorPeso__Group_1_1__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2160:1: rule__PrecioPorPeso__Group_1_1__0 : rule__PrecioPorPeso__Group_1_1__0__Impl rule__PrecioPorPeso__Group_1_1__1 ; public final void rule__PrecioPorPeso__Group_1_1__0() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2164:1: ( rule__PrecioPorPeso__Group_1_1__0__Impl rule__PrecioPorPeso__Group_1_1__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2165:2: rule__PrecioPorPeso__Group_1_1__0__Impl rule__PrecioPorPeso__Group_1_1__1 { pushFollow(FOLLOW_rule__PrecioPorPeso__Group_1_1__0__Impl_in_rule__PrecioPorPeso__Group_1_1__04528); rule__PrecioPorPeso__Group_1_1__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__PrecioPorPeso__Group_1_1__1_in_rule__PrecioPorPeso__Group_1_1__04531); rule__PrecioPorPeso__Group_1_1__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group_1_1__0" // $ANTLR start "rule__PrecioPorPeso__Group_1_1__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2172:1: rule__PrecioPorPeso__Group_1_1__0__Impl : ( 'el' ) ; public final void rule__PrecioPorPeso__Group_1_1__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2176:1: ( ( 'el' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2177:1: ( 'el' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2177:1: ( 'el' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2178:1: 'el' { before(grammarAccess.getPrecioPorPesoAccess().getElKeyword_1_1_0()); 
match(input,43,FOLLOW_43_in_rule__PrecioPorPeso__Group_1_1__0__Impl4559); after(grammarAccess.getPrecioPorPesoAccess().getElKeyword_1_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group_1_1__0__Impl" // $ANTLR start "rule__PrecioPorPeso__Group_1_1__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2191:1: rule__PrecioPorPeso__Group_1_1__1 : rule__PrecioPorPeso__Group_1_1__1__Impl ; public final void rule__PrecioPorPeso__Group_1_1__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2195:1: ( rule__PrecioPorPeso__Group_1_1__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2196:2: rule__PrecioPorPeso__Group_1_1__1__Impl { pushFollow(FOLLOW_rule__PrecioPorPeso__Group_1_1__1__Impl_in_rule__PrecioPorPeso__Group_1_1__14590); rule__PrecioPorPeso__Group_1_1__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group_1_1__1" // $ANTLR start "rule__PrecioPorPeso__Group_1_1__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2202:1: rule__PrecioPorPeso__Group_1_1__1__Impl : ( ( rule__PrecioPorPeso__PesajeAssignment_1_1_1 ) ) ; public final void rule__PrecioPorPeso__Group_1_1__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2206:1: ( ( ( 
rule__PrecioPorPeso__PesajeAssignment_1_1_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2207:1: ( ( rule__PrecioPorPeso__PesajeAssignment_1_1_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2207:1: ( ( rule__PrecioPorPeso__PesajeAssignment_1_1_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2208:1: ( rule__PrecioPorPeso__PesajeAssignment_1_1_1 ) { before(grammarAccess.getPrecioPorPesoAccess().getPesajeAssignment_1_1_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2209:1: ( rule__PrecioPorPeso__PesajeAssignment_1_1_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2209:2: rule__PrecioPorPeso__PesajeAssignment_1_1_1 { pushFollow(FOLLOW_rule__PrecioPorPeso__PesajeAssignment_1_1_1_in_rule__PrecioPorPeso__Group_1_1__1__Impl4617); rule__PrecioPorPeso__PesajeAssignment_1_1_1(); state._fsp--; } after(grammarAccess.getPrecioPorPesoAccess().getPesajeAssignment_1_1_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PrecioPorPeso__Group_1_1__1__Impl" // $ANTLR start "rule__MontoDinero__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2223:1: rule__MontoDinero__Group__0 : rule__MontoDinero__Group__0__Impl rule__MontoDinero__Group__1 ; public final void rule__MontoDinero__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2227:1: ( rule__MontoDinero__Group__0__Impl rule__MontoDinero__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2228:2: rule__MontoDinero__Group__0__Impl rule__MontoDinero__Group__1 { pushFollow(FOLLOW_rule__MontoDinero__Group__0__Impl_in_rule__MontoDinero__Group__04651); rule__MontoDinero__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__MontoDinero__Group__1_in_rule__MontoDinero__Group__04654); rule__MontoDinero__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MontoDinero__Group__0" // $ANTLR start "rule__MontoDinero__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2235:1: rule__MontoDinero__Group__0__Impl : ( ( rule__MontoDinero__PrecioAssignment_0 ) ) ; public final void rule__MontoDinero__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2239:1: ( ( ( rule__MontoDinero__PrecioAssignment_0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2240:1: ( ( rule__MontoDinero__PrecioAssignment_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2240:1: ( ( rule__MontoDinero__PrecioAssignment_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2241:1: ( rule__MontoDinero__PrecioAssignment_0 ) 
{ before(grammarAccess.getMontoDineroAccess().getPrecioAssignment_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2242:1: ( rule__MontoDinero__PrecioAssignment_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2242:2: rule__MontoDinero__PrecioAssignment_0 { pushFollow(FOLLOW_rule__MontoDinero__PrecioAssignment_0_in_rule__MontoDinero__Group__0__Impl4681); rule__MontoDinero__PrecioAssignment_0(); state._fsp--; } after(grammarAccess.getMontoDineroAccess().getPrecioAssignment_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MontoDinero__Group__0__Impl" // $ANTLR start "rule__MontoDinero__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2252:1: rule__MontoDinero__Group__1 : rule__MontoDinero__Group__1__Impl ; public final void rule__MontoDinero__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2256:1: ( rule__MontoDinero__Group__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2257:2: rule__MontoDinero__Group__1__Impl { pushFollow(FOLLOW_rule__MontoDinero__Group__1__Impl_in_rule__MontoDinero__Group__14711); rule__MontoDinero__Group__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MontoDinero__Group__1" // $ANTLR start "rule__MontoDinero__Group__1__Impl" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2263:1: rule__MontoDinero__Group__1__Impl : ( ( rule__MontoDinero__Alternatives_1 ) ) ; public final void rule__MontoDinero__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2267:1: ( ( ( rule__MontoDinero__Alternatives_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2268:1: ( ( rule__MontoDinero__Alternatives_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2268:1: ( ( rule__MontoDinero__Alternatives_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2269:1: ( rule__MontoDinero__Alternatives_1 ) { before(grammarAccess.getMontoDineroAccess().getAlternatives_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2270:1: ( rule__MontoDinero__Alternatives_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2270:2: rule__MontoDinero__Alternatives_1 { pushFollow(FOLLOW_rule__MontoDinero__Alternatives_1_in_rule__MontoDinero__Group__1__Impl4738); rule__MontoDinero__Alternatives_1(); state._fsp--; } after(grammarAccess.getMontoDineroAccess().getAlternatives_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MontoDinero__Group__1__Impl" // $ANTLR start "rule__PesoMagnitudVariable__Group__0" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2284:1: rule__PesoMagnitudVariable__Group__0 : rule__PesoMagnitudVariable__Group__0__Impl rule__PesoMagnitudVariable__Group__1 ; public final void rule__PesoMagnitudVariable__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2288:1: ( rule__PesoMagnitudVariable__Group__0__Impl rule__PesoMagnitudVariable__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2289:2: rule__PesoMagnitudVariable__Group__0__Impl rule__PesoMagnitudVariable__Group__1 { pushFollow(FOLLOW_rule__PesoMagnitudVariable__Group__0__Impl_in_rule__PesoMagnitudVariable__Group__04772); rule__PesoMagnitudVariable__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__PesoMagnitudVariable__Group__1_in_rule__PesoMagnitudVariable__Group__04775); rule__PesoMagnitudVariable__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudVariable__Group__0" // $ANTLR start "rule__PesoMagnitudVariable__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2296:1: rule__PesoMagnitudVariable__Group__0__Impl : ( ( rule__PesoMagnitudVariable__MagnitudAssignment_0 ) ) ; public final void rule__PesoMagnitudVariable__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2300:1: ( ( ( rule__PesoMagnitudVariable__MagnitudAssignment_0 ) ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2301:1: ( ( rule__PesoMagnitudVariable__MagnitudAssignment_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2301:1: ( ( rule__PesoMagnitudVariable__MagnitudAssignment_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2302:1: ( rule__PesoMagnitudVariable__MagnitudAssignment_0 ) { before(grammarAccess.getPesoMagnitudVariableAccess().getMagnitudAssignment_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2303:1: ( rule__PesoMagnitudVariable__MagnitudAssignment_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2303:2: rule__PesoMagnitudVariable__MagnitudAssignment_0 { pushFollow(FOLLOW_rule__PesoMagnitudVariable__MagnitudAssignment_0_in_rule__PesoMagnitudVariable__Group__0__Impl4802); rule__PesoMagnitudVariable__MagnitudAssignment_0(); state._fsp--; } after(grammarAccess.getPesoMagnitudVariableAccess().getMagnitudAssignment_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudVariable__Group__0__Impl" // $ANTLR start "rule__PesoMagnitudVariable__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2313:1: rule__PesoMagnitudVariable__Group__1 : rule__PesoMagnitudVariable__Group__1__Impl ; public final void rule__PesoMagnitudVariable__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2317:1: ( rule__PesoMagnitudVariable__Group__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2318:2: rule__PesoMagnitudVariable__Group__1__Impl { pushFollow(FOLLOW_rule__PesoMagnitudVariable__Group__1__Impl_in_rule__PesoMagnitudVariable__Group__14832); rule__PesoMagnitudVariable__Group__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudVariable__Group__1" // $ANTLR start "rule__PesoMagnitudVariable__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2324:1: rule__PesoMagnitudVariable__Group__1__Impl : ( ( rule__PesoMagnitudVariable__MedidaPesoAssignment_1 ) ) ; public final void rule__PesoMagnitudVariable__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2328:1: ( ( ( rule__PesoMagnitudVariable__MedidaPesoAssignment_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2329:1: ( ( rule__PesoMagnitudVariable__MedidaPesoAssignment_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2329:1: ( ( rule__PesoMagnitudVariable__MedidaPesoAssignment_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2330:1: ( rule__PesoMagnitudVariable__MedidaPesoAssignment_1 ) { 
before(grammarAccess.getPesoMagnitudVariableAccess().getMedidaPesoAssignment_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2331:1: ( rule__PesoMagnitudVariable__MedidaPesoAssignment_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2331:2: rule__PesoMagnitudVariable__MedidaPesoAssignment_1 { pushFollow(FOLLOW_rule__PesoMagnitudVariable__MedidaPesoAssignment_1_in_rule__PesoMagnitudVariable__Group__1__Impl4859); rule__PesoMagnitudVariable__MedidaPesoAssignment_1(); state._fsp--; } after(grammarAccess.getPesoMagnitudVariableAccess().getMedidaPesoAssignment_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudVariable__Group__1__Impl" // $ANTLR start "rule__PesoMagnitudFija__Group_1__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2345:1: rule__PesoMagnitudFija__Group_1__0 : rule__PesoMagnitudFija__Group_1__0__Impl rule__PesoMagnitudFija__Group_1__1 ; public final void rule__PesoMagnitudFija__Group_1__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2349:1: ( rule__PesoMagnitudFija__Group_1__0__Impl rule__PesoMagnitudFija__Group_1__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2350:2: rule__PesoMagnitudFija__Group_1__0__Impl rule__PesoMagnitudFija__Group_1__1 { pushFollow(FOLLOW_rule__PesoMagnitudFija__Group_1__0__Impl_in_rule__PesoMagnitudFija__Group_1__04893); rule__PesoMagnitudFija__Group_1__0__Impl(); state._fsp--; 
pushFollow(FOLLOW_rule__PesoMagnitudFija__Group_1__1_in_rule__PesoMagnitudFija__Group_1__04896); rule__PesoMagnitudFija__Group_1__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Group_1__0" // $ANTLR start "rule__PesoMagnitudFija__Group_1__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2357:1: rule__PesoMagnitudFija__Group_1__0__Impl : ( ( 'un' )? ) ; public final void rule__PesoMagnitudFija__Group_1__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2361:1: ( ( ( 'un' )? ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2362:1: ( ( 'un' )? ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2362:1: ( ( 'un' )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2363:1: ( 'un' )? { before(grammarAccess.getPesoMagnitudFijaAccess().getUnKeyword_1_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2364:1: ( 'un' )? 
int alt21=2; int LA21_0 = input.LA(1); if ( (LA21_0==44) ) { alt21=1; } switch (alt21) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2365:2: 'un' { match(input,44,FOLLOW_44_in_rule__PesoMagnitudFija__Group_1__0__Impl4925); } break; } after(grammarAccess.getPesoMagnitudFijaAccess().getUnKeyword_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Group_1__0__Impl" // $ANTLR start "rule__PesoMagnitudFija__Group_1__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2376:1: rule__PesoMagnitudFija__Group_1__1 : rule__PesoMagnitudFija__Group_1__1__Impl ; public final void rule__PesoMagnitudFija__Group_1__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2380:1: ( rule__PesoMagnitudFija__Group_1__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2381:2: rule__PesoMagnitudFija__Group_1__1__Impl { pushFollow(FOLLOW_rule__PesoMagnitudFija__Group_1__1__Impl_in_rule__PesoMagnitudFija__Group_1__14958); rule__PesoMagnitudFija__Group_1__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Group_1__1" // $ANTLR start "rule__PesoMagnitudFija__Group_1__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2387:1: rule__PesoMagnitudFija__Group_1__1__Impl : ( ( rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1 ) ) 
; public final void rule__PesoMagnitudFija__Group_1__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2391:1: ( ( ( rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2392:1: ( ( rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2392:1: ( ( rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2393:1: ( rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1 ) { before(grammarAccess.getPesoMagnitudFijaAccess().getPesaCuartoKiloAssignment_1_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2394:1: ( rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2394:2: rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1 { pushFollow(FOLLOW_rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1_in_rule__PesoMagnitudFija__Group_1__1__Impl4985); rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1(); state._fsp--; } after(grammarAccess.getPesoMagnitudFijaAccess().getPesaCuartoKiloAssignment_1_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Group_1__1__Impl" // $ANTLR start "rule__PesoMagnitudFija__Group_2__0" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2408:1: rule__PesoMagnitudFija__Group_2__0 : rule__PesoMagnitudFija__Group_2__0__Impl rule__PesoMagnitudFija__Group_2__1 ; public final void rule__PesoMagnitudFija__Group_2__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2412:1: ( rule__PesoMagnitudFija__Group_2__0__Impl rule__PesoMagnitudFija__Group_2__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2413:2: rule__PesoMagnitudFija__Group_2__0__Impl rule__PesoMagnitudFija__Group_2__1 { pushFollow(FOLLOW_rule__PesoMagnitudFija__Group_2__0__Impl_in_rule__PesoMagnitudFija__Group_2__05019); rule__PesoMagnitudFija__Group_2__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__PesoMagnitudFija__Group_2__1_in_rule__PesoMagnitudFija__Group_2__05022); rule__PesoMagnitudFija__Group_2__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Group_2__0" // $ANTLR start "rule__PesoMagnitudFija__Group_2__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2420:1: rule__PesoMagnitudFija__Group_2__0__Impl : ( ( 'un' )? ) ; public final void rule__PesoMagnitudFija__Group_2__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2424:1: ( ( ( 'un' )? 
) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2425:1: ( ( 'un' )? ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2425:1: ( ( 'un' )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2426:1: ( 'un' )? { before(grammarAccess.getPesoMagnitudFijaAccess().getUnKeyword_2_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2427:1: ( 'un' )? int alt22=2; int LA22_0 = input.LA(1); if ( (LA22_0==44) ) { alt22=1; } switch (alt22) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2428:2: 'un' { match(input,44,FOLLOW_44_in_rule__PesoMagnitudFija__Group_2__0__Impl5051); } break; } after(grammarAccess.getPesoMagnitudFijaAccess().getUnKeyword_2_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Group_2__0__Impl" // $ANTLR start "rule__PesoMagnitudFija__Group_2__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2439:1: rule__PesoMagnitudFija__Group_2__1 : rule__PesoMagnitudFija__Group_2__1__Impl ; public final void rule__PesoMagnitudFija__Group_2__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2443:1: ( rule__PesoMagnitudFija__Group_2__1__Impl ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2444:2: rule__PesoMagnitudFija__Group_2__1__Impl { pushFollow(FOLLOW_rule__PesoMagnitudFija__Group_2__1__Impl_in_rule__PesoMagnitudFija__Group_2__15084); rule__PesoMagnitudFija__Group_2__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Group_2__1" // $ANTLR start "rule__PesoMagnitudFija__Group_2__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2450:1: rule__PesoMagnitudFija__Group_2__1__Impl : ( ( rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1 ) ) ; public final void rule__PesoMagnitudFija__Group_2__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2454:1: ( ( ( rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2455:1: ( ( rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2455:1: ( ( rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2456:1: ( rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1 ) { before(grammarAccess.getPesoMagnitudFijaAccess().getPesaUnKiloAssignment_2_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2457:1: ( 
rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2457:2: rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1 { pushFollow(FOLLOW_rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1_in_rule__PesoMagnitudFija__Group_2__1__Impl5111); rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1(); state._fsp--; } after(grammarAccess.getPesoMagnitudFijaAccess().getPesaUnKiloAssignment_2_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__Group_2__1__Impl" // $ANTLR start "rule__MedidaPesoGramos__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2471:1: rule__MedidaPesoGramos__Group__0 : rule__MedidaPesoGramos__Group__0__Impl rule__MedidaPesoGramos__Group__1 ; public final void rule__MedidaPesoGramos__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2475:1: ( rule__MedidaPesoGramos__Group__0__Impl rule__MedidaPesoGramos__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2476:2: rule__MedidaPesoGramos__Group__0__Impl rule__MedidaPesoGramos__Group__1 { pushFollow(FOLLOW_rule__MedidaPesoGramos__Group__0__Impl_in_rule__MedidaPesoGramos__Group__05145); rule__MedidaPesoGramos__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__MedidaPesoGramos__Group__1_in_rule__MedidaPesoGramos__Group__05148); rule__MedidaPesoGramos__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end 
"rule__MedidaPesoGramos__Group__0" // $ANTLR start "rule__MedidaPesoGramos__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2483:1: rule__MedidaPesoGramos__Group__0__Impl : ( () ) ; public final void rule__MedidaPesoGramos__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2487:1: ( ( () ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2488:1: ( () ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2488:1: ( () ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2489:1: () { before(grammarAccess.getMedidaPesoGramosAccess().getMedidaPesoGramosAction_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2490:1: () // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2492:1: { } after(grammarAccess.getMedidaPesoGramosAccess().getMedidaPesoGramosAction_0()); } } } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MedidaPesoGramos__Group__0__Impl" // $ANTLR start "rule__MedidaPesoGramos__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2502:1: rule__MedidaPesoGramos__Group__1 : rule__MedidaPesoGramos__Group__1__Impl ; public final void rule__MedidaPesoGramos__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2506:1: ( rule__MedidaPesoGramos__Group__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2507:2: rule__MedidaPesoGramos__Group__1__Impl { pushFollow(FOLLOW_rule__MedidaPesoGramos__Group__1__Impl_in_rule__MedidaPesoGramos__Group__15206); rule__MedidaPesoGramos__Group__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MedidaPesoGramos__Group__1" // $ANTLR start "rule__MedidaPesoGramos__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2513:1: rule__MedidaPesoGramos__Group__1__Impl : ( 'gramos' ) ; public final void rule__MedidaPesoGramos__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2517:1: ( ( 'gramos' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2518:1: ( 'gramos' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2518:1: ( 'gramos' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2519:1: 'gramos' { before(grammarAccess.getMedidaPesoGramosAccess().getGramosKeyword_1()); match(input,45,FOLLOW_45_in_rule__MedidaPesoGramos__Group__1__Impl5234); after(grammarAccess.getMedidaPesoGramosAccess().getGramosKeyword_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally 
{ restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MedidaPesoGramos__Group__1__Impl" // $ANTLR start "rule__MedidaPesoKilo__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2536:1: rule__MedidaPesoKilo__Group__0 : rule__MedidaPesoKilo__Group__0__Impl rule__MedidaPesoKilo__Group__1 ; public final void rule__MedidaPesoKilo__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2540:1: ( rule__MedidaPesoKilo__Group__0__Impl rule__MedidaPesoKilo__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2541:2: rule__MedidaPesoKilo__Group__0__Impl rule__MedidaPesoKilo__Group__1 { pushFollow(FOLLOW_rule__MedidaPesoKilo__Group__0__Impl_in_rule__MedidaPesoKilo__Group__05269); rule__MedidaPesoKilo__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__MedidaPesoKilo__Group__1_in_rule__MedidaPesoKilo__Group__05272); rule__MedidaPesoKilo__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MedidaPesoKilo__Group__0" // $ANTLR start "rule__MedidaPesoKilo__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2548:1: rule__MedidaPesoKilo__Group__0__Impl : ( () ) ; public final void rule__MedidaPesoKilo__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2552:1: ( ( () ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2553:1: ( () ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2553:1: ( () ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2554:1: () { before(grammarAccess.getMedidaPesoKiloAccess().getMedidaPesoKiloAction_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2555:1: () // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2557:1: { } after(grammarAccess.getMedidaPesoKiloAccess().getMedidaPesoKiloAction_0()); } } } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MedidaPesoKilo__Group__0__Impl" // $ANTLR start "rule__MedidaPesoKilo__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2567:1: rule__MedidaPesoKilo__Group__1 : rule__MedidaPesoKilo__Group__1__Impl ; public final void rule__MedidaPesoKilo__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2571:1: ( rule__MedidaPesoKilo__Group__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2572:2: rule__MedidaPesoKilo__Group__1__Impl { pushFollow(FOLLOW_rule__MedidaPesoKilo__Group__1__Impl_in_rule__MedidaPesoKilo__Group__15330); rule__MedidaPesoKilo__Group__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { 
restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MedidaPesoKilo__Group__1" // $ANTLR start "rule__MedidaPesoKilo__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2578:1: rule__MedidaPesoKilo__Group__1__Impl : ( ( rule__MedidaPesoKilo__Alternatives_1 ) ) ; public final void rule__MedidaPesoKilo__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2582:1: ( ( ( rule__MedidaPesoKilo__Alternatives_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2583:1: ( ( rule__MedidaPesoKilo__Alternatives_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2583:1: ( ( rule__MedidaPesoKilo__Alternatives_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2584:1: ( rule__MedidaPesoKilo__Alternatives_1 ) { before(grammarAccess.getMedidaPesoKiloAccess().getAlternatives_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2585:1: ( rule__MedidaPesoKilo__Alternatives_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2585:2: rule__MedidaPesoKilo__Alternatives_1 { pushFollow(FOLLOW_rule__MedidaPesoKilo__Alternatives_1_in_rule__MedidaPesoKilo__Group__1__Impl5357); rule__MedidaPesoKilo__Alternatives_1(); state._fsp--; } after(grammarAccess.getMedidaPesoKiloAccess().getAlternatives_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { 
restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__MedidaPesoKilo__Group__1__Impl" // $ANTLR start "rule__Venta__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2599:1: rule__Venta__Group__0 : rule__Venta__Group__0__Impl rule__Venta__Group__1 ; public final void rule__Venta__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2603:1: ( rule__Venta__Group__0__Impl rule__Venta__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2604:2: rule__Venta__Group__0__Impl rule__Venta__Group__1 { pushFollow(FOLLOW_rule__Venta__Group__0__Impl_in_rule__Venta__Group__05391); rule__Venta__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group__1_in_rule__Venta__Group__05394); rule__Venta__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__0" // $ANTLR start "rule__Venta__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2611:1: rule__Venta__Group__0__Impl : ( ( rule__Venta__CompradorAssignment_0 ) ) ; public final void rule__Venta__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2615:1: ( ( ( rule__Venta__CompradorAssignment_0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2616:1: ( ( rule__Venta__CompradorAssignment_0 ) ) 
{ // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2616:1: ( ( rule__Venta__CompradorAssignment_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2617:1: ( rule__Venta__CompradorAssignment_0 ) { before(grammarAccess.getVentaAccess().getCompradorAssignment_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2618:1: ( rule__Venta__CompradorAssignment_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2618:2: rule__Venta__CompradorAssignment_0 { pushFollow(FOLLOW_rule__Venta__CompradorAssignment_0_in_rule__Venta__Group__0__Impl5421); rule__Venta__CompradorAssignment_0(); state._fsp--; } after(grammarAccess.getVentaAccess().getCompradorAssignment_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__0__Impl" // $ANTLR start "rule__Venta__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2628:1: rule__Venta__Group__1 : rule__Venta__Group__1__Impl rule__Venta__Group__2 ; public final void rule__Venta__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2632:1: ( rule__Venta__Group__1__Impl rule__Venta__Group__2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2633:2: rule__Venta__Group__1__Impl rule__Venta__Group__2 { 
pushFollow(FOLLOW_rule__Venta__Group__1__Impl_in_rule__Venta__Group__15451); rule__Venta__Group__1__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group__2_in_rule__Venta__Group__15454); rule__Venta__Group__2(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__1" // $ANTLR start "rule__Venta__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2640:1: rule__Venta__Group__1__Impl : ( 'compra' ) ; public final void rule__Venta__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2644:1: ( ( 'compra' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2645:1: ( 'compra' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2645:1: ( 'compra' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2646:1: 'compra' { before(grammarAccess.getVentaAccess().getCompraKeyword_1()); match(input,46,FOLLOW_46_in_rule__Venta__Group__1__Impl5482); after(grammarAccess.getVentaAccess().getCompraKeyword_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__1__Impl" // $ANTLR start "rule__Venta__Group__2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2659:1: rule__Venta__Group__2 : rule__Venta__Group__2__Impl rule__Venta__Group__3 ; public final void 
rule__Venta__Group__2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2663:1: ( rule__Venta__Group__2__Impl rule__Venta__Group__3 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2664:2: rule__Venta__Group__2__Impl rule__Venta__Group__3 { pushFollow(FOLLOW_rule__Venta__Group__2__Impl_in_rule__Venta__Group__25513); rule__Venta__Group__2__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group__3_in_rule__Venta__Group__25516); rule__Venta__Group__3(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__2" // $ANTLR start "rule__Venta__Group__2__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2671:1: rule__Venta__Group__2__Impl : ( ( rule__Venta__ItemsVendidosAssignment_2 ) ) ; public final void rule__Venta__Group__2__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2675:1: ( ( ( rule__Venta__ItemsVendidosAssignment_2 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2676:1: ( ( rule__Venta__ItemsVendidosAssignment_2 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2676:1: ( ( rule__Venta__ItemsVendidosAssignment_2 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2677:1: ( 
rule__Venta__ItemsVendidosAssignment_2 ) { before(grammarAccess.getVentaAccess().getItemsVendidosAssignment_2()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2678:1: ( rule__Venta__ItemsVendidosAssignment_2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2678:2: rule__Venta__ItemsVendidosAssignment_2 { pushFollow(FOLLOW_rule__Venta__ItemsVendidosAssignment_2_in_rule__Venta__Group__2__Impl5543); rule__Venta__ItemsVendidosAssignment_2(); state._fsp--; } after(grammarAccess.getVentaAccess().getItemsVendidosAssignment_2()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__2__Impl" // $ANTLR start "rule__Venta__Group__3" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2688:1: rule__Venta__Group__3 : rule__Venta__Group__3__Impl rule__Venta__Group__4 ; public final void rule__Venta__Group__3() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2692:1: ( rule__Venta__Group__3__Impl rule__Venta__Group__4 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2693:2: rule__Venta__Group__3__Impl rule__Venta__Group__4 { pushFollow(FOLLOW_rule__Venta__Group__3__Impl_in_rule__Venta__Group__35573); rule__Venta__Group__3__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group__4_in_rule__Venta__Group__35576); rule__Venta__Group__4(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // 
$ANTLR end "rule__Venta__Group__3" // $ANTLR start "rule__Venta__Group__3__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2700:1: rule__Venta__Group__3__Impl : ( ( rule__Venta__Group_3__0 )* ) ; public final void rule__Venta__Group__3__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2704:1: ( ( ( rule__Venta__Group_3__0 )* ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2705:1: ( ( rule__Venta__Group_3__0 )* ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2705:1: ( ( rule__Venta__Group_3__0 )* ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2706:1: ( rule__Venta__Group_3__0 )* { before(grammarAccess.getVentaAccess().getGroup_3()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2707:1: ( rule__Venta__Group_3__0 )* loop23: do { int alt23=2; int LA23_0 = input.LA(1); if ( (LA23_0==47) ) { int LA23_1 = input.LA(2); if ( (LA23_1==RULE_INT||LA23_1==19||LA23_1==44||(LA23_1>=60 && LA23_1<=61)) ) { alt23=1; } } switch (alt23) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2707:2: rule__Venta__Group_3__0 { pushFollow(FOLLOW_rule__Venta__Group_3__0_in_rule__Venta__Group__3__Impl5603); rule__Venta__Group_3__0(); state._fsp--; } break; default : break loop23; } } while (true); after(grammarAccess.getVentaAccess().getGroup_3()); } } } catch (RecognitionException re) { reportError(re); 
recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__3__Impl" // $ANTLR start "rule__Venta__Group__4" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2717:1: rule__Venta__Group__4 : rule__Venta__Group__4__Impl rule__Venta__Group__5 ; public final void rule__Venta__Group__4() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2721:1: ( rule__Venta__Group__4__Impl rule__Venta__Group__5 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2722:2: rule__Venta__Group__4__Impl rule__Venta__Group__5 { pushFollow(FOLLOW_rule__Venta__Group__4__Impl_in_rule__Venta__Group__45634); rule__Venta__Group__4__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group__5_in_rule__Venta__Group__45637); rule__Venta__Group__5(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__4" // $ANTLR start "rule__Venta__Group__4__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2729:1: rule__Venta__Group__4__Impl : ( ( rule__Venta__Group_4__0 )? ) ; public final void rule__Venta__Group__4__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2733:1: ( ( ( rule__Venta__Group_4__0 )? ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2734:1: ( ( rule__Venta__Group_4__0 )? 
) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2734:1: ( ( rule__Venta__Group_4__0 )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2735:1: ( rule__Venta__Group_4__0 )? { before(grammarAccess.getVentaAccess().getGroup_4()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2736:1: ( rule__Venta__Group_4__0 )? int alt24=2; int LA24_0 = input.LA(1); if ( (LA24_0==47) ) { int LA24_1 = input.LA(2); if ( (LA24_1==48) ) { alt24=1; } } switch (alt24) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2736:2: rule__Venta__Group_4__0 { pushFollow(FOLLOW_rule__Venta__Group_4__0_in_rule__Venta__Group__4__Impl5664); rule__Venta__Group_4__0(); state._fsp--; } break; } after(grammarAccess.getVentaAccess().getGroup_4()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__4__Impl" // $ANTLR start "rule__Venta__Group__5" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2746:1: rule__Venta__Group__5 : rule__Venta__Group__5__Impl rule__Venta__Group__6 ; public final void rule__Venta__Group__5() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2750:1: ( rule__Venta__Group__5__Impl rule__Venta__Group__6 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2751:2: rule__Venta__Group__5__Impl rule__Venta__Group__6 
{ pushFollow(FOLLOW_rule__Venta__Group__5__Impl_in_rule__Venta__Group__55695); rule__Venta__Group__5__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group__6_in_rule__Venta__Group__55698); rule__Venta__Group__6(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__5" // $ANTLR start "rule__Venta__Group__5__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2758:1: rule__Venta__Group__5__Impl : ( ( rule__Venta__Group_5__0 )? ) ; public final void rule__Venta__Group__5__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2762:1: ( ( ( rule__Venta__Group_5__0 )? ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2763:1: ( ( rule__Venta__Group_5__0 )? ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2763:1: ( ( rule__Venta__Group_5__0 )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2764:1: ( rule__Venta__Group_5__0 )? { before(grammarAccess.getVentaAccess().getGroup_5()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2765:1: ( rule__Venta__Group_5__0 )? 
int alt25=2; int LA25_0 = input.LA(1); if ( (LA25_0==47) ) { int LA25_1 = input.LA(2); if ( (LA25_1==49) ) { alt25=1; } } switch (alt25) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2765:2: rule__Venta__Group_5__0 { pushFollow(FOLLOW_rule__Venta__Group_5__0_in_rule__Venta__Group__5__Impl5725); rule__Venta__Group_5__0(); state._fsp--; } break; } after(grammarAccess.getVentaAccess().getGroup_5()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__5__Impl" // $ANTLR start "rule__Venta__Group__6" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2775:1: rule__Venta__Group__6 : rule__Venta__Group__6__Impl rule__Venta__Group__7 ; public final void rule__Venta__Group__6() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2779:1: ( rule__Venta__Group__6__Impl rule__Venta__Group__7 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2780:2: rule__Venta__Group__6__Impl rule__Venta__Group__7 { pushFollow(FOLLOW_rule__Venta__Group__6__Impl_in_rule__Venta__Group__65756); rule__Venta__Group__6__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group__7_in_rule__Venta__Group__65759); rule__Venta__Group__7(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__6" // $ANTLR start "rule__Venta__Group__6__Impl" // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2787:1: rule__Venta__Group__6__Impl : ( ( rule__Venta__Group_6__0 )? ) ; public final void rule__Venta__Group__6__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2791:1: ( ( ( rule__Venta__Group_6__0 )? ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2792:1: ( ( rule__Venta__Group_6__0 )? ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2792:1: ( ( rule__Venta__Group_6__0 )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2793:1: ( rule__Venta__Group_6__0 )? { before(grammarAccess.getVentaAccess().getGroup_6()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2794:1: ( rule__Venta__Group_6__0 )? 
// GENERATED CODE (ANTLR 3 / Xtext content assist) -- do not edit by hand.
// Below: single-token lookahead (',' == 47) selects the optional Group_6; rule__Venta__Group__7 and its __Impl
// match the sentence-terminating '.' keyword (token 42); then rule__Venta__Group_3__0 begins (',' + item list).
int alt26=2; int LA26_0 = input.LA(1); if ( (LA26_0==47) ) { alt26=1; } switch (alt26) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2794:2: rule__Venta__Group_6__0 { pushFollow(FOLLOW_rule__Venta__Group_6__0_in_rule__Venta__Group__6__Impl5786); rule__Venta__Group_6__0(); state._fsp--; } break; } after(grammarAccess.getVentaAccess().getGroup_6()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__6__Impl" // $ANTLR start "rule__Venta__Group__7" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2804:1: rule__Venta__Group__7 : rule__Venta__Group__7__Impl ; public final void rule__Venta__Group__7() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2808:1: ( rule__Venta__Group__7__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2809:2: rule__Venta__Group__7__Impl { pushFollow(FOLLOW_rule__Venta__Group__7__Impl_in_rule__Venta__Group__75817); rule__Venta__Group__7__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__7" // $ANTLR start "rule__Venta__Group__7__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2815:1: rule__Venta__Group__7__Impl : ( '.'
) ; public final void rule__Venta__Group__7__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2819:1: ( ( '.' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2820:1: ( '.' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2820:1: ( '.' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2821:1: '.' { before(grammarAccess.getVentaAccess().getFullStopKeyword_7()); match(input,42,FOLLOW_42_in_rule__Venta__Group__7__Impl5845); after(grammarAccess.getVentaAccess().getFullStopKeyword_7()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group__7__Impl" // $ANTLR start "rule__Venta__Group_3__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2850:1: rule__Venta__Group_3__0 : rule__Venta__Group_3__0__Impl rule__Venta__Group_3__1 ; public final void rule__Venta__Group_3__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2854:1: ( rule__Venta__Group_3__0__Impl rule__Venta__Group_3__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2855:2: rule__Venta__Group_3__0__Impl rule__Venta__Group_3__1 { pushFollow(FOLLOW_rule__Venta__Group_3__0__Impl_in_rule__Venta__Group_3__05892); rule__Venta__Group_3__0__Impl(); state._fsp--;
// GENERATED CODE (ANTLR 3 / Xtext content assist) -- do not edit by hand.
// Below: Group_3 matches a ',' (token 47) followed by an ItemsVendidos assignment (the repeated sold-item list);
// then Group_4__0 matches its leading ',' and Group_4__1 delegates to Group_4_1 (the rounding clause).
pushFollow(FOLLOW_rule__Venta__Group_3__1_in_rule__Venta__Group_3__05895); rule__Venta__Group_3__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_3__0" // $ANTLR start "rule__Venta__Group_3__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2862:1: rule__Venta__Group_3__0__Impl : ( ',' ) ; public final void rule__Venta__Group_3__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2866:1: ( ( ',' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2867:1: ( ',' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2867:1: ( ',' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2868:1: ',' { before(grammarAccess.getVentaAccess().getCommaKeyword_3_0()); match(input,47,FOLLOW_47_in_rule__Venta__Group_3__0__Impl5923); after(grammarAccess.getVentaAccess().getCommaKeyword_3_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_3__0__Impl" // $ANTLR start "rule__Venta__Group_3__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2881:1: rule__Venta__Group_3__1 : rule__Venta__Group_3__1__Impl ; public final void rule__Venta__Group_3__1() throws RecognitionException { int stackSize = keepStackSize(); try { //
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2885:1: ( rule__Venta__Group_3__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2886:2: rule__Venta__Group_3__1__Impl { pushFollow(FOLLOW_rule__Venta__Group_3__1__Impl_in_rule__Venta__Group_3__15954); rule__Venta__Group_3__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_3__1" // $ANTLR start "rule__Venta__Group_3__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2892:1: rule__Venta__Group_3__1__Impl : ( ( rule__Venta__ItemsVendidosAssignment_3_1 ) ) ; public final void rule__Venta__Group_3__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2896:1: ( ( ( rule__Venta__ItemsVendidosAssignment_3_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2897:1: ( ( rule__Venta__ItemsVendidosAssignment_3_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2897:1: ( ( rule__Venta__ItemsVendidosAssignment_3_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2898:1: ( rule__Venta__ItemsVendidosAssignment_3_1 ) { before(grammarAccess.getVentaAccess().getItemsVendidosAssignment_3_1()); //
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2899:1: ( rule__Venta__ItemsVendidosAssignment_3_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2899:2: rule__Venta__ItemsVendidosAssignment_3_1 { pushFollow(FOLLOW_rule__Venta__ItemsVendidosAssignment_3_1_in_rule__Venta__Group_3__1__Impl5981); rule__Venta__ItemsVendidosAssignment_3_1(); state._fsp--; } after(grammarAccess.getVentaAccess().getItemsVendidosAssignment_3_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_3__1__Impl" // $ANTLR start "rule__Venta__Group_4__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2913:1: rule__Venta__Group_4__0 : rule__Venta__Group_4__0__Impl rule__Venta__Group_4__1 ; public final void rule__Venta__Group_4__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2917:1: ( rule__Venta__Group_4__0__Impl rule__Venta__Group_4__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2918:2: rule__Venta__Group_4__0__Impl rule__Venta__Group_4__1 { pushFollow(FOLLOW_rule__Venta__Group_4__0__Impl_in_rule__Venta__Group_4__06015); rule__Venta__Group_4__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group_4__1_in_rule__Venta__Group_4__06018); rule__Venta__Group_4__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_4__0" // $ANTLR start
"rule__Venta__Group_4__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2925:1: rule__Venta__Group_4__0__Impl : ( ',' ) ; public final void rule__Venta__Group_4__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2929:1: ( ( ',' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2930:1: ( ',' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2930:1: ( ',' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2931:1: ',' { before(grammarAccess.getVentaAccess().getCommaKeyword_4_0()); match(input,47,FOLLOW_47_in_rule__Venta__Group_4__0__Impl6046); after(grammarAccess.getVentaAccess().getCommaKeyword_4_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_4__0__Impl" // $ANTLR start "rule__Venta__Group_4__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2944:1: rule__Venta__Group_4__1 : rule__Venta__Group_4__1__Impl ; public final void rule__Venta__Group_4__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2948:1: ( rule__Venta__Group_4__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2949:2: rule__Venta__Group_4__1__Impl {
// GENERATED CODE (ANTLR 3 / Xtext content assist) -- do not edit by hand.
// Below: Group_4_1 matches the 'se redondea a' keyword (token 48) followed by the TotalRedondeado assignment;
// Group_5 then matches ',' (token 47) and Group_5_1 matches the 'paga' keyword (token 49).
pushFollow(FOLLOW_rule__Venta__Group_4__1__Impl_in_rule__Venta__Group_4__16077); rule__Venta__Group_4__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_4__1" // $ANTLR start "rule__Venta__Group_4__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2955:1: rule__Venta__Group_4__1__Impl : ( ( rule__Venta__Group_4_1__0 ) ) ; public final void rule__Venta__Group_4__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2959:1: ( ( ( rule__Venta__Group_4_1__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2960:1: ( ( rule__Venta__Group_4_1__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2960:1: ( ( rule__Venta__Group_4_1__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2961:1: ( rule__Venta__Group_4_1__0 ) { before(grammarAccess.getVentaAccess().getGroup_4_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2962:1: ( rule__Venta__Group_4_1__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2962:2: rule__Venta__Group_4_1__0 { pushFollow(FOLLOW_rule__Venta__Group_4_1__0_in_rule__Venta__Group_4__1__Impl6104); rule__Venta__Group_4_1__0(); state._fsp--; } after(grammarAccess.getVentaAccess().getGroup_4_1()); } } } catch (RecognitionException re) {
reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_4__1__Impl" // $ANTLR start "rule__Venta__Group_4_1__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2976:1: rule__Venta__Group_4_1__0 : rule__Venta__Group_4_1__0__Impl rule__Venta__Group_4_1__1 ; public final void rule__Venta__Group_4_1__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2980:1: ( rule__Venta__Group_4_1__0__Impl rule__Venta__Group_4_1__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2981:2: rule__Venta__Group_4_1__0__Impl rule__Venta__Group_4_1__1 { pushFollow(FOLLOW_rule__Venta__Group_4_1__0__Impl_in_rule__Venta__Group_4_1__06138); rule__Venta__Group_4_1__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group_4_1__1_in_rule__Venta__Group_4_1__06141); rule__Venta__Group_4_1__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_4_1__0" // $ANTLR start "rule__Venta__Group_4_1__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2988:1: rule__Venta__Group_4_1__0__Impl : ( 'se redondea a' ) ; public final void rule__Venta__Group_4_1__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2992:1: ( ( 'se redondea a' ) ) //
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2993:1: ( 'se redondea a' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2993:1: ( 'se redondea a' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:2994:1: 'se redondea a' { before(grammarAccess.getVentaAccess().getSeRedondeaAKeyword_4_1_0()); match(input,48,FOLLOW_48_in_rule__Venta__Group_4_1__0__Impl6169); after(grammarAccess.getVentaAccess().getSeRedondeaAKeyword_4_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_4_1__0__Impl" // $ANTLR start "rule__Venta__Group_4_1__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3007:1: rule__Venta__Group_4_1__1 : rule__Venta__Group_4_1__1__Impl ; public final void rule__Venta__Group_4_1__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3011:1: ( rule__Venta__Group_4_1__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3012:2: rule__Venta__Group_4_1__1__Impl { pushFollow(FOLLOW_rule__Venta__Group_4_1__1__Impl_in_rule__Venta__Group_4_1__16200); rule__Venta__Group_4_1__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_4_1__1" // $ANTLR start "rule__Venta__Group_4_1__1__Impl" //
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3018:1: rule__Venta__Group_4_1__1__Impl : ( ( rule__Venta__TotalRedondeadoAssignment_4_1_1 ) ) ; public final void rule__Venta__Group_4_1__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3022:1: ( ( ( rule__Venta__TotalRedondeadoAssignment_4_1_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3023:1: ( ( rule__Venta__TotalRedondeadoAssignment_4_1_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3023:1: ( ( rule__Venta__TotalRedondeadoAssignment_4_1_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3024:1: ( rule__Venta__TotalRedondeadoAssignment_4_1_1 ) { before(grammarAccess.getVentaAccess().getTotalRedondeadoAssignment_4_1_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3025:1: ( rule__Venta__TotalRedondeadoAssignment_4_1_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3025:2: rule__Venta__TotalRedondeadoAssignment_4_1_1 { pushFollow(FOLLOW_rule__Venta__TotalRedondeadoAssignment_4_1_1_in_rule__Venta__Group_4_1__1__Impl6227); rule__Venta__TotalRedondeadoAssignment_4_1_1(); state._fsp--; } after(grammarAccess.getVentaAccess().getTotalRedondeadoAssignment_4_1_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end
"rule__Venta__Group_4_1__1__Impl" // $ANTLR start "rule__Venta__Group_5__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3039:1: rule__Venta__Group_5__0 : rule__Venta__Group_5__0__Impl rule__Venta__Group_5__1 ; public final void rule__Venta__Group_5__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3043:1: ( rule__Venta__Group_5__0__Impl rule__Venta__Group_5__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3044:2: rule__Venta__Group_5__0__Impl rule__Venta__Group_5__1 { pushFollow(FOLLOW_rule__Venta__Group_5__0__Impl_in_rule__Venta__Group_5__06261); rule__Venta__Group_5__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group_5__1_in_rule__Venta__Group_5__06264); rule__Venta__Group_5__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_5__0" // $ANTLR start "rule__Venta__Group_5__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3051:1: rule__Venta__Group_5__0__Impl : ( ',' ) ; public final void rule__Venta__Group_5__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3055:1: ( ( ',' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3056:1: ( ',' ) { //
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3056:1: ( ',' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3057:1: ',' { before(grammarAccess.getVentaAccess().getCommaKeyword_5_0()); match(input,47,FOLLOW_47_in_rule__Venta__Group_5__0__Impl6292); after(grammarAccess.getVentaAccess().getCommaKeyword_5_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_5__0__Impl" // $ANTLR start "rule__Venta__Group_5__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3070:1: rule__Venta__Group_5__1 : rule__Venta__Group_5__1__Impl ; public final void rule__Venta__Group_5__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3074:1: ( rule__Venta__Group_5__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3075:2: rule__Venta__Group_5__1__Impl { pushFollow(FOLLOW_rule__Venta__Group_5__1__Impl_in_rule__Venta__Group_5__16323); rule__Venta__Group_5__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_5__1" // $ANTLR start "rule__Venta__Group_5__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3081:1: rule__Venta__Group_5__1__Impl : ( ( rule__Venta__Group_5_1__0 ) ) ; public final void rule__Venta__Group_5__1__Impl() throws RecognitionException { int
stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3085:1: ( ( ( rule__Venta__Group_5_1__0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3086:1: ( ( rule__Venta__Group_5_1__0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3086:1: ( ( rule__Venta__Group_5_1__0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3087:1: ( rule__Venta__Group_5_1__0 ) { before(grammarAccess.getVentaAccess().getGroup_5_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3088:1: ( rule__Venta__Group_5_1__0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3088:2: rule__Venta__Group_5_1__0 { pushFollow(FOLLOW_rule__Venta__Group_5_1__0_in_rule__Venta__Group_5__1__Impl6350); rule__Venta__Group_5_1__0(); state._fsp--; } after(grammarAccess.getVentaAccess().getGroup_5_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_5__1__Impl" // $ANTLR start "rule__Venta__Group_5_1__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3102:1: rule__Venta__Group_5_1__0 : rule__Venta__Group_5_1__0__Impl rule__Venta__Group_5_1__1 ; public final void rule__Venta__Group_5_1__0() throws RecognitionException { int stackSize = keepStackSize(); try { //
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3106:1: ( rule__Venta__Group_5_1__0__Impl rule__Venta__Group_5_1__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3107:2: rule__Venta__Group_5_1__0__Impl rule__Venta__Group_5_1__1 { pushFollow(FOLLOW_rule__Venta__Group_5_1__0__Impl_in_rule__Venta__Group_5_1__06384); rule__Venta__Group_5_1__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__Venta__Group_5_1__1_in_rule__Venta__Group_5_1__06387); rule__Venta__Group_5_1__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_5_1__0" // $ANTLR start "rule__Venta__Group_5_1__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3114:1: rule__Venta__Group_5_1__0__Impl : ( 'paga' ) ; public final void rule__Venta__Group_5_1__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3118:1: ( ( 'paga' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3119:1: ( 'paga' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3119:1: ( 'paga' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3120:1: 'paga' { before(grammarAccess.getVentaAccess().getPagaKeyword_5_1_0()); match(input,49,FOLLOW_49_in_rule__Venta__Group_5_1__0__Impl6415); after(grammarAccess.getVentaAccess().getPagaKeyword_5_1_0()); } } }
catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_5_1__0__Impl" // $ANTLR start "rule__Venta__Group_5_1__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3133:1: rule__Venta__Group_5_1__1 : rule__Venta__Group_5_1__1__Impl ; public final void rule__Venta__Group_5_1__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3137:1: ( rule__Venta__Group_5_1__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3138:2: rule__Venta__Group_5_1__1__Impl { pushFollow(FOLLOW_rule__Venta__Group_5_1__1__Impl_in_rule__Venta__Group_5_1__16446); rule__Venta__Group_5_1__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__Group_5_1__1" // $ANTLR start "rule__Venta__Group_5_1__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3144:1: rule__Venta__Group_5_1__1__Impl : ( ( rule__Venta__TotalPagadoAssignment_5_1_1 ) ) ; public final void rule__Venta__Group_5_1__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3148:1: ( ( ( rule__Venta__TotalPagadoAssignment_5_1_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3149:1: ( ( rule__Venta__TotalPagadoAssignment_5_1_1 ) ) { // 
// InternalTextualVerduler.g:3149:1: ( ( rule__Venta__TotalPagadoAssignment_5_1_1 ) )
// Tail of rule__Venta__Group_5_1__1__Impl — ANTLR/Xtext generated (src-gen); do not edit by hand.
// Invokes the TotalPagado assignment and fires the content-assist before/after callbacks.
// InternalTextualVerduler.g:3150:1: ( rule__Venta__TotalPagadoAssignment_5_1_1 )
{
before(grammarAccess.getVentaAccess().getTotalPagadoAssignment_5_1_1());
// InternalTextualVerduler.g:3151:2: rule__Venta__TotalPagadoAssignment_5_1_1
{
pushFollow(FOLLOW_rule__Venta__TotalPagadoAssignment_5_1_1_in_rule__Venta__Group_5_1__1__Impl6473);
rule__Venta__TotalPagadoAssignment_5_1_1();

state._fsp--;

}
after(grammarAccess.getVentaAccess().getTotalPagadoAssignment_5_1_1());
}
}
}
catch (RecognitionException re) {
    reportError(re);
    recover(input,re);
}
finally {
    restoreStackSize(stackSize);
}
return ;
}
// $ANTLR end "rule__Venta__Group_5_1__1__Impl"


// $ANTLR start "rule__Venta__Group_6__0"
// InternalTextualVerduler.g:3165:1: rule__Venta__Group_6__0 : rule__Venta__Group_6__0__Impl rule__Venta__Group_6__1 ;
// Parses element 0 of group 6 of rule Venta, then chains to element 1.
public final void rule__Venta__Group_6__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3170:2: rule__Venta__Group_6__0__Impl rule__Venta__Group_6__1
        {
        pushFollow(FOLLOW_rule__Venta__Group_6__0__Impl_in_rule__Venta__Group_6__06507);
        rule__Venta__Group_6__0__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__Venta__Group_6__1_in_rule__Venta__Group_6__06510);
        rule__Venta__Group_6__1();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Venta__Group_6__0"


// $ANTLR start "rule__Venta__Group_6__0__Impl"
// InternalTextualVerduler.g:3177:1: rule__Venta__Group_6__0__Impl : ( ',' ) ;
// Consumes the ',' keyword (token type 47), wrapped in content-assist before/after callbacks.
public final void rule__Venta__Group_6__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3182:1: ( ',' )
        {
        // InternalTextualVerduler.g:3183:1: ','
        {
        before(grammarAccess.getVentaAccess().getCommaKeyword_6_0());
        match(input,47,FOLLOW_47_in_rule__Venta__Group_6__0__Impl6538);
        after(grammarAccess.getVentaAccess().getCommaKeyword_6_0());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Venta__Group_6__0__Impl"


// $ANTLR start "rule__Venta__Group_6__1"
// InternalTextualVerduler.g:3196:1: rule__Venta__Group_6__1 : rule__Venta__Group_6__1__Impl ;
// Parses element 1 (the last element) of group 6 of rule Venta.
public final void rule__Venta__Group_6__1() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3201:2: rule__Venta__Group_6__1__Impl
        {
        pushFollow(FOLLOW_rule__Venta__Group_6__1__Impl_in_rule__Venta__Group_6__16569);
        rule__Venta__Group_6__1__Impl();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Venta__Group_6__1"


// $ANTLR start "rule__Venta__Group_6__1__Impl"
// InternalTextualVerduler.g:3207:1: rule__Venta__Group_6__1__Impl : ( ( rule__Venta__Group_6_1__0 ) ) ;
// Delegates to nested group 6_1, wrapped in content-assist before/after callbacks.
public final void rule__Venta__Group_6__1__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3212:1: ( ( rule__Venta__Group_6_1__0 ) )
        {
        // InternalTextualVerduler.g:3213:1: ( rule__Venta__Group_6_1__0 )
        {
        before(grammarAccess.getVentaAccess().getGroup_6_1());
        // Continuation of rule__Venta__Group_6__1__Impl — ANTLR/Xtext generated (src-gen); do not edit by hand.
        // InternalTextualVerduler.g:3214:2: rule__Venta__Group_6_1__0
        {
        pushFollow(FOLLOW_rule__Venta__Group_6_1__0_in_rule__Venta__Group_6__1__Impl6596);
        rule__Venta__Group_6_1__0();

        state._fsp--;

        }
        after(grammarAccess.getVentaAccess().getGroup_6_1());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Venta__Group_6__1__Impl"


// $ANTLR start "rule__Venta__Group_6_1__0"
// InternalTextualVerduler.g:3228:1: rule__Venta__Group_6_1__0 : rule__Venta__Group_6_1__0__Impl rule__Venta__Group_6_1__1 ;
// Parses element 0 of nested group 6_1 of rule Venta, then chains to element 1.
public final void rule__Venta__Group_6_1__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3233:2: rule__Venta__Group_6_1__0__Impl rule__Venta__Group_6_1__1
        {
        pushFollow(FOLLOW_rule__Venta__Group_6_1__0__Impl_in_rule__Venta__Group_6_1__06630);
        rule__Venta__Group_6_1__0__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__Venta__Group_6_1__1_in_rule__Venta__Group_6_1__06633);
        rule__Venta__Group_6_1__1();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Venta__Group_6_1__0"


// $ANTLR start "rule__Venta__Group_6_1__0__Impl"
// InternalTextualVerduler.g:3240:1: rule__Venta__Group_6_1__0__Impl : ( 'queda debiendo' ) ;
// Consumes the 'queda debiendo' keyword (token type 50), wrapped in before/after callbacks.
public final void rule__Venta__Group_6_1__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3245:1: ( 'queda debiendo' )
        {
        // InternalTextualVerduler.g:3246:1: 'queda debiendo'
        {
        before(grammarAccess.getVentaAccess().getQuedaDebiendoKeyword_6_1_0());
        match(input,50,FOLLOW_50_in_rule__Venta__Group_6_1__0__Impl6661);
        after(grammarAccess.getVentaAccess().getQuedaDebiendoKeyword_6_1_0());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Venta__Group_6_1__0__Impl"


// $ANTLR start "rule__Venta__Group_6_1__1"
// InternalTextualVerduler.g:3259:1: rule__Venta__Group_6_1__1 : rule__Venta__Group_6_1__1__Impl ;
// Parses element 1 (the last element) of nested group 6_1 of rule Venta.
public final void rule__Venta__Group_6_1__1() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3264:2: rule__Venta__Group_6_1__1__Impl
        {
        pushFollow(FOLLOW_rule__Venta__Group_6_1__1__Impl_in_rule__Venta__Group_6_1__16692);
        rule__Venta__Group_6_1__1__Impl();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Venta__Group_6_1__1"


// $ANTLR start "rule__Venta__Group_6_1__1__Impl"
// InternalTextualVerduler.g:3270:1: rule__Venta__Group_6_1__1__Impl : ( ( rule__Venta__TotalDebiendoAssignment_6_1_1 ) ) ;
// Invokes the TotalDebiendo assignment, wrapped in content-assist before/after callbacks.
public final void rule__Venta__Group_6_1__1__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3275:1: ( ( rule__Venta__TotalDebiendoAssignment_6_1_1 ) )
        {
        // InternalTextualVerduler.g:3276:1: ( rule__Venta__TotalDebiendoAssignment_6_1_1 )
        {
        before(grammarAccess.getVentaAccess().getTotalDebiendoAssignment_6_1_1());
        // Continuation of rule__Venta__Group_6_1__1__Impl — ANTLR/Xtext generated (src-gen); do not edit by hand.
        // InternalTextualVerduler.g:3277:2: rule__Venta__TotalDebiendoAssignment_6_1_1
        {
        pushFollow(FOLLOW_rule__Venta__TotalDebiendoAssignment_6_1_1_in_rule__Venta__Group_6_1__1__Impl6719);
        rule__Venta__TotalDebiendoAssignment_6_1_1();

        state._fsp--;

        }
        after(grammarAccess.getVentaAccess().getTotalDebiendoAssignment_6_1_1());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Venta__Group_6_1__1__Impl"


// $ANTLR start "rule__ItemVenta__Group__0"
// InternalTextualVerduler.g:3291:1: rule__ItemVenta__Group__0 : rule__ItemVenta__Group__0__Impl rule__ItemVenta__Group__1 ;
// Parses element 0 of rule ItemVenta's group, then chains to element 1.
public final void rule__ItemVenta__Group__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3296:2: rule__ItemVenta__Group__0__Impl rule__ItemVenta__Group__1
        {
        pushFollow(FOLLOW_rule__ItemVenta__Group__0__Impl_in_rule__ItemVenta__Group__06753);
        rule__ItemVenta__Group__0__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__ItemVenta__Group__1_in_rule__ItemVenta__Group__06756);
        rule__ItemVenta__Group__1();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ItemVenta__Group__0"


// $ANTLR start "rule__ItemVenta__Group__0__Impl"
// InternalTextualVerduler.g:3303:1: rule__ItemVenta__Group__0__Impl : ( ( rule__ItemVenta__CantidadAssignment_0 ) ) ;
// Invokes the Cantidad assignment, wrapped in content-assist before/after callbacks.
public final void rule__ItemVenta__Group__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3308:1: ( ( rule__ItemVenta__CantidadAssignment_0 ) )
        {
        // InternalTextualVerduler.g:3309:1: ( rule__ItemVenta__CantidadAssignment_0 )
        {
        before(grammarAccess.getItemVentaAccess().getCantidadAssignment_0());
        // InternalTextualVerduler.g:3310:2: rule__ItemVenta__CantidadAssignment_0
        {
        pushFollow(FOLLOW_rule__ItemVenta__CantidadAssignment_0_in_rule__ItemVenta__Group__0__Impl6783);
        rule__ItemVenta__CantidadAssignment_0();

        state._fsp--;

        }
        after(grammarAccess.getItemVentaAccess().getCantidadAssignment_0());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ItemVenta__Group__0__Impl"


// $ANTLR start "rule__ItemVenta__Group__1"
// InternalTextualVerduler.g:3320:1: rule__ItemVenta__Group__1 : rule__ItemVenta__Group__1__Impl rule__ItemVenta__Group__2 ;
// Parses element 1 of rule ItemVenta's group, then chains to element 2.
public final void rule__ItemVenta__Group__1() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3325:2: rule__ItemVenta__Group__1__Impl rule__ItemVenta__Group__2
        {
        pushFollow(FOLLOW_rule__ItemVenta__Group__1__Impl_in_rule__ItemVenta__Group__16813);
        rule__ItemVenta__Group__1__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__ItemVenta__Group__2_in_rule__ItemVenta__Group__16816);
        rule__ItemVenta__Group__2();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ItemVenta__Group__1"


// $ANTLR start "rule__ItemVenta__Group__1__Impl"
// InternalTextualVerduler.g:3332:1: rule__ItemVenta__Group__1__Impl : ( 'de' ) ;
// Consumes the 'de' keyword (token type 51), wrapped in before/after callbacks.
public final void rule__ItemVenta__Group__1__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3337:1: ( 'de' )
        {
        // InternalTextualVerduler.g:3337:1: ( 'de' )
        // Continuation of rule__ItemVenta__Group__1__Impl — ANTLR/Xtext generated (src-gen); do not edit by hand.
        // InternalTextualVerduler.g:3338:1: 'de'
        {
        before(grammarAccess.getItemVentaAccess().getDeKeyword_1());
        match(input,51,FOLLOW_51_in_rule__ItemVenta__Group__1__Impl6844);
        after(grammarAccess.getItemVentaAccess().getDeKeyword_1());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ItemVenta__Group__1__Impl"


// $ANTLR start "rule__ItemVenta__Group__2"
// InternalTextualVerduler.g:3351:1: rule__ItemVenta__Group__2 : rule__ItemVenta__Group__2__Impl ;
// Parses element 2 (the last element) of rule ItemVenta's group.
public final void rule__ItemVenta__Group__2() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3356:2: rule__ItemVenta__Group__2__Impl
        {
        pushFollow(FOLLOW_rule__ItemVenta__Group__2__Impl_in_rule__ItemVenta__Group__26875);
        rule__ItemVenta__Group__2__Impl();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ItemVenta__Group__2"


// $ANTLR start "rule__ItemVenta__Group__2__Impl"
// InternalTextualVerduler.g:3362:1: rule__ItemVenta__Group__2__Impl : ( ( rule__ItemVenta__ProductoAssignment_2 ) ) ;
// Invokes the Producto assignment, wrapped in content-assist before/after callbacks.
public final void rule__ItemVenta__Group__2__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3367:1: ( ( rule__ItemVenta__ProductoAssignment_2 ) )
        {
        // InternalTextualVerduler.g:3368:1: ( rule__ItemVenta__ProductoAssignment_2 )
        {
        before(grammarAccess.getItemVentaAccess().getProductoAssignment_2());
        // InternalTextualVerduler.g:3369:2: rule__ItemVenta__ProductoAssignment_2
        {
        pushFollow(FOLLOW_rule__ItemVenta__ProductoAssignment_2_in_rule__ItemVenta__Group__2__Impl6902);
        rule__ItemVenta__ProductoAssignment_2();

        state._fsp--;

        }
        after(grammarAccess.getItemVentaAccess().getProductoAssignment_2());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ItemVenta__Group__2__Impl"


// $ANTLR start "rule__Tarea__Group__0"
// InternalTextualVerduler.g:3385:1: rule__Tarea__Group__0 : rule__Tarea__Group__0__Impl rule__Tarea__Group__1 ;
// Parses element 0 of rule Tarea's group, then chains to element 1.
public final void rule__Tarea__Group__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3390:2: rule__Tarea__Group__0__Impl rule__Tarea__Group__1
        {
        pushFollow(FOLLOW_rule__Tarea__Group__0__Impl_in_rule__Tarea__Group__06938);
        rule__Tarea__Group__0__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__Tarea__Group__1_in_rule__Tarea__Group__06941);
        rule__Tarea__Group__1();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__0"


// $ANTLR start "rule__Tarea__Group__0__Impl"
// InternalTextualVerduler.g:3397:1: rule__Tarea__Group__0__Impl : ( 'A las' ) ;
// Consumes the 'A las' keyword (token type 52), wrapped in before/after callbacks.
public final void rule__Tarea__Group__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3402:1: ( 'A las' )
        {
        // InternalTextualVerduler.g:3403:1: 'A las'
        {
        before(grammarAccess.getTareaAccess().getALasKeyword_0());
        match(input,52,FOLLOW_52_in_rule__Tarea__Group__0__Impl6969);
        after(grammarAccess.getTareaAccess().getALasKeyword_0());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__0__Impl"


// $ANTLR start "rule__Tarea__Group__1"
// InternalTextualVerduler.g:3416:1: rule__Tarea__Group__1 : rule__Tarea__Group__1__Impl rule__Tarea__Group__2 ;
// ANTLR/Xtext generated (src-gen); do not edit by hand.
// Parses element 1 of rule Tarea's group, then chains to element 2.
public final void rule__Tarea__Group__1() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3421:2: rule__Tarea__Group__1__Impl rule__Tarea__Group__2
        {
        pushFollow(FOLLOW_rule__Tarea__Group__1__Impl_in_rule__Tarea__Group__17000);
        rule__Tarea__Group__1__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__Tarea__Group__2_in_rule__Tarea__Group__17003);
        rule__Tarea__Group__2();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__1"


// $ANTLR start "rule__Tarea__Group__1__Impl"
// InternalTextualVerduler.g:3428:1: rule__Tarea__Group__1__Impl : ( ( rule__Tarea__HorarioAssignment_1 ) ) ;
// Invokes the Horario assignment, wrapped in content-assist before/after callbacks.
public final void rule__Tarea__Group__1__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3433:1: ( ( rule__Tarea__HorarioAssignment_1 ) )
        {
        // InternalTextualVerduler.g:3433:1: ( ( rule__Tarea__HorarioAssignment_1 ) )
        // InternalTextualVerduler.g:3434:1: ( rule__Tarea__HorarioAssignment_1 )
        {
        before(grammarAccess.getTareaAccess().getHorarioAssignment_1());
        // InternalTextualVerduler.g:3435:2: rule__Tarea__HorarioAssignment_1
        {
        pushFollow(FOLLOW_rule__Tarea__HorarioAssignment_1_in_rule__Tarea__Group__1__Impl7030);
        rule__Tarea__HorarioAssignment_1();

        state._fsp--;

        }
        after(grammarAccess.getTareaAccess().getHorarioAssignment_1());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__1__Impl"


// $ANTLR start "rule__Tarea__Group__2"
// InternalTextualVerduler.g:3445:1: rule__Tarea__Group__2 : rule__Tarea__Group__2__Impl rule__Tarea__Group__3 ;
// Parses element 2 of rule Tarea's group, then chains to element 3.
public final void rule__Tarea__Group__2() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3450:2: rule__Tarea__Group__2__Impl rule__Tarea__Group__3
        {
        pushFollow(FOLLOW_rule__Tarea__Group__2__Impl_in_rule__Tarea__Group__27060);
        rule__Tarea__Group__2__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__Tarea__Group__3_in_rule__Tarea__Group__27063);
        rule__Tarea__Group__3();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__2"


// $ANTLR start "rule__Tarea__Group__2__Impl"
// InternalTextualVerduler.g:3457:1: rule__Tarea__Group__2__Impl : ( ( rule__Tarea__TareaRealizadaAssignment_2 ) ) ;
// Invokes the TareaRealizada assignment, wrapped in content-assist before/after callbacks.
public final void rule__Tarea__Group__2__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3462:1: ( ( rule__Tarea__TareaRealizadaAssignment_2 ) )
        {
        // InternalTextualVerduler.g:3463:1: ( rule__Tarea__TareaRealizadaAssignment_2 )
        {
        before(grammarAccess.getTareaAccess().getTareaRealizadaAssignment_2());
        // InternalTextualVerduler.g:3464:2: rule__Tarea__TareaRealizadaAssignment_2
        {
        pushFollow(FOLLOW_rule__Tarea__TareaRealizadaAssignment_2_in_rule__Tarea__Group__2__Impl7090);
        rule__Tarea__TareaRealizadaAssignment_2();

        state._fsp--;

        }
        after(grammarAccess.getTareaAccess().getTareaRealizadaAssignment_2());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__2__Impl"


// $ANTLR start "rule__Tarea__Group__3"
// InternalTextualVerduler.g:3474:1: rule__Tarea__Group__3 : rule__Tarea__Group__3__Impl rule__Tarea__Group__4 ;
// ANTLR/Xtext generated (src-gen); do not edit by hand.
// Parses element 3 of rule Tarea's group, then chains to element 4.
public final void rule__Tarea__Group__3() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3479:2: rule__Tarea__Group__3__Impl rule__Tarea__Group__4
        {
        pushFollow(FOLLOW_rule__Tarea__Group__3__Impl_in_rule__Tarea__Group__37120);
        rule__Tarea__Group__3__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__Tarea__Group__4_in_rule__Tarea__Group__37123);
        rule__Tarea__Group__4();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__3"


// $ANTLR start "rule__Tarea__Group__3__Impl"
// InternalTextualVerduler.g:3486:1: rule__Tarea__Group__3__Impl : ( ', tomo' ) ;
// Consumes the ', tomo' keyword (token type 53), wrapped in before/after callbacks.
public final void rule__Tarea__Group__3__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3491:1: ( ', tomo' )
        {
        // InternalTextualVerduler.g:3492:1: ', tomo'
        {
        before(grammarAccess.getTareaAccess().getTomoKeyword_3());
        match(input,53,FOLLOW_53_in_rule__Tarea__Group__3__Impl7151);
        after(grammarAccess.getTareaAccess().getTomoKeyword_3());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__3__Impl"


// $ANTLR start "rule__Tarea__Group__4"
// InternalTextualVerduler.g:3505:1: rule__Tarea__Group__4 : rule__Tarea__Group__4__Impl rule__Tarea__Group__5 ;
// Parses element 4 of rule Tarea's group, then chains to element 5.
public final void rule__Tarea__Group__4() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3510:2: rule__Tarea__Group__4__Impl rule__Tarea__Group__5
        {
        pushFollow(FOLLOW_rule__Tarea__Group__4__Impl_in_rule__Tarea__Group__47182);
        rule__Tarea__Group__4__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__Tarea__Group__5_in_rule__Tarea__Group__47185);
        rule__Tarea__Group__5();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__4"


// $ANTLR start "rule__Tarea__Group__4__Impl"
// InternalTextualVerduler.g:3517:1: rule__Tarea__Group__4__Impl : ( ( rule__Tarea__DuracionAssignment_4 ) ) ;
// Invokes the Duracion assignment, wrapped in content-assist before/after callbacks.
public final void rule__Tarea__Group__4__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3522:1: ( ( rule__Tarea__DuracionAssignment_4 ) )
        {
        // InternalTextualVerduler.g:3523:1: ( rule__Tarea__DuracionAssignment_4 )
        {
        before(grammarAccess.getTareaAccess().getDuracionAssignment_4());
        // InternalTextualVerduler.g:3524:2: rule__Tarea__DuracionAssignment_4
        {
        pushFollow(FOLLOW_rule__Tarea__DuracionAssignment_4_in_rule__Tarea__Group__4__Impl7212);
        rule__Tarea__DuracionAssignment_4();

        state._fsp--;

        }
        after(grammarAccess.getTareaAccess().getDuracionAssignment_4());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__4__Impl"


// $ANTLR start "rule__Tarea__Group__5"
// InternalTextualVerduler.g:3534:1: rule__Tarea__Group__5 : rule__Tarea__Group__5__Impl ;
// ANTLR/Xtext generated (src-gen); do not edit by hand.
// Parses element 5 (the last element) of rule Tarea's group.
public final void rule__Tarea__Group__5() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3539:2: rule__Tarea__Group__5__Impl
        {
        pushFollow(FOLLOW_rule__Tarea__Group__5__Impl_in_rule__Tarea__Group__57242);
        rule__Tarea__Group__5__Impl();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__5"


// $ANTLR start "rule__Tarea__Group__5__Impl"
// InternalTextualVerduler.g:3545:1: rule__Tarea__Group__5__Impl : ( '.' ) ;
// Consumes the '.' keyword (token type 42), wrapped in before/after callbacks.
public final void rule__Tarea__Group__5__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3550:1: ( '.' )
        {
        // InternalTextualVerduler.g:3551:1: '.'
        {
        before(grammarAccess.getTareaAccess().getFullStopKeyword_5());
        match(input,42,FOLLOW_42_in_rule__Tarea__Group__5__Impl7270);
        after(grammarAccess.getTareaAccess().getFullStopKeyword_5());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Tarea__Group__5__Impl"


// $ANTLR start "rule__HorarioLiteral__Group__0"
// InternalTextualVerduler.g:3576:1: rule__HorarioLiteral__Group__0 : rule__HorarioLiteral__Group__0__Impl rule__HorarioLiteral__Group__1 ;
// Parses element 0 of rule HorarioLiteral's group, then chains to element 1.
public final void rule__HorarioLiteral__Group__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3581:2: rule__HorarioLiteral__Group__0__Impl rule__HorarioLiteral__Group__1
        {
        pushFollow(FOLLOW_rule__HorarioLiteral__Group__0__Impl_in_rule__HorarioLiteral__Group__07313);
        rule__HorarioLiteral__Group__0__Impl();

        state._fsp--;

        pushFollow(FOLLOW_rule__HorarioLiteral__Group__1_in_rule__HorarioLiteral__Group__07316);
        rule__HorarioLiteral__Group__1();

        state._fsp--;

        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__HorarioLiteral__Group__0"


// $ANTLR start "rule__HorarioLiteral__Group__0__Impl"
// InternalTextualVerduler.g:3588:1: rule__HorarioLiteral__Group__0__Impl : ( ( rule__HorarioLiteral__HoraAssignment_0 ) ) ;
// Invokes the Hora assignment, wrapped in content-assist before/after callbacks.
public final void rule__HorarioLiteral__Group__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3593:1: ( ( rule__HorarioLiteral__HoraAssignment_0 ) )
        {
        // InternalTextualVerduler.g:3594:1: ( rule__HorarioLiteral__HoraAssignment_0 )
        {
        before(grammarAccess.getHorarioLiteralAccess().getHoraAssignment_0());
        // InternalTextualVerduler.g:3595:2: rule__HorarioLiteral__HoraAssignment_0
        {
        pushFollow(FOLLOW_rule__HorarioLiteral__HoraAssignment_0_in_rule__HorarioLiteral__Group__0__Impl7343);
        rule__HorarioLiteral__HoraAssignment_0();

        state._fsp--;

        }
        after(grammarAccess.getHorarioLiteralAccess().getHoraAssignment_0());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__HorarioLiteral__Group__0__Impl"


// $ANTLR start "rule__HorarioLiteral__Group__1"
// InternalTextualVerduler.g:3605:1: rule__HorarioLiteral__Group__1 : rule__HorarioLiteral__Group__1__Impl ;
public final void
rule__HorarioLiteral__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3609:1: ( rule__HorarioLiteral__Group__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3610:2: rule__HorarioLiteral__Group__1__Impl { pushFollow(FOLLOW_rule__HorarioLiteral__Group__1__Impl_in_rule__HorarioLiteral__Group__17373); rule__HorarioLiteral__Group__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioLiteral__Group__1" // $ANTLR start "rule__HorarioLiteral__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3616:1: rule__HorarioLiteral__Group__1__Impl : ( ( rule__HorarioLiteral__Group_1__0 )? ) ; public final void rule__HorarioLiteral__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3620:1: ( ( ( rule__HorarioLiteral__Group_1__0 )? ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3621:1: ( ( rule__HorarioLiteral__Group_1__0 )? ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3621:1: ( ( rule__HorarioLiteral__Group_1__0 )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3622:1: ( rule__HorarioLiteral__Group_1__0 )? 
{ before(grammarAccess.getHorarioLiteralAccess().getGroup_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3623:1: ( rule__HorarioLiteral__Group_1__0 )? int alt27=2; int LA27_0 = input.LA(1); if ( (LA27_0==54) ) { alt27=1; } switch (alt27) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3623:2: rule__HorarioLiteral__Group_1__0 { pushFollow(FOLLOW_rule__HorarioLiteral__Group_1__0_in_rule__HorarioLiteral__Group__1__Impl7400); rule__HorarioLiteral__Group_1__0(); state._fsp--; } break; } after(grammarAccess.getHorarioLiteralAccess().getGroup_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioLiteral__Group__1__Impl" // $ANTLR start "rule__HorarioLiteral__Group_1__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3637:1: rule__HorarioLiteral__Group_1__0 : rule__HorarioLiteral__Group_1__0__Impl rule__HorarioLiteral__Group_1__1 ; public final void rule__HorarioLiteral__Group_1__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3641:1: ( rule__HorarioLiteral__Group_1__0__Impl rule__HorarioLiteral__Group_1__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3642:2: rule__HorarioLiteral__Group_1__0__Impl rule__HorarioLiteral__Group_1__1 { pushFollow(FOLLOW_rule__HorarioLiteral__Group_1__0__Impl_in_rule__HorarioLiteral__Group_1__07435); rule__HorarioLiteral__Group_1__0__Impl(); state._fsp--; 
pushFollow(FOLLOW_rule__HorarioLiteral__Group_1__1_in_rule__HorarioLiteral__Group_1__07438); rule__HorarioLiteral__Group_1__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioLiteral__Group_1__0" // $ANTLR start "rule__HorarioLiteral__Group_1__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3649:1: rule__HorarioLiteral__Group_1__0__Impl : ( 'y' ) ; public final void rule__HorarioLiteral__Group_1__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3653:1: ( ( 'y' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3654:1: ( 'y' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3654:1: ( 'y' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3655:1: 'y' { before(grammarAccess.getHorarioLiteralAccess().getYKeyword_1_0()); match(input,54,FOLLOW_54_in_rule__HorarioLiteral__Group_1__0__Impl7466); after(grammarAccess.getHorarioLiteralAccess().getYKeyword_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioLiteral__Group_1__0__Impl" // $ANTLR start "rule__HorarioLiteral__Group_1__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3668:1: rule__HorarioLiteral__Group_1__1 : rule__HorarioLiteral__Group_1__1__Impl ; public final void 
rule__HorarioLiteral__Group_1__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3672:1: ( rule__HorarioLiteral__Group_1__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3673:2: rule__HorarioLiteral__Group_1__1__Impl { pushFollow(FOLLOW_rule__HorarioLiteral__Group_1__1__Impl_in_rule__HorarioLiteral__Group_1__17497); rule__HorarioLiteral__Group_1__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioLiteral__Group_1__1" // $ANTLR start "rule__HorarioLiteral__Group_1__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3679:1: rule__HorarioLiteral__Group_1__1__Impl : ( ( rule__HorarioLiteral__FraccionHoraAssignment_1_1 ) ) ; public final void rule__HorarioLiteral__Group_1__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3683:1: ( ( ( rule__HorarioLiteral__FraccionHoraAssignment_1_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3684:1: ( ( rule__HorarioLiteral__FraccionHoraAssignment_1_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3684:1: ( ( rule__HorarioLiteral__FraccionHoraAssignment_1_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3685:1: ( 
rule__HorarioLiteral__FraccionHoraAssignment_1_1 ) { before(grammarAccess.getHorarioLiteralAccess().getFraccionHoraAssignment_1_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3686:1: ( rule__HorarioLiteral__FraccionHoraAssignment_1_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3686:2: rule__HorarioLiteral__FraccionHoraAssignment_1_1 { pushFollow(FOLLOW_rule__HorarioLiteral__FraccionHoraAssignment_1_1_in_rule__HorarioLiteral__Group_1__1__Impl7524); rule__HorarioLiteral__FraccionHoraAssignment_1_1(); state._fsp--; } after(grammarAccess.getHorarioLiteralAccess().getFraccionHoraAssignment_1_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioLiteral__Group_1__1__Impl" // $ANTLR start "rule__HorarioNumerico__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3700:1: rule__HorarioNumerico__Group__0 : rule__HorarioNumerico__Group__0__Impl rule__HorarioNumerico__Group__1 ; public final void rule__HorarioNumerico__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3704:1: ( rule__HorarioNumerico__Group__0__Impl rule__HorarioNumerico__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3705:2: rule__HorarioNumerico__Group__0__Impl rule__HorarioNumerico__Group__1 { pushFollow(FOLLOW_rule__HorarioNumerico__Group__0__Impl_in_rule__HorarioNumerico__Group__07558); rule__HorarioNumerico__Group__0__Impl(); state._fsp--; 
pushFollow(FOLLOW_rule__HorarioNumerico__Group__1_in_rule__HorarioNumerico__Group__07561); rule__HorarioNumerico__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__Group__0" // $ANTLR start "rule__HorarioNumerico__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3712:1: rule__HorarioNumerico__Group__0__Impl : ( ( rule__HorarioNumerico__HoraAssignment_0 ) ) ; public final void rule__HorarioNumerico__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3716:1: ( ( ( rule__HorarioNumerico__HoraAssignment_0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3717:1: ( ( rule__HorarioNumerico__HoraAssignment_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3717:1: ( ( rule__HorarioNumerico__HoraAssignment_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3718:1: ( rule__HorarioNumerico__HoraAssignment_0 ) { before(grammarAccess.getHorarioNumericoAccess().getHoraAssignment_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3719:1: ( rule__HorarioNumerico__HoraAssignment_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3719:2: rule__HorarioNumerico__HoraAssignment_0 { 
pushFollow(FOLLOW_rule__HorarioNumerico__HoraAssignment_0_in_rule__HorarioNumerico__Group__0__Impl7588); rule__HorarioNumerico__HoraAssignment_0(); state._fsp--; } after(grammarAccess.getHorarioNumericoAccess().getHoraAssignment_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__Group__0__Impl" // $ANTLR start "rule__HorarioNumerico__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3729:1: rule__HorarioNumerico__Group__1 : rule__HorarioNumerico__Group__1__Impl ; public final void rule__HorarioNumerico__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3733:1: ( rule__HorarioNumerico__Group__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3734:2: rule__HorarioNumerico__Group__1__Impl { pushFollow(FOLLOW_rule__HorarioNumerico__Group__1__Impl_in_rule__HorarioNumerico__Group__17618); rule__HorarioNumerico__Group__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__Group__1" // $ANTLR start "rule__HorarioNumerico__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3740:1: rule__HorarioNumerico__Group__1__Impl : ( ( rule__HorarioNumerico__Group_1__0 )? 
) ; public final void rule__HorarioNumerico__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3744:1: ( ( ( rule__HorarioNumerico__Group_1__0 )? ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3745:1: ( ( rule__HorarioNumerico__Group_1__0 )? ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3745:1: ( ( rule__HorarioNumerico__Group_1__0 )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3746:1: ( rule__HorarioNumerico__Group_1__0 )? { before(grammarAccess.getHorarioNumericoAccess().getGroup_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3747:1: ( rule__HorarioNumerico__Group_1__0 )? 
int alt28=2; int LA28_0 = input.LA(1); if ( (LA28_0==55) ) { alt28=1; } switch (alt28) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3747:2: rule__HorarioNumerico__Group_1__0 { pushFollow(FOLLOW_rule__HorarioNumerico__Group_1__0_in_rule__HorarioNumerico__Group__1__Impl7645); rule__HorarioNumerico__Group_1__0(); state._fsp--; } break; } after(grammarAccess.getHorarioNumericoAccess().getGroup_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__Group__1__Impl" // $ANTLR start "rule__HorarioNumerico__Group_1__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3761:1: rule__HorarioNumerico__Group_1__0 : rule__HorarioNumerico__Group_1__0__Impl rule__HorarioNumerico__Group_1__1 ; public final void rule__HorarioNumerico__Group_1__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3765:1: ( rule__HorarioNumerico__Group_1__0__Impl rule__HorarioNumerico__Group_1__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3766:2: rule__HorarioNumerico__Group_1__0__Impl rule__HorarioNumerico__Group_1__1 { pushFollow(FOLLOW_rule__HorarioNumerico__Group_1__0__Impl_in_rule__HorarioNumerico__Group_1__07680); rule__HorarioNumerico__Group_1__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__HorarioNumerico__Group_1__1_in_rule__HorarioNumerico__Group_1__07683); rule__HorarioNumerico__Group_1__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end 
"rule__HorarioNumerico__Group_1__0" // $ANTLR start "rule__HorarioNumerico__Group_1__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3773:1: rule__HorarioNumerico__Group_1__0__Impl : ( ':' ) ; public final void rule__HorarioNumerico__Group_1__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3777:1: ( ( ':' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3778:1: ( ':' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3778:1: ( ':' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3779:1: ':' { before(grammarAccess.getHorarioNumericoAccess().getColonKeyword_1_0()); match(input,55,FOLLOW_55_in_rule__HorarioNumerico__Group_1__0__Impl7711); after(grammarAccess.getHorarioNumericoAccess().getColonKeyword_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__Group_1__0__Impl" // $ANTLR start "rule__HorarioNumerico__Group_1__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3792:1: rule__HorarioNumerico__Group_1__1 : rule__HorarioNumerico__Group_1__1__Impl ; public final void rule__HorarioNumerico__Group_1__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3796:1: ( rule__HorarioNumerico__Group_1__1__Impl ) 
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3797:2: rule__HorarioNumerico__Group_1__1__Impl { pushFollow(FOLLOW_rule__HorarioNumerico__Group_1__1__Impl_in_rule__HorarioNumerico__Group_1__17742); rule__HorarioNumerico__Group_1__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__Group_1__1" // $ANTLR start "rule__HorarioNumerico__Group_1__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3803:1: rule__HorarioNumerico__Group_1__1__Impl : ( ( rule__HorarioNumerico__MinutosAssignment_1_1 ) ) ; public final void rule__HorarioNumerico__Group_1__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3807:1: ( ( ( rule__HorarioNumerico__MinutosAssignment_1_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3808:1: ( ( rule__HorarioNumerico__MinutosAssignment_1_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3808:1: ( ( rule__HorarioNumerico__MinutosAssignment_1_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3809:1: ( rule__HorarioNumerico__MinutosAssignment_1_1 ) { before(grammarAccess.getHorarioNumericoAccess().getMinutosAssignment_1_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3810:1: ( rule__HorarioNumerico__MinutosAssignment_1_1 ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3810:2: rule__HorarioNumerico__MinutosAssignment_1_1 { pushFollow(FOLLOW_rule__HorarioNumerico__MinutosAssignment_1_1_in_rule__HorarioNumerico__Group_1__1__Impl7769); rule__HorarioNumerico__MinutosAssignment_1_1(); state._fsp--; } after(grammarAccess.getHorarioNumericoAccess().getMinutosAssignment_1_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__Group_1__1__Impl" // $ANTLR start "rule__TareaRevisionProductos__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3824:1: rule__TareaRevisionProductos__Group__0 : rule__TareaRevisionProductos__Group__0__Impl rule__TareaRevisionProductos__Group__1 ; public final void rule__TareaRevisionProductos__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3828:1: ( rule__TareaRevisionProductos__Group__0__Impl rule__TareaRevisionProductos__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3829:2: rule__TareaRevisionProductos__Group__0__Impl rule__TareaRevisionProductos__Group__1 { pushFollow(FOLLOW_rule__TareaRevisionProductos__Group__0__Impl_in_rule__TareaRevisionProductos__Group__07803); rule__TareaRevisionProductos__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__TareaRevisionProductos__Group__1_in_rule__TareaRevisionProductos__Group__07806); rule__TareaRevisionProductos__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // 
$ANTLR end "rule__TareaRevisionProductos__Group__0" // $ANTLR start "rule__TareaRevisionProductos__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3836:1: rule__TareaRevisionProductos__Group__0__Impl : ( 'se revisaron' ) ; public final void rule__TareaRevisionProductos__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3840:1: ( ( 'se revisaron' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3841:1: ( 'se revisaron' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3841:1: ( 'se revisaron' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3842:1: 'se revisaron' { before(grammarAccess.getTareaRevisionProductosAccess().getSeRevisaronKeyword_0()); match(input,56,FOLLOW_56_in_rule__TareaRevisionProductos__Group__0__Impl7834); after(grammarAccess.getTareaRevisionProductosAccess().getSeRevisaronKeyword_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__TareaRevisionProductos__Group__0__Impl" // $ANTLR start "rule__TareaRevisionProductos__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3855:1: rule__TareaRevisionProductos__Group__1 : rule__TareaRevisionProductos__Group__1__Impl rule__TareaRevisionProductos__Group__2 ; public final void rule__TareaRevisionProductos__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3859:1: ( rule__TareaRevisionProductos__Group__1__Impl rule__TareaRevisionProductos__Group__2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3860:2: rule__TareaRevisionProductos__Group__1__Impl rule__TareaRevisionProductos__Group__2 { pushFollow(FOLLOW_rule__TareaRevisionProductos__Group__1__Impl_in_rule__TareaRevisionProductos__Group__17865); rule__TareaRevisionProductos__Group__1__Impl(); state._fsp--; pushFollow(FOLLOW_rule__TareaRevisionProductos__Group__2_in_rule__TareaRevisionProductos__Group__17868); rule__TareaRevisionProductos__Group__2(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__TareaRevisionProductos__Group__1" // $ANTLR start "rule__TareaRevisionProductos__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3867:1: rule__TareaRevisionProductos__Group__1__Impl : ( ( rule__TareaRevisionProductos__Group_1__0 )* ) ; public final void rule__TareaRevisionProductos__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3871:1: ( ( ( rule__TareaRevisionProductos__Group_1__0 )* ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3872:1: ( ( rule__TareaRevisionProductos__Group_1__0 )* ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3872:1: ( ( rule__TareaRevisionProductos__Group_1__0 )* ) // 
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:3873:1: ( rule__TareaRevisionProductos__Group_1__0 )*
// NOTE(review): ANTLR/Xtext GENERATED content-assist parser (src-gen).
// Do not hand-edit -- regenerate from the grammar instead. Comments below were
// added or condensed for readability only; all code tokens are unchanged.
{
// Zero-or-more loop over Group_1 ("<sep> <product> ," continuations).
// Iterates while lookahead is token 14 or 13 (presumably separator keywords --
// TODO confirm against the token file), followed by RULE_ID, followed by 47 (',').
before(grammarAccess.getTareaRevisionProductosAccess().getGroup_1());
loop29:
do {
    int alt29=2;
    int LA29_0 = input.LA(1);
    if ( (LA29_0==14) ) {
        int LA29_1 = input.LA(2);
        if ( (LA29_1==RULE_ID) ) {
            int LA29_3 = input.LA(3);
            if ( (LA29_3==47) ) {
                alt29=1;
            }
        }
    }
    else if ( (LA29_0==13) ) {
        int LA29_2 = input.LA(2);
        if ( (LA29_2==RULE_ID) ) {
            int LA29_3 = input.LA(3);
            if ( (LA29_3==47) ) {
                alt29=1;
            }
        }
    }
    switch (alt29) {
    case 1 :
        // InternalTextualVerduler.g:3874:2: rule__TareaRevisionProductos__Group_1__0
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group_1__0_in_rule__TareaRevisionProductos__Group__1__Impl7895);
        rule__TareaRevisionProductos__Group_1__0();
        state._fsp--;
        }
        break;
    default :
        break loop29;
    }
} while (true);
after(grammarAccess.getTareaRevisionProductosAccess().getGroup_1());
}
}
}
catch (RecognitionException re) {
    reportError(re);
    recover(input,re);
}
finally {
    restoreStackSize(stackSize);
}
return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group__1__Impl"


// $ANTLR start "rule__TareaRevisionProductos__Group__2"
// InternalTextualVerduler.g:3884:1: rule__TareaRevisionProductos__Group__2 : rule__TareaRevisionProductos__Group__2__Impl rule__TareaRevisionProductos__Group__3 ;
// Step 2 of the TareaRevisionProductos group: runs this step's body, then chains to step 3.
public final void rule__TareaRevisionProductos__Group__2() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3889:2: rule__TareaRevisionProductos__Group__2__Impl rule__TareaRevisionProductos__Group__3
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group__2__Impl_in_rule__TareaRevisionProductos__Group__27926);
        rule__TareaRevisionProductos__Group__2__Impl();
        state._fsp--;
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group__3_in_rule__TareaRevisionProductos__Group__27929);
        rule__TareaRevisionProductos__Group__3();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group__2"


// $ANTLR start "rule__TareaRevisionProductos__Group__2__Impl"
// InternalTextualVerduler.g:3896:1: rule__TareaRevisionProductos__Group__2__Impl : ( ( rule__TareaRevisionProductos__Alternatives_2 ) ) ;
// Step 2 body: exactly one occurrence of Alternatives_2, bracketed by before/after
// callbacks used for content-assist context tracking.
public final void rule__TareaRevisionProductos__Group__2__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3901:1: ( ( rule__TareaRevisionProductos__Alternatives_2 ) )
        {
        // InternalTextualVerduler.g:3902:1: ( rule__TareaRevisionProductos__Alternatives_2 )
        {
        before(grammarAccess.getTareaRevisionProductosAccess().getAlternatives_2());
        // InternalTextualVerduler.g:3903:2: rule__TareaRevisionProductos__Alternatives_2
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Alternatives_2_in_rule__TareaRevisionProductos__Group__2__Impl7956);
        rule__TareaRevisionProductos__Alternatives_2();
        state._fsp--;
        }
        after(grammarAccess.getTareaRevisionProductosAccess().getAlternatives_2());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group__2__Impl"


// $ANTLR start "rule__TareaRevisionProductos__Group__3"
// InternalTextualVerduler.g:3913:1: rule__TareaRevisionProductos__Group__3 : rule__TareaRevisionProductos__Group__3__Impl ;
// Final step of the group: delegates to its body only (no further chaining).
public final void rule__TareaRevisionProductos__Group__3() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3918:2: rule__TareaRevisionProductos__Group__3__Impl
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group__3__Impl_in_rule__TareaRevisionProductos__Group__37986);
        rule__TareaRevisionProductos__Group__3__Impl();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group__3"


// $ANTLR start "rule__TareaRevisionProductos__Group__3__Impl"
// InternalTextualVerduler.g:3924:1: rule__TareaRevisionProductos__Group__3__Impl : ( ( rule__TareaRevisionProductos__ProductosRevisadosAssignment_3 ) ) ;
// Step 3 body: one ProductosRevisados assignment (index 3).
public final void rule__TareaRevisionProductos__Group__3__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3929:1: ( ( rule__TareaRevisionProductos__ProductosRevisadosAssignment_3 ) )
        {
        // InternalTextualVerduler.g:3930:1: ( rule__TareaRevisionProductos__ProductosRevisadosAssignment_3 )
        {
        before(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosAssignment_3());
        // InternalTextualVerduler.g:3931:2: rule__TareaRevisionProductos__ProductosRevisadosAssignment_3
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__ProductosRevisadosAssignment_3_in_rule__TareaRevisionProductos__Group__3__Impl8013);
        rule__TareaRevisionProductos__ProductosRevisadosAssignment_3();
        state._fsp--;
        }
        after(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosAssignment_3());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group__3__Impl"


// $ANTLR start "rule__TareaRevisionProductos__Group_1__0"
// InternalTextualVerduler.g:3949:1: rule__TareaRevisionProductos__Group_1__0 : rule__TareaRevisionProductos__Group_1__0__Impl rule__TareaRevisionProductos__Group_1__1 ;
// One iteration of the repeated Group_1 (see loop29 above): step 0 then step 1.
public final void rule__TareaRevisionProductos__Group_1__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3954:2: rule__TareaRevisionProductos__Group_1__0__Impl rule__TareaRevisionProductos__Group_1__1
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group_1__0__Impl_in_rule__TareaRevisionProductos__Group_1__08051);
        rule__TareaRevisionProductos__Group_1__0__Impl();
        state._fsp--;
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group_1__1_in_rule__TareaRevisionProductos__Group_1__08054);
        rule__TareaRevisionProductos__Group_1__1();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group_1__0"


// $ANTLR start "rule__TareaRevisionProductos__Group_1__0__Impl"
// InternalTextualVerduler.g:3961:1: rule__TareaRevisionProductos__Group_1__0__Impl : ( ( rule__TareaRevisionProductos__Alternatives_1_0 ) ) ;
// Group_1 step 0 body: one occurrence of Alternatives_1_0.
public final void rule__TareaRevisionProductos__Group_1__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3966:1: ( ( rule__TareaRevisionProductos__Alternatives_1_0 ) )
        {
        // InternalTextualVerduler.g:3967:1: ( rule__TareaRevisionProductos__Alternatives_1_0 )
        {
        before(grammarAccess.getTareaRevisionProductosAccess().getAlternatives_1_0());
        // InternalTextualVerduler.g:3968:2: rule__TareaRevisionProductos__Alternatives_1_0
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Alternatives_1_0_in_rule__TareaRevisionProductos__Group_1__0__Impl8081);
        rule__TareaRevisionProductos__Alternatives_1_0();
        state._fsp--;
        }
        after(grammarAccess.getTareaRevisionProductosAccess().getAlternatives_1_0());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group_1__0__Impl"


// $ANTLR start "rule__TareaRevisionProductos__Group_1__1"
// InternalTextualVerduler.g:3978:1: rule__TareaRevisionProductos__Group_1__1 : rule__TareaRevisionProductos__Group_1__1__Impl rule__TareaRevisionProductos__Group_1__2 ;
// Group_1 step 1: body then chain to step 2.
public final void rule__TareaRevisionProductos__Group_1__1() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3983:2: rule__TareaRevisionProductos__Group_1__1__Impl rule__TareaRevisionProductos__Group_1__2
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group_1__1__Impl_in_rule__TareaRevisionProductos__Group_1__18111);
        rule__TareaRevisionProductos__Group_1__1__Impl();
        state._fsp--;
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group_1__2_in_rule__TareaRevisionProductos__Group_1__18114);
        rule__TareaRevisionProductos__Group_1__2();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group_1__1"


// $ANTLR start "rule__TareaRevisionProductos__Group_1__1__Impl"
// InternalTextualVerduler.g:3990:1: rule__TareaRevisionProductos__Group_1__1__Impl : ( ( rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1 ) ) ;
// Group_1 step 1 body: one ProductosRevisados assignment (index 1_1).
public final void rule__TareaRevisionProductos__Group_1__1__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:3995:1: ( ( rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1 ) )
        {
        // InternalTextualVerduler.g:3996:1: ( rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1 )
        {
        before(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosAssignment_1_1());
        // InternalTextualVerduler.g:3997:2: rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1_in_rule__TareaRevisionProductos__Group_1__1__Impl8141);
        rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1();
        state._fsp--;
        }
        after(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosAssignment_1_1());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group_1__1__Impl"


// $ANTLR start "rule__TareaRevisionProductos__Group_1__2"
// InternalTextualVerduler.g:4007:1: rule__TareaRevisionProductos__Group_1__2 : rule__TareaRevisionProductos__Group_1__2__Impl ;
// Group_1 step 2: final step, delegates to its body only.
public final void rule__TareaRevisionProductos__Group_1__2() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4012:2: rule__TareaRevisionProductos__Group_1__2__Impl
        {
        pushFollow(FOLLOW_rule__TareaRevisionProductos__Group_1__2__Impl_in_rule__TareaRevisionProductos__Group_1__28171);
        rule__TareaRevisionProductos__Group_1__2__Impl();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group_1__2"


// $ANTLR start "rule__TareaRevisionProductos__Group_1__2__Impl"
// InternalTextualVerduler.g:4018:1: rule__TareaRevisionProductos__Group_1__2__Impl : ( ',' ) ;
// Group_1 step 2 body: consumes the ',' keyword (token 47).
public final void rule__TareaRevisionProductos__Group_1__2__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4023:1: ( ',' )
        {
        // InternalTextualVerduler.g:4024:1: ','
        {
        before(grammarAccess.getTareaRevisionProductosAccess().getCommaKeyword_1_2());
        match(input,47,FOLLOW_47_in_rule__TareaRevisionProductos__Group_1__2__Impl8199);
        after(grammarAccess.getTareaRevisionProductosAccess().getCommaKeyword_1_2());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaRevisionProductos__Group_1__2__Impl"


// $ANTLR start "rule__TareaArqueoCaja__Group__0"
// InternalTextualVerduler.g:4043:1: rule__TareaArqueoCaja__Group__0 : rule__TareaArqueoCaja__Group__0__Impl rule__TareaArqueoCaja__Group__1 ;
// TareaArqueoCaja group: step 0 (model-element action) then step 1 (keyword).
public final void rule__TareaArqueoCaja__Group__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4048:2: rule__TareaArqueoCaja__Group__0__Impl rule__TareaArqueoCaja__Group__1
        {
        pushFollow(FOLLOW_rule__TareaArqueoCaja__Group__0__Impl_in_rule__TareaArqueoCaja__Group__08236);
        rule__TareaArqueoCaja__Group__0__Impl();
        state._fsp--;
        pushFollow(FOLLOW_rule__TareaArqueoCaja__Group__1_in_rule__TareaArqueoCaja__Group__08239);
        rule__TareaArqueoCaja__Group__1();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaArqueoCaja__Group__0"


// $ANTLR start "rule__TareaArqueoCaja__Group__0__Impl"
// InternalTextualVerduler.g:4055:1: rule__TareaArqueoCaja__Group__0__Impl : ( () ) ;
// Empty grammar action "()": consumes no input, so no catch clause is generated --
// only the before/after callbacks around an empty block.
public final void rule__TareaArqueoCaja__Group__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4060:1: ( () )
        {
        // InternalTextualVerduler.g:4061:1: ()
        {
        before(grammarAccess.getTareaArqueoCajaAccess().getTareaArqueoCajaAction_0());
        // InternalTextualVerduler.g:4064:1: (empty)
        {
        }
        after(grammarAccess.getTareaArqueoCajaAccess().getTareaArqueoCajaAction_0());
        }
        }
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaArqueoCaja__Group__0__Impl"


// $ANTLR start "rule__TareaArqueoCaja__Group__1"
// InternalTextualVerduler.g:4074:1: rule__TareaArqueoCaja__Group__1 : rule__TareaArqueoCaja__Group__1__Impl ;
public final void rule__TareaArqueoCaja__Group__1() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4079:2: rule__TareaArqueoCaja__Group__1__Impl
        {
        pushFollow(FOLLOW_rule__TareaArqueoCaja__Group__1__Impl_in_rule__TareaArqueoCaja__Group__18297);
        rule__TareaArqueoCaja__Group__1__Impl();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaArqueoCaja__Group__1"


// $ANTLR start "rule__TareaArqueoCaja__Group__1__Impl"
// InternalTextualVerduler.g:4085:1: rule__TareaArqueoCaja__Group__1__Impl : ( 'se hizo el arqueo de caja' ) ;
// Consumes the 'se hizo el arqueo de caja' keyword (token 57).
public final void rule__TareaArqueoCaja__Group__1__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4090:1: ( 'se hizo el arqueo de caja' )
        {
        // InternalTextualVerduler.g:4091:1: 'se hizo el arqueo de caja'
        {
        before(grammarAccess.getTareaArqueoCajaAccess().getSeHizoElArqueoDeCajaKeyword_1());
        match(input,57,FOLLOW_57_in_rule__TareaArqueoCaja__Group__1__Impl8325);
        after(grammarAccess.getTareaArqueoCajaAccess().getSeHizoElArqueoDeCajaKeyword_1());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaArqueoCaja__Group__1__Impl"


// $ANTLR start "rule__TareaLimpiezaLocal__Group__0"
// InternalTextualVerduler.g:4108:1: rule__TareaLimpiezaLocal__Group__0 : rule__TareaLimpiezaLocal__Group__0__Impl rule__TareaLimpiezaLocal__Group__1 ;
// TareaLimpiezaLocal group: step 0 (model-element action) then step 1 (keyword).
public final void rule__TareaLimpiezaLocal__Group__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4113:2: rule__TareaLimpiezaLocal__Group__0__Impl rule__TareaLimpiezaLocal__Group__1
        {
        pushFollow(FOLLOW_rule__TareaLimpiezaLocal__Group__0__Impl_in_rule__TareaLimpiezaLocal__Group__08360);
        rule__TareaLimpiezaLocal__Group__0__Impl();
        state._fsp--;
        pushFollow(FOLLOW_rule__TareaLimpiezaLocal__Group__1_in_rule__TareaLimpiezaLocal__Group__08363);
        rule__TareaLimpiezaLocal__Group__1();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaLimpiezaLocal__Group__0"


// $ANTLR start "rule__TareaLimpiezaLocal__Group__0__Impl"
// InternalTextualVerduler.g:4120:1: rule__TareaLimpiezaLocal__Group__0__Impl : ( () ) ;
// Empty grammar action "()": consumes no input; no catch clause generated.
public final void rule__TareaLimpiezaLocal__Group__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4125:1: ( () )
        {
        // InternalTextualVerduler.g:4126:1: ()
        {
        before(grammarAccess.getTareaLimpiezaLocalAccess().getTareaLimpiezaLocalAction_0());
        // InternalTextualVerduler.g:4129:1: (empty)
        {
        }
        after(grammarAccess.getTareaLimpiezaLocalAccess().getTareaLimpiezaLocalAction_0());
        }
        }
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaLimpiezaLocal__Group__0__Impl"


// $ANTLR start "rule__TareaLimpiezaLocal__Group__1"
// InternalTextualVerduler.g:4139:1: rule__TareaLimpiezaLocal__Group__1 : rule__TareaLimpiezaLocal__Group__1__Impl ;
public final void rule__TareaLimpiezaLocal__Group__1() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4144:2: rule__TareaLimpiezaLocal__Group__1__Impl
        {
        pushFollow(FOLLOW_rule__TareaLimpiezaLocal__Group__1__Impl_in_rule__TareaLimpiezaLocal__Group__18421);
        rule__TareaLimpiezaLocal__Group__1__Impl();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaLimpiezaLocal__Group__1"


// $ANTLR start "rule__TareaLimpiezaLocal__Group__1__Impl"
// InternalTextualVerduler.g:4150:1: rule__TareaLimpiezaLocal__Group__1__Impl : ( 'se limpio el local' ) ;
// Consumes the 'se limpio el local' keyword (token 58).
public final void rule__TareaLimpiezaLocal__Group__1__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4155:1: ( 'se limpio el local' )
        {
        // InternalTextualVerduler.g:4156:1: 'se limpio el local'
        {
        before(grammarAccess.getTareaLimpiezaLocalAccess().getSeLimpioElLocalKeyword_1());
        match(input,58,FOLLOW_58_in_rule__TareaLimpiezaLocal__Group__1__Impl8449);
        after(grammarAccess.getTareaLimpiezaLocalAccess().getSeLimpioElLocalKeyword_1());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__TareaLimpiezaLocal__Group__1__Impl"


// $ANTLR start "rule__LapsoTiempoNumerico__Group_0__0"
// InternalTextualVerduler.g:4173:1: rule__LapsoTiempoNumerico__Group_0__0 : rule__LapsoTiempoNumerico__Group_0__0__Impl rule__LapsoTiempoNumerico__Group_0__1 ;
// LapsoTiempoNumerico "minutes" form: minutes value then 'minutos' keyword.
public final void rule__LapsoTiempoNumerico__Group_0__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4178:2: rule__LapsoTiempoNumerico__Group_0__0__Impl rule__LapsoTiempoNumerico__Group_0__1
        {
        pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_0__0__Impl_in_rule__LapsoTiempoNumerico__Group_0__08484);
        rule__LapsoTiempoNumerico__Group_0__0__Impl();
        state._fsp--;
        pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_0__1_in_rule__LapsoTiempoNumerico__Group_0__08487);
        rule__LapsoTiempoNumerico__Group_0__1();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__LapsoTiempoNumerico__Group_0__0"


// $ANTLR start "rule__LapsoTiempoNumerico__Group_0__0__Impl"
// InternalTextualVerduler.g:4185:1: rule__LapsoTiempoNumerico__Group_0__0__Impl : ( ( rule__LapsoTiempoNumerico__MinutosAssignment_0_0 ) ) ;
// Step 0 body: one Minutos assignment.
public final void rule__LapsoTiempoNumerico__Group_0__0__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4190:1: ( ( rule__LapsoTiempoNumerico__MinutosAssignment_0_0 ) )
        {
        // InternalTextualVerduler.g:4191:1: ( rule__LapsoTiempoNumerico__MinutosAssignment_0_0 )
        {
        before(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosAssignment_0_0());
        // InternalTextualVerduler.g:4192:2: rule__LapsoTiempoNumerico__MinutosAssignment_0_0
        {
        pushFollow(FOLLOW_rule__LapsoTiempoNumerico__MinutosAssignment_0_0_in_rule__LapsoTiempoNumerico__Group_0__0__Impl8514);
        rule__LapsoTiempoNumerico__MinutosAssignment_0_0();
        state._fsp--;
        }
        after(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosAssignment_0_0());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__LapsoTiempoNumerico__Group_0__0__Impl"


// $ANTLR start "rule__LapsoTiempoNumerico__Group_0__1"
// InternalTextualVerduler.g:4202:1: rule__LapsoTiempoNumerico__Group_0__1 : rule__LapsoTiempoNumerico__Group_0__1__Impl ;
public final void rule__LapsoTiempoNumerico__Group_0__1() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4207:2: rule__LapsoTiempoNumerico__Group_0__1__Impl
        {
        pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_0__1__Impl_in_rule__LapsoTiempoNumerico__Group_0__18544);
        rule__LapsoTiempoNumerico__Group_0__1__Impl();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__LapsoTiempoNumerico__Group_0__1"


// $ANTLR start "rule__LapsoTiempoNumerico__Group_0__1__Impl"
// InternalTextualVerduler.g:4213:1: rule__LapsoTiempoNumerico__Group_0__1__Impl : ( 'minutos' ) ;
// Consumes the 'minutos' keyword (token 59).
public final void rule__LapsoTiempoNumerico__Group_0__1__Impl() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4218:1: ( 'minutos' )
        {
        // InternalTextualVerduler.g:4219:1: 'minutos'
        {
        before(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosKeyword_0_1());
        match(input,59,FOLLOW_59_in_rule__LapsoTiempoNumerico__Group_0__1__Impl8572);
        after(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosKeyword_0_1());
        }
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__LapsoTiempoNumerico__Group_0__1__Impl"


// $ANTLR start "rule__LapsoTiempoNumerico__Group_1__0"
// InternalTextualVerduler.g:4236:1: rule__LapsoTiempoNumerico__Group_1__0 : rule__LapsoTiempoNumerico__Group_1__0__Impl rule__LapsoTiempoNumerico__Group_1__1 ;
// LapsoTiempoNumerico "hours" form: hours value then step 1.
public final void rule__LapsoTiempoNumerico__Group_1__0() throws RecognitionException {
    int stackSize = keepStackSize();
    try {
        // InternalTextualVerduler.g:4241:2: rule__LapsoTiempoNumerico__Group_1__0__Impl rule__LapsoTiempoNumerico__Group_1__1
        {
        pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1__0__Impl_in_rule__LapsoTiempoNumerico__Group_1__08607);
        rule__LapsoTiempoNumerico__Group_1__0__Impl();
        state._fsp--;
        pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1__1_in_rule__LapsoTiempoNumerico__Group_1__08610);
        rule__LapsoTiempoNumerico__Group_1__1();
        state._fsp--;
        }
    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__LapsoTiempoNumerico__Group_1__0"


// $ANTLR start "rule__LapsoTiempoNumerico__Group_1__0__Impl"
// InternalTextualVerduler.g:4248:1: rule__LapsoTiempoNumerico__Group_1__0__Impl : ( ( rule__LapsoTiempoNumerico__HorasAssignment_1_0 ) ) ;
public final void rule__LapsoTiempoNumerico__Group_1__0__Impl() throws RecognitionException {
    int
stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4252:1: ( ( ( rule__LapsoTiempoNumerico__HorasAssignment_1_0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4253:1: ( ( rule__LapsoTiempoNumerico__HorasAssignment_1_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4253:1: ( ( rule__LapsoTiempoNumerico__HorasAssignment_1_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4254:1: ( rule__LapsoTiempoNumerico__HorasAssignment_1_0 ) { before(grammarAccess.getLapsoTiempoNumericoAccess().getHorasAssignment_1_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4255:1: ( rule__LapsoTiempoNumerico__HorasAssignment_1_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4255:2: rule__LapsoTiempoNumerico__HorasAssignment_1_0 { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__HorasAssignment_1_0_in_rule__LapsoTiempoNumerico__Group_1__0__Impl8637); rule__LapsoTiempoNumerico__HorasAssignment_1_0(); state._fsp--; } after(grammarAccess.getLapsoTiempoNumericoAccess().getHorasAssignment_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1__0__Impl" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4265:1: rule__LapsoTiempoNumerico__Group_1__1 : 
rule__LapsoTiempoNumerico__Group_1__1__Impl rule__LapsoTiempoNumerico__Group_1__2 ; public final void rule__LapsoTiempoNumerico__Group_1__1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4269:1: ( rule__LapsoTiempoNumerico__Group_1__1__Impl rule__LapsoTiempoNumerico__Group_1__2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4270:2: rule__LapsoTiempoNumerico__Group_1__1__Impl rule__LapsoTiempoNumerico__Group_1__2 { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1__1__Impl_in_rule__LapsoTiempoNumerico__Group_1__18667); rule__LapsoTiempoNumerico__Group_1__1__Impl(); state._fsp--; pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1__2_in_rule__LapsoTiempoNumerico__Group_1__18670); rule__LapsoTiempoNumerico__Group_1__2(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1__1" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4277:1: rule__LapsoTiempoNumerico__Group_1__1__Impl : ( ( rule__LapsoTiempoNumerico__Alternatives_1_1 ) ) ; public final void rule__LapsoTiempoNumerico__Group_1__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4281:1: ( ( ( rule__LapsoTiempoNumerico__Alternatives_1_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4282:1: ( ( rule__LapsoTiempoNumerico__Alternatives_1_1 ) ) 
{ // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4282:1: ( ( rule__LapsoTiempoNumerico__Alternatives_1_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4283:1: ( rule__LapsoTiempoNumerico__Alternatives_1_1 ) { before(grammarAccess.getLapsoTiempoNumericoAccess().getAlternatives_1_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4284:1: ( rule__LapsoTiempoNumerico__Alternatives_1_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4284:2: rule__LapsoTiempoNumerico__Alternatives_1_1 { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Alternatives_1_1_in_rule__LapsoTiempoNumerico__Group_1__1__Impl8697); rule__LapsoTiempoNumerico__Alternatives_1_1(); state._fsp--; } after(grammarAccess.getLapsoTiempoNumericoAccess().getAlternatives_1_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1__1__Impl" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1__2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4294:1: rule__LapsoTiempoNumerico__Group_1__2 : rule__LapsoTiempoNumerico__Group_1__2__Impl ; public final void rule__LapsoTiempoNumerico__Group_1__2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4298:1: ( rule__LapsoTiempoNumerico__Group_1__2__Impl ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4299:2: rule__LapsoTiempoNumerico__Group_1__2__Impl { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1__2__Impl_in_rule__LapsoTiempoNumerico__Group_1__28727); rule__LapsoTiempoNumerico__Group_1__2__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1__2" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1__2__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4305:1: rule__LapsoTiempoNumerico__Group_1__2__Impl : ( ( rule__LapsoTiempoNumerico__Group_1_2__0 )? ) ; public final void rule__LapsoTiempoNumerico__Group_1__2__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4309:1: ( ( ( rule__LapsoTiempoNumerico__Group_1_2__0 )? ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4310:1: ( ( rule__LapsoTiempoNumerico__Group_1_2__0 )? ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4310:1: ( ( rule__LapsoTiempoNumerico__Group_1_2__0 )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4311:1: ( rule__LapsoTiempoNumerico__Group_1_2__0 )? { before(grammarAccess.getLapsoTiempoNumericoAccess().getGroup_1_2()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4312:1: ( rule__LapsoTiempoNumerico__Group_1_2__0 )? 
int alt30=2; int LA30_0 = input.LA(1); if ( (LA30_0==54) ) { alt30=1; } switch (alt30) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4312:2: rule__LapsoTiempoNumerico__Group_1_2__0 { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__0_in_rule__LapsoTiempoNumerico__Group_1__2__Impl8754); rule__LapsoTiempoNumerico__Group_1_2__0(); state._fsp--; } break; } after(grammarAccess.getLapsoTiempoNumericoAccess().getGroup_1_2()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1__2__Impl" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1_2__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4328:1: rule__LapsoTiempoNumerico__Group_1_2__0 : rule__LapsoTiempoNumerico__Group_1_2__0__Impl rule__LapsoTiempoNumerico__Group_1_2__1 ; public final void rule__LapsoTiempoNumerico__Group_1_2__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4332:1: ( rule__LapsoTiempoNumerico__Group_1_2__0__Impl rule__LapsoTiempoNumerico__Group_1_2__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4333:2: rule__LapsoTiempoNumerico__Group_1_2__0__Impl rule__LapsoTiempoNumerico__Group_1_2__1 { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__0__Impl_in_rule__LapsoTiempoNumerico__Group_1_2__08791); rule__LapsoTiempoNumerico__Group_1_2__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__1_in_rule__LapsoTiempoNumerico__Group_1_2__08794); rule__LapsoTiempoNumerico__Group_1_2__1(); state._fsp--; } } catch 
(RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1_2__0" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1_2__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4340:1: rule__LapsoTiempoNumerico__Group_1_2__0__Impl : ( 'y' ) ; public final void rule__LapsoTiempoNumerico__Group_1_2__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4344:1: ( ( 'y' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4345:1: ( 'y' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4345:1: ( 'y' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4346:1: 'y' { before(grammarAccess.getLapsoTiempoNumericoAccess().getYKeyword_1_2_0()); match(input,54,FOLLOW_54_in_rule__LapsoTiempoNumerico__Group_1_2__0__Impl8822); after(grammarAccess.getLapsoTiempoNumericoAccess().getYKeyword_1_2_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1_2__0__Impl" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1_2__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4359:1: rule__LapsoTiempoNumerico__Group_1_2__1 : rule__LapsoTiempoNumerico__Group_1_2__1__Impl rule__LapsoTiempoNumerico__Group_1_2__2 ; public final void rule__LapsoTiempoNumerico__Group_1_2__1() throws 
RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4363:1: ( rule__LapsoTiempoNumerico__Group_1_2__1__Impl rule__LapsoTiempoNumerico__Group_1_2__2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4364:2: rule__LapsoTiempoNumerico__Group_1_2__1__Impl rule__LapsoTiempoNumerico__Group_1_2__2 { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__1__Impl_in_rule__LapsoTiempoNumerico__Group_1_2__18853); rule__LapsoTiempoNumerico__Group_1_2__1__Impl(); state._fsp--; pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__2_in_rule__LapsoTiempoNumerico__Group_1_2__18856); rule__LapsoTiempoNumerico__Group_1_2__2(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1_2__1" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1_2__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4371:1: rule__LapsoTiempoNumerico__Group_1_2__1__Impl : ( ( rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1 ) ) ; public final void rule__LapsoTiempoNumerico__Group_1_2__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4375:1: ( ( ( rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4376:1: ( ( rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1 ) ) { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4376:1: ( ( rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4377:1: ( rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1 ) { before(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosAssignment_1_2_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4378:1: ( rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4378:2: rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1 { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1_in_rule__LapsoTiempoNumerico__Group_1_2__1__Impl8883); rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1(); state._fsp--; } after(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosAssignment_1_2_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1_2__1__Impl" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1_2__2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4388:1: rule__LapsoTiempoNumerico__Group_1_2__2 : rule__LapsoTiempoNumerico__Group_1_2__2__Impl ; public final void rule__LapsoTiempoNumerico__Group_1_2__2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4392:1: ( rule__LapsoTiempoNumerico__Group_1_2__2__Impl ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4393:2: rule__LapsoTiempoNumerico__Group_1_2__2__Impl { pushFollow(FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__2__Impl_in_rule__LapsoTiempoNumerico__Group_1_2__28913); rule__LapsoTiempoNumerico__Group_1_2__2__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__Group_1_2__2" // $ANTLR start "rule__LapsoTiempoNumerico__Group_1_2__2__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4399:1: rule__LapsoTiempoNumerico__Group_1_2__2__Impl : ( 'minutos' ) ; public final void rule__LapsoTiempoNumerico__Group_1_2__2__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4403:1: ( ( 'minutos' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4404:1: ( 'minutos' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4404:1: ( 'minutos' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4405:1: 'minutos' { before(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosKeyword_1_2_2()); match(input,59,FOLLOW_59_in_rule__LapsoTiempoNumerico__Group_1_2__2__Impl8941); after(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosKeyword_1_2_2()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end 
"rule__LapsoTiempoNumerico__Group_1_2__2__Impl" // $ANTLR start "rule__LapsoTiempoLiteral__Group__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4424:1: rule__LapsoTiempoLiteral__Group__0 : rule__LapsoTiempoLiteral__Group__0__Impl rule__LapsoTiempoLiteral__Group__1 ; public final void rule__LapsoTiempoLiteral__Group__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4428:1: ( rule__LapsoTiempoLiteral__Group__0__Impl rule__LapsoTiempoLiteral__Group__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4429:2: rule__LapsoTiempoLiteral__Group__0__Impl rule__LapsoTiempoLiteral__Group__1 { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group__0__Impl_in_rule__LapsoTiempoLiteral__Group__08978); rule__LapsoTiempoLiteral__Group__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group__1_in_rule__LapsoTiempoLiteral__Group__08981); rule__LapsoTiempoLiteral__Group__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group__0" // $ANTLR start "rule__LapsoTiempoLiteral__Group__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4436:1: rule__LapsoTiempoLiteral__Group__0__Impl : ( ( rule__LapsoTiempoLiteral__HorasAssignment_0 ) ) ; public final void rule__LapsoTiempoLiteral__Group__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4440:1: ( ( ( 
rule__LapsoTiempoLiteral__HorasAssignment_0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4441:1: ( ( rule__LapsoTiempoLiteral__HorasAssignment_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4441:1: ( ( rule__LapsoTiempoLiteral__HorasAssignment_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4442:1: ( rule__LapsoTiempoLiteral__HorasAssignment_0 ) { before(grammarAccess.getLapsoTiempoLiteralAccess().getHorasAssignment_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4443:1: ( rule__LapsoTiempoLiteral__HorasAssignment_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4443:2: rule__LapsoTiempoLiteral__HorasAssignment_0 { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__HorasAssignment_0_in_rule__LapsoTiempoLiteral__Group__0__Impl9008); rule__LapsoTiempoLiteral__HorasAssignment_0(); state._fsp--; } after(grammarAccess.getLapsoTiempoLiteralAccess().getHorasAssignment_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group__0__Impl" // $ANTLR start "rule__LapsoTiempoLiteral__Group__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4453:1: rule__LapsoTiempoLiteral__Group__1 : rule__LapsoTiempoLiteral__Group__1__Impl rule__LapsoTiempoLiteral__Group__2 ; public final void rule__LapsoTiempoLiteral__Group__1() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4457:1: ( rule__LapsoTiempoLiteral__Group__1__Impl rule__LapsoTiempoLiteral__Group__2 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4458:2: rule__LapsoTiempoLiteral__Group__1__Impl rule__LapsoTiempoLiteral__Group__2 { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group__1__Impl_in_rule__LapsoTiempoLiteral__Group__19038); rule__LapsoTiempoLiteral__Group__1__Impl(); state._fsp--; pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group__2_in_rule__LapsoTiempoLiteral__Group__19041); rule__LapsoTiempoLiteral__Group__2(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group__1" // $ANTLR start "rule__LapsoTiempoLiteral__Group__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4465:1: rule__LapsoTiempoLiteral__Group__1__Impl : ( ( rule__LapsoTiempoLiteral__Alternatives_1 ) ) ; public final void rule__LapsoTiempoLiteral__Group__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4469:1: ( ( ( rule__LapsoTiempoLiteral__Alternatives_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4470:1: ( ( rule__LapsoTiempoLiteral__Alternatives_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4470:1: ( ( rule__LapsoTiempoLiteral__Alternatives_1 ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4471:1: ( rule__LapsoTiempoLiteral__Alternatives_1 ) { before(grammarAccess.getLapsoTiempoLiteralAccess().getAlternatives_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4472:1: ( rule__LapsoTiempoLiteral__Alternatives_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4472:2: rule__LapsoTiempoLiteral__Alternatives_1 { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Alternatives_1_in_rule__LapsoTiempoLiteral__Group__1__Impl9068); rule__LapsoTiempoLiteral__Alternatives_1(); state._fsp--; } after(grammarAccess.getLapsoTiempoLiteralAccess().getAlternatives_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group__1__Impl" // $ANTLR start "rule__LapsoTiempoLiteral__Group__2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4482:1: rule__LapsoTiempoLiteral__Group__2 : rule__LapsoTiempoLiteral__Group__2__Impl ; public final void rule__LapsoTiempoLiteral__Group__2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4486:1: ( rule__LapsoTiempoLiteral__Group__2__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4487:2: rule__LapsoTiempoLiteral__Group__2__Impl { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group__2__Impl_in_rule__LapsoTiempoLiteral__Group__29098); rule__LapsoTiempoLiteral__Group__2__Impl(); state._fsp--; } } catch 
(RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group__2" // $ANTLR start "rule__LapsoTiempoLiteral__Group__2__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4493:1: rule__LapsoTiempoLiteral__Group__2__Impl : ( ( rule__LapsoTiempoLiteral__Group_2__0 )? ) ; public final void rule__LapsoTiempoLiteral__Group__2__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4497:1: ( ( ( rule__LapsoTiempoLiteral__Group_2__0 )? ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4498:1: ( ( rule__LapsoTiempoLiteral__Group_2__0 )? ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4498:1: ( ( rule__LapsoTiempoLiteral__Group_2__0 )? ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4499:1: ( rule__LapsoTiempoLiteral__Group_2__0 )? { before(grammarAccess.getLapsoTiempoLiteralAccess().getGroup_2()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4500:1: ( rule__LapsoTiempoLiteral__Group_2__0 )? 
int alt31=2; int LA31_0 = input.LA(1); if ( (LA31_0==54) ) { alt31=1; } switch (alt31) { case 1 : // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4500:2: rule__LapsoTiempoLiteral__Group_2__0 { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group_2__0_in_rule__LapsoTiempoLiteral__Group__2__Impl9125); rule__LapsoTiempoLiteral__Group_2__0(); state._fsp--; } break; } after(grammarAccess.getLapsoTiempoLiteralAccess().getGroup_2()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group__2__Impl" // $ANTLR start "rule__LapsoTiempoLiteral__Group_2__0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4516:1: rule__LapsoTiempoLiteral__Group_2__0 : rule__LapsoTiempoLiteral__Group_2__0__Impl rule__LapsoTiempoLiteral__Group_2__1 ; public final void rule__LapsoTiempoLiteral__Group_2__0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4520:1: ( rule__LapsoTiempoLiteral__Group_2__0__Impl rule__LapsoTiempoLiteral__Group_2__1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4521:2: rule__LapsoTiempoLiteral__Group_2__0__Impl rule__LapsoTiempoLiteral__Group_2__1 { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group_2__0__Impl_in_rule__LapsoTiempoLiteral__Group_2__09162); rule__LapsoTiempoLiteral__Group_2__0__Impl(); state._fsp--; pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group_2__1_in_rule__LapsoTiempoLiteral__Group_2__09165); rule__LapsoTiempoLiteral__Group_2__1(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { 
restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group_2__0" // $ANTLR start "rule__LapsoTiempoLiteral__Group_2__0__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4528:1: rule__LapsoTiempoLiteral__Group_2__0__Impl : ( 'y' ) ; public final void rule__LapsoTiempoLiteral__Group_2__0__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4532:1: ( ( 'y' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4533:1: ( 'y' ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4533:1: ( 'y' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4534:1: 'y' { before(grammarAccess.getLapsoTiempoLiteralAccess().getYKeyword_2_0()); match(input,54,FOLLOW_54_in_rule__LapsoTiempoLiteral__Group_2__0__Impl9193); after(grammarAccess.getLapsoTiempoLiteralAccess().getYKeyword_2_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group_2__0__Impl" // $ANTLR start "rule__LapsoTiempoLiteral__Group_2__1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4547:1: rule__LapsoTiempoLiteral__Group_2__1 : rule__LapsoTiempoLiteral__Group_2__1__Impl ; public final void rule__LapsoTiempoLiteral__Group_2__1() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4551:1: ( rule__LapsoTiempoLiteral__Group_2__1__Impl ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4552:2: rule__LapsoTiempoLiteral__Group_2__1__Impl { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__Group_2__1__Impl_in_rule__LapsoTiempoLiteral__Group_2__19224); rule__LapsoTiempoLiteral__Group_2__1__Impl(); state._fsp--; } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group_2__1" // $ANTLR start "rule__LapsoTiempoLiteral__Group_2__1__Impl" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4558:1: rule__LapsoTiempoLiteral__Group_2__1__Impl : ( ( rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1 ) ) ; public final void rule__LapsoTiempoLiteral__Group_2__1__Impl() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4562:1: ( ( ( rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4563:1: ( ( rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4563:1: ( ( rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4564:1: ( rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1 ) { 
before(grammarAccess.getLapsoTiempoLiteralAccess().getFraccionHoraAssignment_2_1()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4565:1: ( rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4565:2: rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1 { pushFollow(FOLLOW_rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1_in_rule__LapsoTiempoLiteral__Group_2__1__Impl9251); rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1(); state._fsp--; } after(grammarAccess.getLapsoTiempoLiteralAccess().getFraccionHoraAssignment_2_1()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__Group_2__1__Impl" // $ANTLR start "rule__Verduleria__ClientesAssignment_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4580:1: rule__Verduleria__ClientesAssignment_0 : ( ruleCliente ) ; public final void rule__Verduleria__ClientesAssignment_0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4584:1: ( ( ruleCliente ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4585:1: ( ruleCliente ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4585:1: ( ruleCliente ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4586:1: ruleCliente { 
        before(grammarAccess.getVerduleriaAccess().getClientesClienteParserRuleCall_0_0());
        pushFollow(FOLLOW_ruleCliente_in_rule__Verduleria__ClientesAssignment_09290);
        ruleCliente();

        state._fsp--;

        after(grammarAccess.getVerduleriaAccess().getClientesClienteParserRuleCall_0_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Verduleria__ClientesAssignment_0"


// Assignment rule: Verduleria.productos += ProductoConPrecio.
// $ANTLR start "rule__Verduleria__ProductosAssignment_1"
// InternalTextualVerduler.g:4595:1: rule__Verduleria__ProductosAssignment_1 : ( ruleProductoConPrecio ) ;
public final void rule__Verduleria__ProductosAssignment_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4599:1: ( ( ruleProductoConPrecio ) )
        // InternalTextualVerduler.g:4600:1: ( ruleProductoConPrecio )
        {
        // InternalTextualVerduler.g:4600:1: ( ruleProductoConPrecio )
        // InternalTextualVerduler.g:4601:1: ruleProductoConPrecio
        {
        before(grammarAccess.getVerduleriaAccess().getProductosProductoConPrecioParserRuleCall_1_0());
        pushFollow(FOLLOW_ruleProductoConPrecio_in_rule__Verduleria__ProductosAssignment_19321);
        ruleProductoConPrecio();

        state._fsp--;

        after(grammarAccess.getVerduleriaAccess().getProductosProductoConPrecioParserRuleCall_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Verduleria__ProductosAssignment_1"


// Assignment rule: Verduleria.ventas += Venta.
// $ANTLR start "rule__Verduleria__VentasAssignment_2"
// InternalTextualVerduler.g:4610:1: rule__Verduleria__VentasAssignment_2 : ( ruleVenta ) ;
public final void rule__Verduleria__VentasAssignment_2() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4614:1: ( ( ruleVenta ) )
        // InternalTextualVerduler.g:4615:1: ( ruleVenta )
        {
        // InternalTextualVerduler.g:4615:1: ( ruleVenta )
        // InternalTextualVerduler.g:4616:1: ruleVenta
        {
        before(grammarAccess.getVerduleriaAccess().getVentasVentaParserRuleCall_2_0());
        pushFollow(FOLLOW_ruleVenta_in_rule__Verduleria__VentasAssignment_29352);
        ruleVenta();

        state._fsp--;

        after(grammarAccess.getVerduleriaAccess().getVentasVentaParserRuleCall_2_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Verduleria__VentasAssignment_2"


// Assignment rule: Verduleria.tareas += Tarea.
// $ANTLR start "rule__Verduleria__TareasAssignment_3"
// InternalTextualVerduler.g:4625:1: rule__Verduleria__TareasAssignment_3 : ( ruleTarea ) ;
public final void rule__Verduleria__TareasAssignment_3() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4629:1: ( ( ruleTarea ) )
        // InternalTextualVerduler.g:4630:1: ( ruleTarea )
        {
        // InternalTextualVerduler.g:4630:1: ( ruleTarea )
        // InternalTextualVerduler.g:4631:1: ruleTarea
        {
        before(grammarAccess.getVerduleriaAccess().getTareasTareaParserRuleCall_3_0());
        pushFollow(FOLLOW_ruleTarea_in_rule__Verduleria__TareasAssignment_39383);
        ruleTarea();

        state._fsp--;

        after(grammarAccess.getVerduleriaAccess().getTareasTareaParserRuleCall_3_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Verduleria__TareasAssignment_3"


// Assignment rule: ClienteConDeuda.name = ID terminal.
// $ANTLR start "rule__ClienteConDeuda__NameAssignment_1"
// InternalTextualVerduler.g:4640:1: rule__ClienteConDeuda__NameAssignment_1 : ( RULE_ID ) ;
public final void rule__ClienteConDeuda__NameAssignment_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4644:1: ( ( RULE_ID ) )
        // InternalTextualVerduler.g:4645:1: ( RULE_ID )
        {
        // InternalTextualVerduler.g:4645:1: ( RULE_ID )
        // InternalTextualVerduler.g:4646:1: RULE_ID
        {
        before(grammarAccess.getClienteConDeudaAccess().getNameIDTerminalRuleCall_1_0());
        match(input,RULE_ID,FOLLOW_RULE_ID_in_rule__ClienteConDeuda__NameAssignment_19414);
        after(grammarAccess.getClienteConDeudaAccess().getNameIDTerminalRuleCall_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ClienteConDeuda__NameAssignment_1"


// Assignment rule: ClienteConDeuda.montoDeuda = MontoDinero.
// $ANTLR start "rule__ClienteConDeuda__MontoDeudaAssignment_3"
// InternalTextualVerduler.g:4655:1: rule__ClienteConDeuda__MontoDeudaAssignment_3 : ( ruleMontoDinero ) ;
public final void rule__ClienteConDeuda__MontoDeudaAssignment_3() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4659:1: ( ( ruleMontoDinero ) )
        // InternalTextualVerduler.g:4660:1: ( ruleMontoDinero )
        {
        // InternalTextualVerduler.g:4660:1: ( ruleMontoDinero )
        // InternalTextualVerduler.g:4661:1: ruleMontoDinero
        {
        before(grammarAccess.getClienteConDeudaAccess().getMontoDeudaMontoDineroParserRuleCall_3_0());
        pushFollow(FOLLOW_ruleMontoDinero_in_rule__ClienteConDeuda__MontoDeudaAssignment_39445);
        ruleMontoDinero();

        state._fsp--;

        after(grammarAccess.getClienteConDeudaAccess().getMontoDeudaMontoDineroParserRuleCall_3_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ClienteConDeuda__MontoDeudaAssignment_3"


// Assignment rule: ClienteAlDia.name = ID terminal.
// $ANTLR start "rule__ClienteAlDia__NameAssignment_1"
// InternalTextualVerduler.g:4670:1: rule__ClienteAlDia__NameAssignment_1 : ( RULE_ID ) ;
public final void rule__ClienteAlDia__NameAssignment_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4674:1: ( ( RULE_ID ) )
        // InternalTextualVerduler.g:4675:1: ( RULE_ID )
        {
        // InternalTextualVerduler.g:4675:1: ( RULE_ID )
        // InternalTextualVerduler.g:4676:1: RULE_ID
        {
        before(grammarAccess.getClienteAlDiaAccess().getNameIDTerminalRuleCall_1_0());
        match(input,RULE_ID,FOLLOW_RULE_ID_in_rule__ClienteAlDia__NameAssignment_19476);
        after(grammarAccess.getClienteAlDiaAccess().getNameIDTerminalRuleCall_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ClienteAlDia__NameAssignment_1"


// Assignment rule: ClienteConCredito.name = ID terminal.
// $ANTLR start "rule__ClienteConCredito__NameAssignment_1"
// InternalTextualVerduler.g:4685:1: rule__ClienteConCredito__NameAssignment_1 : ( RULE_ID ) ;
public final void rule__ClienteConCredito__NameAssignment_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        //
        // InternalTextualVerduler.g:4689:1: ( ( RULE_ID ) )
        // InternalTextualVerduler.g:4690:1: ( RULE_ID )
        {
        // InternalTextualVerduler.g:4690:1: ( RULE_ID )
        // InternalTextualVerduler.g:4691:1: RULE_ID
        {
        before(grammarAccess.getClienteConCreditoAccess().getNameIDTerminalRuleCall_1_0());
        match(input,RULE_ID,FOLLOW_RULE_ID_in_rule__ClienteConCredito__NameAssignment_19507);
        after(grammarAccess.getClienteConCreditoAccess().getNameIDTerminalRuleCall_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ClienteConCredito__NameAssignment_1"


// Assignment rule: ClienteConCredito.montoCredito = MontoDinero.
// $ANTLR start "rule__ClienteConCredito__MontoCreditoAssignment_3"
// InternalTextualVerduler.g:4700:1: rule__ClienteConCredito__MontoCreditoAssignment_3 : ( ruleMontoDinero ) ;
public final void rule__ClienteConCredito__MontoCreditoAssignment_3() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4704:1: ( ( ruleMontoDinero ) )
        // InternalTextualVerduler.g:4705:1: ( ruleMontoDinero )
        {
        // InternalTextualVerduler.g:4705:1: ( ruleMontoDinero )
        // InternalTextualVerduler.g:4706:1: ruleMontoDinero
        {
        before(grammarAccess.getClienteConCreditoAccess().getMontoCreditoMontoDineroParserRuleCall_3_0());
        pushFollow(FOLLOW_ruleMontoDinero_in_rule__ClienteConCredito__MontoCreditoAssignment_39538);
        ruleMontoDinero();

        state._fsp--;

        after(grammarAccess.getClienteConCreditoAccess().getMontoCreditoMontoDineroParserRuleCall_3_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ClienteConCredito__MontoCreditoAssignment_3"


// Assignment rule: ProductoConPrecio.precio = PrecioPorPeso.
// $ANTLR start "rule__ProductoConPrecio__PrecioAssignment_2"
// InternalTextualVerduler.g:4715:1: rule__ProductoConPrecio__PrecioAssignment_2 : ( rulePrecioPorPeso ) ;
public final void rule__ProductoConPrecio__PrecioAssignment_2() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4719:1: ( ( rulePrecioPorPeso ) )
        // InternalTextualVerduler.g:4720:1: ( rulePrecioPorPeso )
        {
        // InternalTextualVerduler.g:4720:1: ( rulePrecioPorPeso )
        // InternalTextualVerduler.g:4721:1: rulePrecioPorPeso
        {
        before(grammarAccess.getProductoConPrecioAccess().getPrecioPrecioPorPesoParserRuleCall_2_0());
        pushFollow(FOLLOW_rulePrecioPorPeso_in_rule__ProductoConPrecio__PrecioAssignment_29569);
        rulePrecioPorPeso();

        state._fsp--;

        after(grammarAccess.getProductoConPrecioAccess().getPrecioPrecioPorPesoParserRuleCall_2_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__ProductoConPrecio__PrecioAssignment_2"


// Assignment rule: Producto.name = ID terminal.
// $ANTLR start "rule__Producto__NameAssignment_1"
// InternalTextualVerduler.g:4730:1: rule__Producto__NameAssignment_1 : ( RULE_ID ) ;
public final void rule__Producto__NameAssignment_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4734:1: ( ( RULE_ID ) )
        // InternalTextualVerduler.g:4735:1: ( RULE_ID )
        {
        // InternalTextualVerduler.g:4735:1: ( RULE_ID )
        // InternalTextualVerduler.g:4736:1: RULE_ID
        {
        before(grammarAccess.getProductoAccess().getNameIDTerminalRuleCall_1_0());
        match(input,RULE_ID,FOLLOW_RULE_ID_in_rule__Producto__NameAssignment_19600);
        after(grammarAccess.getProductoAccess().getNameIDTerminalRuleCall_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__Producto__NameAssignment_1"


// Assignment rule: PrecioPorPeso.precio = MontoDinero.
// $ANTLR start "rule__PrecioPorPeso__PrecioAssignment_0"
// InternalTextualVerduler.g:4745:1: rule__PrecioPorPeso__PrecioAssignment_0 : ( ruleMontoDinero ) ;
public final void rule__PrecioPorPeso__PrecioAssignment_0() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4749:1: ( ( ruleMontoDinero ) )
        // InternalTextualVerduler.g:4750:1: ( ruleMontoDinero )
        {
        // InternalTextualVerduler.g:4750:1: ( ruleMontoDinero )
        // InternalTextualVerduler.g:4751:1: ruleMontoDinero
        {
        before(grammarAccess.getPrecioPorPesoAccess().getPrecioMontoDineroParserRuleCall_0_0());
        pushFollow(FOLLOW_ruleMontoDinero_in_rule__PrecioPorPeso__PrecioAssignment_09631);
        ruleMontoDinero();

        state._fsp--;

        after(grammarAccess.getPrecioPorPesoAccess().getPrecioMontoDineroParserRuleCall_0_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__PrecioPorPeso__PrecioAssignment_0"


// Assignment rule: PrecioPorPeso.pesaje = PesoMagnitudVariable (alternative 1_0).
// $ANTLR start "rule__PrecioPorPeso__PesajeAssignment_1_0_1"
// InternalTextualVerduler.g:4760:1: rule__PrecioPorPeso__PesajeAssignment_1_0_1 : ( rulePesoMagnitudVariable ) ;
public final void rule__PrecioPorPeso__PesajeAssignment_1_0_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4764:1: ( ( rulePesoMagnitudVariable ) )
        // InternalTextualVerduler.g:4765:1: ( rulePesoMagnitudVariable )
        {
        // InternalTextualVerduler.g:4765:1: ( rulePesoMagnitudVariable )
        // InternalTextualVerduler.g:4766:1: rulePesoMagnitudVariable
        {
        before(grammarAccess.getPrecioPorPesoAccess().getPesajePesoMagnitudVariableParserRuleCall_1_0_1_0());
        pushFollow(FOLLOW_rulePesoMagnitudVariable_in_rule__PrecioPorPeso__PesajeAssignment_1_0_19662);
        rulePesoMagnitudVariable();

        state._fsp--;

        after(grammarAccess.getPrecioPorPesoAccess().getPesajePesoMagnitudVariableParserRuleCall_1_0_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__PrecioPorPeso__PesajeAssignment_1_0_1"


// Assignment rule: PrecioPorPeso.pesaje = PesoMagnitudFija (alternative 1_1).
// $ANTLR start "rule__PrecioPorPeso__PesajeAssignment_1_1_1"
// InternalTextualVerduler.g:4775:1: rule__PrecioPorPeso__PesajeAssignment_1_1_1 : ( rulePesoMagnitudFija ) ;
public final void rule__PrecioPorPeso__PesajeAssignment_1_1_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4779:1: ( ( rulePesoMagnitudFija ) )
        // InternalTextualVerduler.g:4780:1: ( rulePesoMagnitudFija )
        {
        // InternalTextualVerduler.g:4780:1: ( rulePesoMagnitudFija )
        // InternalTextualVerduler.g:4781:1: rulePesoMagnitudFija
        {
        before(grammarAccess.getPrecioPorPesoAccess().getPesajePesoMagnitudFijaParserRuleCall_1_1_1_0());
        pushFollow(FOLLOW_rulePesoMagnitudFija_in_rule__PrecioPorPeso__PesajeAssignment_1_1_19693);
        rulePesoMagnitudFija();

        state._fsp--;

        after(grammarAccess.getPrecioPorPesoAccess().getPesajePesoMagnitudFijaParserRuleCall_1_1_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__PrecioPorPeso__PesajeAssignment_1_1_1"


// Assignment rule: MontoDinero.precio = INT terminal.
// $ANTLR start "rule__MontoDinero__PrecioAssignment_0"
// InternalTextualVerduler.g:4790:1: rule__MontoDinero__PrecioAssignment_0 : ( RULE_INT ) ;
public final void rule__MontoDinero__PrecioAssignment_0() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4794:1: ( ( RULE_INT ) )
        // InternalTextualVerduler.g:4795:1: ( RULE_INT )
        {
        // InternalTextualVerduler.g:4795:1: ( RULE_INT )
        // InternalTextualVerduler.g:4796:1: RULE_INT
        {
        before(grammarAccess.getMontoDineroAccess().getPrecioINTTerminalRuleCall_0_0());
        match(input,RULE_INT,FOLLOW_RULE_INT_in_rule__MontoDinero__PrecioAssignment_09724);
        after(grammarAccess.getMontoDineroAccess().getPrecioINTTerminalRuleCall_0_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__MontoDinero__PrecioAssignment_0"


// Assignment rule: PesoMagnitudVariable.magnitud = INT terminal.
// $ANTLR start "rule__PesoMagnitudVariable__MagnitudAssignment_0"
// InternalTextualVerduler.g:4805:1: rule__PesoMagnitudVariable__MagnitudAssignment_0 : ( RULE_INT ) ;
public final void rule__PesoMagnitudVariable__MagnitudAssignment_0() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4809:1: ( ( RULE_INT ) )
        // InternalTextualVerduler.g:4810:1: ( RULE_INT )
        {
        // InternalTextualVerduler.g:4810:1: ( RULE_INT )
        // InternalTextualVerduler.g:4811:1: RULE_INT
        {
        before(grammarAccess.getPesoMagnitudVariableAccess().getMagnitudINTTerminalRuleCall_0_0());
        match(input,RULE_INT,FOLLOW_RULE_INT_in_rule__PesoMagnitudVariable__MagnitudAssignment_09755);
        after(grammarAccess.getPesoMagnitudVariableAccess().getMagnitudINTTerminalRuleCall_0_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__PesoMagnitudVariable__MagnitudAssignment_0"


// Assignment rule: PesoMagnitudVariable.medidaPeso = MedidaPeso.
// $ANTLR start "rule__PesoMagnitudVariable__MedidaPesoAssignment_1"
// InternalTextualVerduler.g:4820:1: rule__PesoMagnitudVariable__MedidaPesoAssignment_1 : ( ruleMedidaPeso ) ;
public final void rule__PesoMagnitudVariable__MedidaPesoAssignment_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4824:1: ( ( ruleMedidaPeso ) )
        // InternalTextualVerduler.g:4825:1: ( ruleMedidaPeso )
        {
        // InternalTextualVerduler.g:4825:1: ( ruleMedidaPeso )
        // InternalTextualVerduler.g:4826:1: ruleMedidaPeso
        {
        before(grammarAccess.getPesoMagnitudVariableAccess().getMedidaPesoMedidaPesoParserRuleCall_1_0());
        pushFollow(FOLLOW_ruleMedidaPeso_in_rule__PesoMagnitudVariable__MedidaPesoAssignment_19786);
        ruleMedidaPeso();

        state._fsp--;

        after(grammarAccess.getPesoMagnitudVariableAccess().getMedidaPesoMedidaPesoParserRuleCall_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__PesoMagnitudVariable__MedidaPesoAssignment_1"


// Boolean-keyword assignment: PesoMagnitudFija.pesaMedioKilo ?= 'medio kilo' (token 60).
// $ANTLR start "rule__PesoMagnitudFija__PesaMedioKiloAssignment_0"
// InternalTextualVerduler.g:4835:1: rule__PesoMagnitudFija__PesaMedioKiloAssignment_0 : ( ( 'medio kilo' ) ) ;
public final void rule__PesoMagnitudFija__PesaMedioKiloAssignment_0() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4839:1: ( ( ( 'medio kilo' ) ) )
        // InternalTextualVerduler.g:4840:1: ( ( 'medio kilo' ) )
        {
        // InternalTextualVerduler.g:4840:1: ( ( 'medio kilo' ) )
        // InternalTextualVerduler.g:4841:1: ( 'medio kilo' )
        {
        before(grammarAccess.getPesoMagnitudFijaAccess().getPesaMedioKiloMedioKiloKeyword_0_0());
        // InternalTextualVerduler.g:4842:1: ( 'medio kilo' )
        // InternalTextualVerduler.g:4843:1: 'medio kilo'
        {
        before(grammarAccess.getPesoMagnitudFijaAccess().getPesaMedioKiloMedioKiloKeyword_0_0());
        match(input,60,FOLLOW_60_in_rule__PesoMagnitudFija__PesaMedioKiloAssignment_09822);
        after(grammarAccess.getPesoMagnitudFijaAccess().getPesaMedioKiloMedioKiloKeyword_0_0());

        }

        after(grammarAccess.getPesoMagnitudFijaAccess().getPesaMedioKiloMedioKiloKeyword_0_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__PesoMagnitudFija__PesaMedioKiloAssignment_0"


// Boolean-keyword assignment: PesoMagnitudFija.pesaCuartoKilo ?= 'cuarto kilo' (token 61).
// $ANTLR start "rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1"
// InternalTextualVerduler.g:4858:1: rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1 : ( ( 'cuarto kilo' ) ) ;
public final void rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        // InternalTextualVerduler.g:4862:1: ( ( ( 'cuarto kilo' ) ) )
        // InternalTextualVerduler.g:4863:1: ( ( 'cuarto kilo' ) )
        {
        // InternalTextualVerduler.g:4863:1: ( ( 'cuarto kilo' ) )
        // InternalTextualVerduler.g:4864:1: ( 'cuarto kilo' )
        {
        before(grammarAccess.getPesoMagnitudFijaAccess().getPesaCuartoKiloCuartoKiloKeyword_1_1_0());
        // InternalTextualVerduler.g:4865:1: ( 'cuarto kilo' )
        // InternalTextualVerduler.g:4866:1: 'cuarto kilo'
        {
        before(grammarAccess.getPesoMagnitudFijaAccess().getPesaCuartoKiloCuartoKiloKeyword_1_1_0());
        match(input,61,FOLLOW_61_in_rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_19866);
        after(grammarAccess.getPesoMagnitudFijaAccess().getPesaCuartoKiloCuartoKiloKeyword_1_1_0());

        }

        after(grammarAccess.getPesoMagnitudFijaAccess().getPesaCuartoKiloCuartoKiloKeyword_1_1_0());

        }

        }

    }
    catch (RecognitionException re) {
        reportError(re);
        recover(input,re);
    }
    finally {
        restoreStackSize(stackSize);
    }
    return ;
}
// $ANTLR end "rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1"


// Boolean-keyword assignment: PesoMagnitudFija.pesaUnKilo ?= 'kilo' (continues past this chunk).
// $ANTLR start "rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1"
// InternalTextualVerduler.g:4881:1: rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1 : ( ( 'kilo' ) ) ;
public final void rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1() throws RecognitionException {

    int stackSize = keepStackSize();

    try {
        //
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4885:1: ( ( ( 'kilo' ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4886:1: ( ( 'kilo' ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4886:1: ( ( 'kilo' ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4887:1: ( 'kilo' ) { before(grammarAccess.getPesoMagnitudFijaAccess().getPesaUnKiloKiloKeyword_2_1_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4888:1: ( 'kilo' ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4889:1: 'kilo' { before(grammarAccess.getPesoMagnitudFijaAccess().getPesaUnKiloKiloKeyword_2_1_0()); match(input,19,FOLLOW_19_in_rule__PesoMagnitudFija__PesaUnKiloAssignment_2_19910); after(grammarAccess.getPesoMagnitudFijaAccess().getPesaUnKiloKiloKeyword_2_1_0()); } after(grammarAccess.getPesoMagnitudFijaAccess().getPesaUnKiloKiloKeyword_2_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1" // $ANTLR start "rule__Venta__CompradorAssignment_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4904:1: rule__Venta__CompradorAssignment_0 : ( ( RULE_ID ) ) ; public final void rule__Venta__CompradorAssignment_0() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4908:1: ( ( ( RULE_ID ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4909:1: ( ( RULE_ID ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4909:1: ( ( RULE_ID ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4910:1: ( RULE_ID ) { before(grammarAccess.getVentaAccess().getCompradorClienteCrossReference_0_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4911:1: ( RULE_ID ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4912:1: RULE_ID { before(grammarAccess.getVentaAccess().getCompradorClienteIDTerminalRuleCall_0_0_1()); match(input,RULE_ID,FOLLOW_RULE_ID_in_rule__Venta__CompradorAssignment_09953); after(grammarAccess.getVentaAccess().getCompradorClienteIDTerminalRuleCall_0_0_1()); } after(grammarAccess.getVentaAccess().getCompradorClienteCrossReference_0_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__CompradorAssignment_0" // $ANTLR start "rule__Venta__ItemsVendidosAssignment_2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4923:1: rule__Venta__ItemsVendidosAssignment_2 : ( ruleItemVenta ) ; public final void rule__Venta__ItemsVendidosAssignment_2() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4927:1: ( ( ruleItemVenta ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4928:1: ( ruleItemVenta ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4928:1: ( ruleItemVenta ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4929:1: ruleItemVenta { before(grammarAccess.getVentaAccess().getItemsVendidosItemVentaParserRuleCall_2_0()); pushFollow(FOLLOW_ruleItemVenta_in_rule__Venta__ItemsVendidosAssignment_29988); ruleItemVenta(); state._fsp--; after(grammarAccess.getVentaAccess().getItemsVendidosItemVentaParserRuleCall_2_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__ItemsVendidosAssignment_2" // $ANTLR start "rule__Venta__ItemsVendidosAssignment_3_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4938:1: rule__Venta__ItemsVendidosAssignment_3_1 : ( ruleItemVenta ) ; public final void rule__Venta__ItemsVendidosAssignment_3_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4942:1: ( ( ruleItemVenta ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4943:1: ( ruleItemVenta ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4943:1: ( ruleItemVenta ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4944:1: ruleItemVenta { before(grammarAccess.getVentaAccess().getItemsVendidosItemVentaParserRuleCall_3_1_0()); pushFollow(FOLLOW_ruleItemVenta_in_rule__Venta__ItemsVendidosAssignment_3_110019); ruleItemVenta(); state._fsp--; after(grammarAccess.getVentaAccess().getItemsVendidosItemVentaParserRuleCall_3_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__ItemsVendidosAssignment_3_1" // $ANTLR start "rule__Venta__TotalRedondeadoAssignment_4_1_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4953:1: rule__Venta__TotalRedondeadoAssignment_4_1_1 : ( ruleMontoDinero ) ; public final void rule__Venta__TotalRedondeadoAssignment_4_1_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4957:1: ( ( ruleMontoDinero ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4958:1: ( ruleMontoDinero ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4958:1: ( ruleMontoDinero ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4959:1: ruleMontoDinero { before(grammarAccess.getVentaAccess().getTotalRedondeadoMontoDineroParserRuleCall_4_1_1_0()); pushFollow(FOLLOW_ruleMontoDinero_in_rule__Venta__TotalRedondeadoAssignment_4_1_110050); ruleMontoDinero(); state._fsp--; after(grammarAccess.getVentaAccess().getTotalRedondeadoMontoDineroParserRuleCall_4_1_1_0()); } } } 
catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__TotalRedondeadoAssignment_4_1_1" // $ANTLR start "rule__Venta__TotalPagadoAssignment_5_1_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4968:1: rule__Venta__TotalPagadoAssignment_5_1_1 : ( ruleMontoDinero ) ; public final void rule__Venta__TotalPagadoAssignment_5_1_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4972:1: ( ( ruleMontoDinero ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4973:1: ( ruleMontoDinero ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4973:1: ( ruleMontoDinero ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4974:1: ruleMontoDinero { before(grammarAccess.getVentaAccess().getTotalPagadoMontoDineroParserRuleCall_5_1_1_0()); pushFollow(FOLLOW_ruleMontoDinero_in_rule__Venta__TotalPagadoAssignment_5_1_110081); ruleMontoDinero(); state._fsp--; after(grammarAccess.getVentaAccess().getTotalPagadoMontoDineroParserRuleCall_5_1_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__TotalPagadoAssignment_5_1_1" // $ANTLR start "rule__Venta__TotalDebiendoAssignment_6_1_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4983:1: rule__Venta__TotalDebiendoAssignment_6_1_1 : ( ruleMontoDinero ) ; public final void 
rule__Venta__TotalDebiendoAssignment_6_1_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4987:1: ( ( ruleMontoDinero ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4988:1: ( ruleMontoDinero ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4988:1: ( ruleMontoDinero ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4989:1: ruleMontoDinero { before(grammarAccess.getVentaAccess().getTotalDebiendoMontoDineroParserRuleCall_6_1_1_0()); pushFollow(FOLLOW_ruleMontoDinero_in_rule__Venta__TotalDebiendoAssignment_6_1_110112); ruleMontoDinero(); state._fsp--; after(grammarAccess.getVentaAccess().getTotalDebiendoMontoDineroParserRuleCall_6_1_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Venta__TotalDebiendoAssignment_6_1_1" // $ANTLR start "rule__ItemVenta__CantidadAssignment_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:4998:1: rule__ItemVenta__CantidadAssignment_0 : ( rulePeso ) ; public final void rule__ItemVenta__CantidadAssignment_0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5002:1: ( ( rulePeso ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5003:1: ( rulePeso ) { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5003:1: ( rulePeso ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5004:1: rulePeso { before(grammarAccess.getItemVentaAccess().getCantidadPesoParserRuleCall_0_0()); pushFollow(FOLLOW_rulePeso_in_rule__ItemVenta__CantidadAssignment_010143); rulePeso(); state._fsp--; after(grammarAccess.getItemVentaAccess().getCantidadPesoParserRuleCall_0_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ItemVenta__CantidadAssignment_0" // $ANTLR start "rule__ItemVenta__ProductoAssignment_2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5013:1: rule__ItemVenta__ProductoAssignment_2 : ( ( RULE_ID ) ) ; public final void rule__ItemVenta__ProductoAssignment_2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5017:1: ( ( ( RULE_ID ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5018:1: ( ( RULE_ID ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5018:1: ( ( RULE_ID ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5019:1: ( RULE_ID ) { before(grammarAccess.getItemVentaAccess().getProductoProductoCrossReference_2_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5020:1: ( RULE_ID ) 
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5021:1: RULE_ID { before(grammarAccess.getItemVentaAccess().getProductoProductoIDTerminalRuleCall_2_0_1()); match(input,RULE_ID,FOLLOW_RULE_ID_in_rule__ItemVenta__ProductoAssignment_210178); after(grammarAccess.getItemVentaAccess().getProductoProductoIDTerminalRuleCall_2_0_1()); } after(grammarAccess.getItemVentaAccess().getProductoProductoCrossReference_2_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__ItemVenta__ProductoAssignment_2" // $ANTLR start "rule__Tarea__HorarioAssignment_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5032:1: rule__Tarea__HorarioAssignment_1 : ( ruleHorario ) ; public final void rule__Tarea__HorarioAssignment_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5036:1: ( ( ruleHorario ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5037:1: ( ruleHorario ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5037:1: ( ruleHorario ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5038:1: ruleHorario { before(grammarAccess.getTareaAccess().getHorarioHorarioParserRuleCall_1_0()); pushFollow(FOLLOW_ruleHorario_in_rule__Tarea__HorarioAssignment_110213); ruleHorario(); state._fsp--; after(grammarAccess.getTareaAccess().getHorarioHorarioParserRuleCall_1_0()); } } } catch (RecognitionException re) { reportError(re); 
recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Tarea__HorarioAssignment_1" // $ANTLR start "rule__Tarea__TareaRealizadaAssignment_2" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5047:1: rule__Tarea__TareaRealizadaAssignment_2 : ( ruleTipoTarea ) ; public final void rule__Tarea__TareaRealizadaAssignment_2() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5051:1: ( ( ruleTipoTarea ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5052:1: ( ruleTipoTarea ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5052:1: ( ruleTipoTarea ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5053:1: ruleTipoTarea { before(grammarAccess.getTareaAccess().getTareaRealizadaTipoTareaParserRuleCall_2_0()); pushFollow(FOLLOW_ruleTipoTarea_in_rule__Tarea__TareaRealizadaAssignment_210244); ruleTipoTarea(); state._fsp--; after(grammarAccess.getTareaAccess().getTareaRealizadaTipoTareaParserRuleCall_2_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Tarea__TareaRealizadaAssignment_2" // $ANTLR start "rule__Tarea__DuracionAssignment_4" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5062:1: rule__Tarea__DuracionAssignment_4 : ( ruleLapsoTiempo ) ; public final void rule__Tarea__DuracionAssignment_4() throws RecognitionException { int stackSize = keepStackSize(); try { 
// ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5066:1: ( ( ruleLapsoTiempo ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5067:1: ( ruleLapsoTiempo ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5067:1: ( ruleLapsoTiempo ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5068:1: ruleLapsoTiempo { before(grammarAccess.getTareaAccess().getDuracionLapsoTiempoParserRuleCall_4_0()); pushFollow(FOLLOW_ruleLapsoTiempo_in_rule__Tarea__DuracionAssignment_410275); ruleLapsoTiempo(); state._fsp--; after(grammarAccess.getTareaAccess().getDuracionLapsoTiempoParserRuleCall_4_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__Tarea__DuracionAssignment_4" // $ANTLR start "rule__HorarioLiteral__HoraAssignment_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5077:1: rule__HorarioLiteral__HoraAssignment_0 : ( ruleHorasLiteral ) ; public final void rule__HorarioLiteral__HoraAssignment_0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5081:1: ( ( ruleHorasLiteral ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5082:1: ( ruleHorasLiteral ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5082:1: ( ruleHorasLiteral ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5083:1: ruleHorasLiteral { before(grammarAccess.getHorarioLiteralAccess().getHoraHorasLiteralParserRuleCall_0_0()); pushFollow(FOLLOW_ruleHorasLiteral_in_rule__HorarioLiteral__HoraAssignment_010306); ruleHorasLiteral(); state._fsp--; after(grammarAccess.getHorarioLiteralAccess().getHoraHorasLiteralParserRuleCall_0_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioLiteral__HoraAssignment_0" // $ANTLR start "rule__HorarioLiteral__FraccionHoraAssignment_1_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5092:1: rule__HorarioLiteral__FraccionHoraAssignment_1_1 : ( ruleFraccionHoraLiteral ) ; public final void rule__HorarioLiteral__FraccionHoraAssignment_1_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5096:1: ( ( ruleFraccionHoraLiteral ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5097:1: ( ruleFraccionHoraLiteral ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5097:1: ( ruleFraccionHoraLiteral ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5098:1: ruleFraccionHoraLiteral { before(grammarAccess.getHorarioLiteralAccess().getFraccionHoraFraccionHoraLiteralParserRuleCall_1_1_0()); pushFollow(FOLLOW_ruleFraccionHoraLiteral_in_rule__HorarioLiteral__FraccionHoraAssignment_1_110337); ruleFraccionHoraLiteral(); state._fsp--; 
after(grammarAccess.getHorarioLiteralAccess().getFraccionHoraFraccionHoraLiteralParserRuleCall_1_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioLiteral__FraccionHoraAssignment_1_1" // $ANTLR start "rule__HorarioNumerico__HoraAssignment_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5107:1: rule__HorarioNumerico__HoraAssignment_0 : ( RULE_INT ) ; public final void rule__HorarioNumerico__HoraAssignment_0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5111:1: ( ( RULE_INT ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5112:1: ( RULE_INT ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5112:1: ( RULE_INT ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5113:1: RULE_INT { before(grammarAccess.getHorarioNumericoAccess().getHoraINTTerminalRuleCall_0_0()); match(input,RULE_INT,FOLLOW_RULE_INT_in_rule__HorarioNumerico__HoraAssignment_010368); after(grammarAccess.getHorarioNumericoAccess().getHoraINTTerminalRuleCall_0_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__HoraAssignment_0" // $ANTLR start "rule__HorarioNumerico__MinutosAssignment_1_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5122:1: rule__HorarioNumerico__MinutosAssignment_1_1 : ( 
RULE_INT ) ; public final void rule__HorarioNumerico__MinutosAssignment_1_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5126:1: ( ( RULE_INT ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5127:1: ( RULE_INT ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5127:1: ( RULE_INT ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5128:1: RULE_INT { before(grammarAccess.getHorarioNumericoAccess().getMinutosINTTerminalRuleCall_1_1_0()); match(input,RULE_INT,FOLLOW_RULE_INT_in_rule__HorarioNumerico__MinutosAssignment_1_110399); after(grammarAccess.getHorarioNumericoAccess().getMinutosINTTerminalRuleCall_1_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorarioNumerico__MinutosAssignment_1_1" // $ANTLR start "rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5137:1: rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1 : ( ( RULE_ID ) ) ; public final void rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5141:1: ( ( ( RULE_ID ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5142:1: ( ( RULE_ID ) ) 
{ // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5142:1: ( ( RULE_ID ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5143:1: ( RULE_ID ) { before(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosProductoCrossReference_1_1_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5144:1: ( RULE_ID ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5145:1: RULE_ID { before(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosProductoIDTerminalRuleCall_1_1_0_1()); match(input,RULE_ID,FOLLOW_RULE_ID_in_rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_110434); after(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosProductoIDTerminalRuleCall_1_1_0_1()); } after(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosProductoCrossReference_1_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1" // $ANTLR start "rule__TareaRevisionProductos__ProductosRevisadosAssignment_3" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5156:1: rule__TareaRevisionProductos__ProductosRevisadosAssignment_3 : ( ( RULE_ID ) ) ; public final void rule__TareaRevisionProductos__ProductosRevisadosAssignment_3() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5160:1: ( ( ( RULE_ID ) ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5161:1: ( ( RULE_ID ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5161:1: ( ( RULE_ID ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5162:1: ( RULE_ID ) { before(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosProductoCrossReference_3_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5163:1: ( RULE_ID ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5164:1: RULE_ID { before(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosProductoIDTerminalRuleCall_3_0_1()); match(input,RULE_ID,FOLLOW_RULE_ID_in_rule__TareaRevisionProductos__ProductosRevisadosAssignment_310473); after(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosProductoIDTerminalRuleCall_3_0_1()); } after(grammarAccess.getTareaRevisionProductosAccess().getProductosRevisadosProductoCrossReference_3_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__TareaRevisionProductos__ProductosRevisadosAssignment_3" // $ANTLR start "rule__LapsoTiempoNumerico__MinutosAssignment_0_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5175:1: rule__LapsoTiempoNumerico__MinutosAssignment_0_0 : ( RULE_INT ) ; public final void rule__LapsoTiempoNumerico__MinutosAssignment_0_0() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5179:1: ( ( RULE_INT ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5180:1: ( RULE_INT ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5180:1: ( RULE_INT ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5181:1: RULE_INT { before(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosINTTerminalRuleCall_0_0_0()); match(input,RULE_INT,FOLLOW_RULE_INT_in_rule__LapsoTiempoNumerico__MinutosAssignment_0_010508); after(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosINTTerminalRuleCall_0_0_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__MinutosAssignment_0_0" // $ANTLR start "rule__LapsoTiempoNumerico__HorasAssignment_1_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5190:1: rule__LapsoTiempoNumerico__HorasAssignment_1_0 : ( RULE_INT ) ; public final void rule__LapsoTiempoNumerico__HorasAssignment_1_0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5194:1: ( ( RULE_INT ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5195:1: ( RULE_INT ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5195:1: ( RULE_INT ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5196:1: RULE_INT { before(grammarAccess.getLapsoTiempoNumericoAccess().getHorasINTTerminalRuleCall_1_0_0()); match(input,RULE_INT,FOLLOW_RULE_INT_in_rule__LapsoTiempoNumerico__HorasAssignment_1_010539); after(grammarAccess.getLapsoTiempoNumericoAccess().getHorasINTTerminalRuleCall_1_0_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__HorasAssignment_1_0" // $ANTLR start "rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5205:1: rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1 : ( RULE_INT ) ; public final void rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5209:1: ( ( RULE_INT ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5210:1: ( RULE_INT ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5210:1: ( RULE_INT ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5211:1: RULE_INT { before(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosINTTerminalRuleCall_1_2_1_0()); match(input,RULE_INT,FOLLOW_RULE_INT_in_rule__LapsoTiempoNumerico__MinutosAssignment_1_2_110570); after(grammarAccess.getLapsoTiempoNumericoAccess().getMinutosINTTerminalRuleCall_1_2_1_0()); } } } catch (RecognitionException re) { reportError(re); 
recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1" // $ANTLR start "rule__LapsoTiempoLiteral__HorasAssignment_0" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5220:1: rule__LapsoTiempoLiteral__HorasAssignment_0 : ( ruleHorasLiteral ) ; public final void rule__LapsoTiempoLiteral__HorasAssignment_0() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5224:1: ( ( ruleHorasLiteral ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5225:1: ( ruleHorasLiteral ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5225:1: ( ruleHorasLiteral ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5226:1: ruleHorasLiteral { before(grammarAccess.getLapsoTiempoLiteralAccess().getHorasHorasLiteralParserRuleCall_0_0()); pushFollow(FOLLOW_ruleHorasLiteral_in_rule__LapsoTiempoLiteral__HorasAssignment_010601); ruleHorasLiteral(); state._fsp--; after(grammarAccess.getLapsoTiempoLiteralAccess().getHorasHorasLiteralParserRuleCall_0_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__HorasAssignment_0" // $ANTLR start "rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5235:1: rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1 : ( ruleFraccionHoraLiteral ) ; 
public final void rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5239:1: ( ( ruleFraccionHoraLiteral ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5240:1: ( ruleFraccionHoraLiteral ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5240:1: ( ruleFraccionHoraLiteral ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5241:1: ruleFraccionHoraLiteral { before(grammarAccess.getLapsoTiempoLiteralAccess().getFraccionHoraFraccionHoraLiteralParserRuleCall_2_1_0()); pushFollow(FOLLOW_ruleFraccionHoraLiteral_in_rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_110632); ruleFraccionHoraLiteral(); state._fsp--; after(grammarAccess.getLapsoTiempoLiteralAccess().getFraccionHoraFraccionHoraLiteralParserRuleCall_2_1_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1" // $ANTLR start "rule__HorasLiteral__LiteralAssignment" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5250:1: rule__HorasLiteral__LiteralAssignment : ( ( rule__HorasLiteral__LiteralAlternatives_0 ) ) ; public final void rule__HorasLiteral__LiteralAssignment() throws RecognitionException { int stackSize = keepStackSize(); try { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5254:1: ( ( ( rule__HorasLiteral__LiteralAlternatives_0 ) ) ) // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5255:1: ( ( rule__HorasLiteral__LiteralAlternatives_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5255:1: ( ( rule__HorasLiteral__LiteralAlternatives_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5256:1: ( rule__HorasLiteral__LiteralAlternatives_0 ) { before(grammarAccess.getHorasLiteralAccess().getLiteralAlternatives_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5257:1: ( rule__HorasLiteral__LiteralAlternatives_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5257:2: rule__HorasLiteral__LiteralAlternatives_0 { pushFollow(FOLLOW_rule__HorasLiteral__LiteralAlternatives_0_in_rule__HorasLiteral__LiteralAssignment10663); rule__HorasLiteral__LiteralAlternatives_0(); state._fsp--; } after(grammarAccess.getHorasLiteralAccess().getLiteralAlternatives_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__HorasLiteral__LiteralAssignment" // $ANTLR start "rule__FraccionHoraLiteral__LiteralAssignment" // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5266:1: rule__FraccionHoraLiteral__LiteralAssignment : ( ( rule__FraccionHoraLiteral__LiteralAlternatives_0 ) ) ; public final void rule__FraccionHoraLiteral__LiteralAssignment() throws RecognitionException { int stackSize = keepStackSize(); try { // 
../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5270:1: ( ( ( rule__FraccionHoraLiteral__LiteralAlternatives_0 ) ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5271:1: ( ( rule__FraccionHoraLiteral__LiteralAlternatives_0 ) ) { // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5271:1: ( ( rule__FraccionHoraLiteral__LiteralAlternatives_0 ) ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5272:1: ( rule__FraccionHoraLiteral__LiteralAlternatives_0 ) { before(grammarAccess.getFraccionHoraLiteralAccess().getLiteralAlternatives_0()); // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5273:1: ( rule__FraccionHoraLiteral__LiteralAlternatives_0 ) // ../org.xtext.example.TextualVerduler.ui/src-gen/org/xtext/example/mydsl/ui/contentassist/antlr/internal/InternalTextualVerduler.g:5273:2: rule__FraccionHoraLiteral__LiteralAlternatives_0 { pushFollow(FOLLOW_rule__FraccionHoraLiteral__LiteralAlternatives_0_in_rule__FraccionHoraLiteral__LiteralAssignment10696); rule__FraccionHoraLiteral__LiteralAlternatives_0(); state._fsp--; } after(grammarAccess.getFraccionHoraLiteralAccess().getLiteralAlternatives_0()); } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { restoreStackSize(stackSize); } return ; } // $ANTLR end "rule__FraccionHoraLiteral__LiteralAssignment" // Delegated rules public static final BitSet FOLLOW_ruleVerduleria_in_entryRuleVerduleria61 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleVerduleria68 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_rule__Verduleria__Alternatives_in_ruleVerduleria94 = new BitSet(new long[]{0x0010002000007812L}); public static final BitSet FOLLOW_ruleCliente_in_entryRuleCliente122 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleCliente129 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Cliente__Alternatives_in_ruleCliente155 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleClienteConDeuda_in_entryRuleClienteConDeuda182 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleClienteConDeuda189 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__Group__0_in_ruleClienteConDeuda215 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleClienteAlDia_in_entryRuleClienteAlDia242 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleClienteAlDia249 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteAlDia__Group__0_in_ruleClienteAlDia275 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleClienteConCredito_in_entryRuleClienteConCredito302 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleClienteConCredito309 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConCredito__Group__0_in_ruleClienteConCredito335 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleProductoConPrecio_in_entryRuleProductoConPrecio362 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleProductoConPrecio369 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ProductoConPrecio__Group__0_in_ruleProductoConPrecio395 = new BitSet(new 
long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleProducto_in_entryRuleProducto422 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleProducto429 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Producto__Group__0_in_ruleProducto455 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePrecioPorPeso_in_entryRulePrecioPorPeso482 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRulePrecioPorPeso489 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group__0_in_rulePrecioPorPeso515 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMontoDinero_in_entryRuleMontoDinero542 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleMontoDinero549 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MontoDinero__Group__0_in_ruleMontoDinero575 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePeso_in_entryRulePeso602 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRulePeso609 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Peso__Alternatives_in_rulePeso635 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePesoMagnitudVariable_in_entryRulePesoMagnitudVariable662 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRulePesoMagnitudVariable669 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudVariable__Group__0_in_rulePesoMagnitudVariable695 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePesoMagnitudFija_in_entryRulePesoMagnitudFija722 = new BitSet(new 
long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRulePesoMagnitudFija729 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Alternatives_in_rulePesoMagnitudFija755 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMedidaPeso_in_entryRuleMedidaPeso782 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleMedidaPeso789 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MedidaPeso__Alternatives_in_ruleMedidaPeso815 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMedidaPesoGramos_in_entryRuleMedidaPesoGramos842 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleMedidaPesoGramos849 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MedidaPesoGramos__Group__0_in_ruleMedidaPesoGramos875 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMedidaPesoKilo_in_entryRuleMedidaPesoKilo902 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleMedidaPesoKilo909 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MedidaPesoKilo__Group__0_in_ruleMedidaPesoKilo935 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleVenta_in_entryRuleVenta962 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleVenta969 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__0_in_ruleVenta995 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleItemVenta_in_entryRuleItemVenta1022 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleItemVenta1029 = new BitSet(new 
long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ItemVenta__Group__0_in_ruleItemVenta1055 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTarea_in_entryRuleTarea1082 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleTarea1089 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__Group__0_in_ruleTarea1115 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleHorario_in_entryRuleHorario1142 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleHorario1149 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Horario__Alternatives_in_ruleHorario1175 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleHorarioLiteral_in_entryRuleHorarioLiteral1202 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleHorarioLiteral1209 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioLiteral__Group__0_in_ruleHorarioLiteral1235 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleHorarioNumerico_in_entryRuleHorarioNumerico1262 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleHorarioNumerico1269 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioNumerico__Group__0_in_ruleHorarioNumerico1295 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTipoTarea_in_entryRuleTipoTarea1322 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleTipoTarea1329 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TipoTarea__Alternatives_in_ruleTipoTarea1355 = new BitSet(new long[]{0x0000000000000002L}); public 
static final BitSet FOLLOW_ruleTareaRevisionProductos_in_entryRuleTareaRevisionProductos1382 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleTareaRevisionProductos1389 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group__0_in_ruleTareaRevisionProductos1415 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTareaArqueoCaja_in_entryRuleTareaArqueoCaja1442 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleTareaArqueoCaja1449 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaArqueoCaja__Group__0_in_ruleTareaArqueoCaja1475 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTareaLimpiezaLocal_in_entryRuleTareaLimpiezaLocal1502 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleTareaLimpiezaLocal1509 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaLimpiezaLocal__Group__0_in_ruleTareaLimpiezaLocal1535 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleLapsoTiempo_in_entryRuleLapsoTiempo1562 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleLapsoTiempo1569 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempo__Alternatives_in_ruleLapsoTiempo1595 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleLapsoTiempoNumerico_in_entryRuleLapsoTiempoNumerico1622 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleLapsoTiempoNumerico1629 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Alternatives_in_ruleLapsoTiempoNumerico1655 = new BitSet(new 
long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleLapsoTiempoLiteral_in_entryRuleLapsoTiempoLiteral1682 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleLapsoTiempoLiteral1689 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group__0_in_ruleLapsoTiempoLiteral1715 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleHorasLiteral_in_entryRuleHorasLiteral1742 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleHorasLiteral1749 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorasLiteral__LiteralAssignment_in_ruleHorasLiteral1775 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleFraccionHoraLiteral_in_entryRuleFraccionHoraLiteral1802 = new BitSet(new long[]{0x0000000000000000L}); public static final BitSet FOLLOW_EOF_in_entryRuleFraccionHoraLiteral1809 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__FraccionHoraLiteral__LiteralAssignment_in_ruleFraccionHoraLiteral1835 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Verduleria__ClientesAssignment_0_in_rule__Verduleria__Alternatives1871 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Verduleria__ProductosAssignment_1_in_rule__Verduleria__Alternatives1889 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Verduleria__VentasAssignment_2_in_rule__Verduleria__Alternatives1907 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Verduleria__TareasAssignment_3_in_rule__Verduleria__Alternatives1925 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleClienteConDeuda_in_rule__Cliente__Alternatives1958 = new BitSet(new 
long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleClienteAlDia_in_rule__Cliente__Alternatives1975 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleClienteConCredito_in_rule__Cliente__Alternatives1992 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_11_in_rule__Producto__Alternatives_02025 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_12_in_rule__Producto__Alternatives_02045 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_13_in_rule__Producto__Alternatives_02065 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_14_in_rule__Producto__Alternatives_02085 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group_1_0__0_in_rule__PrecioPorPeso__Alternatives_12119 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group_1_1__0_in_rule__PrecioPorPeso__Alternatives_12137 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_15_in_rule__MontoDinero__Alternatives_12171 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_16_in_rule__MontoDinero__Alternatives_12191 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_17_in_rule__MontoDinero__Alternatives_12211 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_18_in_rule__MontoDinero__Alternatives_12231 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePesoMagnitudVariable_in_rule__Peso__Alternatives2265 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePesoMagnitudFija_in_rule__Peso__Alternatives2282 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_rule__PesoMagnitudFija__PesaMedioKiloAssignment_0_in_rule__PesoMagnitudFija__Alternatives2314 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Group_1__0_in_rule__PesoMagnitudFija__Alternatives2332 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Group_2__0_in_rule__PesoMagnitudFija__Alternatives2350 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMedidaPesoKilo_in_rule__MedidaPeso__Alternatives2383 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMedidaPesoGramos_in_rule__MedidaPeso__Alternatives2400 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_19_in_rule__MedidaPesoKilo__Alternatives_12433 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_20_in_rule__MedidaPesoKilo__Alternatives_12453 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleHorarioNumerico_in_rule__Horario__Alternatives2487 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleHorarioLiteral_in_rule__Horario__Alternatives2504 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTareaLimpiezaLocal_in_rule__TipoTarea__Alternatives2536 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTareaArqueoCaja_in_rule__TipoTarea__Alternatives2553 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTareaRevisionProductos_in_rule__TipoTarea__Alternatives2570 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_14_in_rule__TareaRevisionProductos__Alternatives_1_02603 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_13_in_rule__TareaRevisionProductos__Alternatives_1_02623 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_14_in_rule__TareaRevisionProductos__Alternatives_22658 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_13_in_rule__TareaRevisionProductos__Alternatives_22678 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleLapsoTiempoLiteral_in_rule__LapsoTiempo__Alternatives2712 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleLapsoTiempoNumerico_in_rule__LapsoTiempo__Alternatives2729 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_0__0_in_rule__LapsoTiempoNumerico__Alternatives2761 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1__0_in_rule__LapsoTiempoNumerico__Alternatives2779 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_21_in_rule__LapsoTiempoNumerico__Alternatives_1_12813 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_22_in_rule__LapsoTiempoNumerico__Alternatives_1_12833 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_21_in_rule__LapsoTiempoLiteral__Alternatives_12868 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_22_in_rule__LapsoTiempoLiteral__Alternatives_12888 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_23_in_rule__HorasLiteral__LiteralAlternatives_02923 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_24_in_rule__HorasLiteral__LiteralAlternatives_02943 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_25_in_rule__HorasLiteral__LiteralAlternatives_02963 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_26_in_rule__HorasLiteral__LiteralAlternatives_02983 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_27_in_rule__HorasLiteral__LiteralAlternatives_03003 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_28_in_rule__HorasLiteral__LiteralAlternatives_03023 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_29_in_rule__HorasLiteral__LiteralAlternatives_03043 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_30_in_rule__HorasLiteral__LiteralAlternatives_03063 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_31_in_rule__HorasLiteral__LiteralAlternatives_03083 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_32_in_rule__HorasLiteral__LiteralAlternatives_03103 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_33_in_rule__HorasLiteral__LiteralAlternatives_03123 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_34_in_rule__HorasLiteral__LiteralAlternatives_03143 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_35_in_rule__FraccionHoraLiteral__LiteralAlternatives_03178 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_36_in_rule__FraccionHoraLiteral__LiteralAlternatives_03198 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__Group__0__Impl_in_rule__ClienteConDeuda__Group__03230 = new BitSet(new long[]{0x0000000000000010L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__Group__1_in_rule__ClienteConDeuda__Group__03233 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_37_in_rule__ClienteConDeuda__Group__0__Impl3261 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__Group__1__Impl_in_rule__ClienteConDeuda__Group__13292 = new BitSet(new long[]{0x0000004000000000L}); public static final BitSet 
FOLLOW_rule__ClienteConDeuda__Group__2_in_rule__ClienteConDeuda__Group__13295 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__NameAssignment_1_in_rule__ClienteConDeuda__Group__1__Impl3322 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__Group__2__Impl_in_rule__ClienteConDeuda__Group__23352 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__Group__3_in_rule__ClienteConDeuda__Group__23355 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_38_in_rule__ClienteConDeuda__Group__2__Impl3383 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__Group__3__Impl_in_rule__ClienteConDeuda__Group__33414 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConDeuda__MontoDeudaAssignment_3_in_rule__ClienteConDeuda__Group__3__Impl3441 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteAlDia__Group__0__Impl_in_rule__ClienteAlDia__Group__03479 = new BitSet(new long[]{0x0000000000000010L}); public static final BitSet FOLLOW_rule__ClienteAlDia__Group__1_in_rule__ClienteAlDia__Group__03482 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_37_in_rule__ClienteAlDia__Group__0__Impl3510 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteAlDia__Group__1__Impl_in_rule__ClienteAlDia__Group__13541 = new BitSet(new long[]{0x0000008000000000L}); public static final BitSet FOLLOW_rule__ClienteAlDia__Group__2_in_rule__ClienteAlDia__Group__13544 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteAlDia__NameAssignment_1_in_rule__ClienteAlDia__Group__1__Impl3571 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_rule__ClienteAlDia__Group__2__Impl_in_rule__ClienteAlDia__Group__23601 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_39_in_rule__ClienteAlDia__Group__2__Impl3629 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConCredito__Group__0__Impl_in_rule__ClienteConCredito__Group__03666 = new BitSet(new long[]{0x0000000000000010L}); public static final BitSet FOLLOW_rule__ClienteConCredito__Group__1_in_rule__ClienteConCredito__Group__03669 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_37_in_rule__ClienteConCredito__Group__0__Impl3697 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConCredito__Group__1__Impl_in_rule__ClienteConCredito__Group__13728 = new BitSet(new long[]{0x0000010000000000L}); public static final BitSet FOLLOW_rule__ClienteConCredito__Group__2_in_rule__ClienteConCredito__Group__13731 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConCredito__NameAssignment_1_in_rule__ClienteConCredito__Group__1__Impl3758 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConCredito__Group__2__Impl_in_rule__ClienteConCredito__Group__23788 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__ClienteConCredito__Group__3_in_rule__ClienteConCredito__Group__23791 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_40_in_rule__ClienteConCredito__Group__2__Impl3819 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConCredito__Group__3__Impl_in_rule__ClienteConCredito__Group__33850 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ClienteConCredito__MontoCreditoAssignment_3_in_rule__ClienteConCredito__Group__3__Impl3877 = new BitSet(new long[]{0x0000000000000002L}); public static final 
BitSet FOLLOW_rule__ProductoConPrecio__Group__0__Impl_in_rule__ProductoConPrecio__Group__03915 = new BitSet(new long[]{0x0000020000000000L}); public static final BitSet FOLLOW_rule__ProductoConPrecio__Group__1_in_rule__ProductoConPrecio__Group__03918 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleProducto_in_rule__ProductoConPrecio__Group__0__Impl3945 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ProductoConPrecio__Group__1__Impl_in_rule__ProductoConPrecio__Group__13974 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__ProductoConPrecio__Group__2_in_rule__ProductoConPrecio__Group__13977 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_41_in_rule__ProductoConPrecio__Group__1__Impl4005 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ProductoConPrecio__Group__2__Impl_in_rule__ProductoConPrecio__Group__24036 = new BitSet(new long[]{0x0000040000000000L}); public static final BitSet FOLLOW_rule__ProductoConPrecio__Group__3_in_rule__ProductoConPrecio__Group__24039 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ProductoConPrecio__PrecioAssignment_2_in_rule__ProductoConPrecio__Group__2__Impl4066 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ProductoConPrecio__Group__3__Impl_in_rule__ProductoConPrecio__Group__34096 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_42_in_rule__ProductoConPrecio__Group__3__Impl4124 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Producto__Group__0__Impl_in_rule__Producto__Group__04163 = new BitSet(new long[]{0x0000000000000010L}); public static final BitSet FOLLOW_rule__Producto__Group__1_in_rule__Producto__Group__04166 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_rule__Producto__Alternatives_0_in_rule__Producto__Group__0__Impl4193 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Producto__Group__1__Impl_in_rule__Producto__Group__14223 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Producto__NameAssignment_1_in_rule__Producto__Group__1__Impl4250 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group__0__Impl_in_rule__PrecioPorPeso__Group__04284 = new BitSet(new long[]{0x0000080000002000L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group__1_in_rule__PrecioPorPeso__Group__04287 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__PrecioAssignment_0_in_rule__PrecioPorPeso__Group__0__Impl4314 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group__1__Impl_in_rule__PrecioPorPeso__Group__14344 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Alternatives_1_in_rule__PrecioPorPeso__Group__1__Impl4371 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group_1_0__0__Impl_in_rule__PrecioPorPeso__Group_1_0__04405 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group_1_0__1_in_rule__PrecioPorPeso__Group_1_0__04408 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_13_in_rule__PrecioPorPeso__Group_1_0__0__Impl4436 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group_1_0__1__Impl_in_rule__PrecioPorPeso__Group_1_0__14467 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__PesajeAssignment_1_0_1_in_rule__PrecioPorPeso__Group_1_0__1__Impl4494 = new BitSet(new long[]{0x0000000000000002L}); public static 
final BitSet FOLLOW_rule__PrecioPorPeso__Group_1_1__0__Impl_in_rule__PrecioPorPeso__Group_1_1__04528 = new BitSet(new long[]{0x3000100000080000L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group_1_1__1_in_rule__PrecioPorPeso__Group_1_1__04531 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_43_in_rule__PrecioPorPeso__Group_1_1__0__Impl4559 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__Group_1_1__1__Impl_in_rule__PrecioPorPeso__Group_1_1__14590 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PrecioPorPeso__PesajeAssignment_1_1_1_in_rule__PrecioPorPeso__Group_1_1__1__Impl4617 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MontoDinero__Group__0__Impl_in_rule__MontoDinero__Group__04651 = new BitSet(new long[]{0x0000000000078000L}); public static final BitSet FOLLOW_rule__MontoDinero__Group__1_in_rule__MontoDinero__Group__04654 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MontoDinero__PrecioAssignment_0_in_rule__MontoDinero__Group__0__Impl4681 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MontoDinero__Group__1__Impl_in_rule__MontoDinero__Group__14711 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MontoDinero__Alternatives_1_in_rule__MontoDinero__Group__1__Impl4738 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudVariable__Group__0__Impl_in_rule__PesoMagnitudVariable__Group__04772 = new BitSet(new long[]{0x0000200000180000L}); public static final BitSet FOLLOW_rule__PesoMagnitudVariable__Group__1_in_rule__PesoMagnitudVariable__Group__04775 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudVariable__MagnitudAssignment_0_in_rule__PesoMagnitudVariable__Group__0__Impl4802 = new 
BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudVariable__Group__1__Impl_in_rule__PesoMagnitudVariable__Group__14832 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudVariable__MedidaPesoAssignment_1_in_rule__PesoMagnitudVariable__Group__1__Impl4859 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Group_1__0__Impl_in_rule__PesoMagnitudFija__Group_1__04893 = new BitSet(new long[]{0x2000100000000000L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Group_1__1_in_rule__PesoMagnitudFija__Group_1__04896 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_44_in_rule__PesoMagnitudFija__Group_1__0__Impl4925 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Group_1__1__Impl_in_rule__PesoMagnitudFija__Group_1__14958 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_1_in_rule__PesoMagnitudFija__Group_1__1__Impl4985 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Group_2__0__Impl_in_rule__PesoMagnitudFija__Group_2__05019 = new BitSet(new long[]{0x3000100000080000L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Group_2__1_in_rule__PesoMagnitudFija__Group_2__05022 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_44_in_rule__PesoMagnitudFija__Group_2__0__Impl5051 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__Group_2__1__Impl_in_rule__PesoMagnitudFija__Group_2__15084 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__PesoMagnitudFija__PesaUnKiloAssignment_2_1_in_rule__PesoMagnitudFija__Group_2__1__Impl5111 = new BitSet(new long[]{0x0000000000000002L}); public 
static final BitSet FOLLOW_rule__MedidaPesoGramos__Group__0__Impl_in_rule__MedidaPesoGramos__Group__05145 = new BitSet(new long[]{0x0000200000180000L}); public static final BitSet FOLLOW_rule__MedidaPesoGramos__Group__1_in_rule__MedidaPesoGramos__Group__05148 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MedidaPesoGramos__Group__1__Impl_in_rule__MedidaPesoGramos__Group__15206 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_45_in_rule__MedidaPesoGramos__Group__1__Impl5234 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MedidaPesoKilo__Group__0__Impl_in_rule__MedidaPesoKilo__Group__05269 = new BitSet(new long[]{0x0000000000180000L}); public static final BitSet FOLLOW_rule__MedidaPesoKilo__Group__1_in_rule__MedidaPesoKilo__Group__05272 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MedidaPesoKilo__Group__1__Impl_in_rule__MedidaPesoKilo__Group__15330 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__MedidaPesoKilo__Alternatives_1_in_rule__MedidaPesoKilo__Group__1__Impl5357 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__0__Impl_in_rule__Venta__Group__05391 = new BitSet(new long[]{0x0000400000000000L}); public static final BitSet FOLLOW_rule__Venta__Group__1_in_rule__Venta__Group__05394 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__CompradorAssignment_0_in_rule__Venta__Group__0__Impl5421 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__1__Impl_in_rule__Venta__Group__15451 = new BitSet(new long[]{0x3000100000080020L}); public static final BitSet FOLLOW_rule__Venta__Group__2_in_rule__Venta__Group__15454 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_46_in_rule__Venta__Group__1__Impl5482 = new 
BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__2__Impl_in_rule__Venta__Group__25513 = new BitSet(new long[]{0x0000840000000000L}); public static final BitSet FOLLOW_rule__Venta__Group__3_in_rule__Venta__Group__25516 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__ItemsVendidosAssignment_2_in_rule__Venta__Group__2__Impl5543 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__3__Impl_in_rule__Venta__Group__35573 = new BitSet(new long[]{0x0000840000000000L}); public static final BitSet FOLLOW_rule__Venta__Group__4_in_rule__Venta__Group__35576 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_3__0_in_rule__Venta__Group__3__Impl5603 = new BitSet(new long[]{0x0000800000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__4__Impl_in_rule__Venta__Group__45634 = new BitSet(new long[]{0x0000840000000000L}); public static final BitSet FOLLOW_rule__Venta__Group__5_in_rule__Venta__Group__45637 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_4__0_in_rule__Venta__Group__4__Impl5664 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__5__Impl_in_rule__Venta__Group__55695 = new BitSet(new long[]{0x0000840000000000L}); public static final BitSet FOLLOW_rule__Venta__Group__6_in_rule__Venta__Group__55698 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_5__0_in_rule__Venta__Group__5__Impl5725 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__6__Impl_in_rule__Venta__Group__65756 = new BitSet(new long[]{0x0000840000000000L}); public static final BitSet FOLLOW_rule__Venta__Group__7_in_rule__Venta__Group__65759 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_rule__Venta__Group_6__0_in_rule__Venta__Group__6__Impl5786 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group__7__Impl_in_rule__Venta__Group__75817 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_42_in_rule__Venta__Group__7__Impl5845 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_3__0__Impl_in_rule__Venta__Group_3__05892 = new BitSet(new long[]{0x3000100000080020L}); public static final BitSet FOLLOW_rule__Venta__Group_3__1_in_rule__Venta__Group_3__05895 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_47_in_rule__Venta__Group_3__0__Impl5923 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_3__1__Impl_in_rule__Venta__Group_3__15954 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__ItemsVendidosAssignment_3_1_in_rule__Venta__Group_3__1__Impl5981 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_4__0__Impl_in_rule__Venta__Group_4__06015 = new BitSet(new long[]{0x0001000000000000L}); public static final BitSet FOLLOW_rule__Venta__Group_4__1_in_rule__Venta__Group_4__06018 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_47_in_rule__Venta__Group_4__0__Impl6046 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_4__1__Impl_in_rule__Venta__Group_4__16077 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_4_1__0_in_rule__Venta__Group_4__1__Impl6104 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_4_1__0__Impl_in_rule__Venta__Group_4_1__06138 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__Venta__Group_4_1__1_in_rule__Venta__Group_4_1__06141 = new 
BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_48_in_rule__Venta__Group_4_1__0__Impl6169 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_4_1__1__Impl_in_rule__Venta__Group_4_1__16200 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__TotalRedondeadoAssignment_4_1_1_in_rule__Venta__Group_4_1__1__Impl6227 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_5__0__Impl_in_rule__Venta__Group_5__06261 = new BitSet(new long[]{0x0002000000000000L}); public static final BitSet FOLLOW_rule__Venta__Group_5__1_in_rule__Venta__Group_5__06264 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_47_in_rule__Venta__Group_5__0__Impl6292 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_5__1__Impl_in_rule__Venta__Group_5__16323 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_5_1__0_in_rule__Venta__Group_5__1__Impl6350 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_5_1__0__Impl_in_rule__Venta__Group_5_1__06384 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__Venta__Group_5_1__1_in_rule__Venta__Group_5_1__06387 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_49_in_rule__Venta__Group_5_1__0__Impl6415 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_5_1__1__Impl_in_rule__Venta__Group_5_1__16446 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__TotalPagadoAssignment_5_1_1_in_rule__Venta__Group_5_1__1__Impl6473 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_6__0__Impl_in_rule__Venta__Group_6__06507 = new BitSet(new 
long[]{0x0004000000000000L}); public static final BitSet FOLLOW_rule__Venta__Group_6__1_in_rule__Venta__Group_6__06510 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_47_in_rule__Venta__Group_6__0__Impl6538 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_6__1__Impl_in_rule__Venta__Group_6__16569 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_6_1__0_in_rule__Venta__Group_6__1__Impl6596 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_6_1__0__Impl_in_rule__Venta__Group_6_1__06630 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__Venta__Group_6_1__1_in_rule__Venta__Group_6_1__06633 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_50_in_rule__Venta__Group_6_1__0__Impl6661 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__Group_6_1__1__Impl_in_rule__Venta__Group_6_1__16692 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Venta__TotalDebiendoAssignment_6_1_1_in_rule__Venta__Group_6_1__1__Impl6719 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ItemVenta__Group__0__Impl_in_rule__ItemVenta__Group__06753 = new BitSet(new long[]{0x0008000000000000L}); public static final BitSet FOLLOW_rule__ItemVenta__Group__1_in_rule__ItemVenta__Group__06756 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ItemVenta__CantidadAssignment_0_in_rule__ItemVenta__Group__0__Impl6783 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ItemVenta__Group__1__Impl_in_rule__ItemVenta__Group__16813 = new BitSet(new long[]{0x0000000000000010L}); public static final BitSet FOLLOW_rule__ItemVenta__Group__2_in_rule__ItemVenta__Group__16816 = new BitSet(new 
long[]{0x0000000000000002L}); public static final BitSet FOLLOW_51_in_rule__ItemVenta__Group__1__Impl6844 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ItemVenta__Group__2__Impl_in_rule__ItemVenta__Group__26875 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__ItemVenta__ProductoAssignment_2_in_rule__ItemVenta__Group__2__Impl6902 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__Group__0__Impl_in_rule__Tarea__Group__06938 = new BitSet(new long[]{0x00000007FF800020L}); public static final BitSet FOLLOW_rule__Tarea__Group__1_in_rule__Tarea__Group__06941 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_52_in_rule__Tarea__Group__0__Impl6969 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__Group__1__Impl_in_rule__Tarea__Group__17000 = new BitSet(new long[]{0x0700000000000000L}); public static final BitSet FOLLOW_rule__Tarea__Group__2_in_rule__Tarea__Group__17003 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__HorarioAssignment_1_in_rule__Tarea__Group__1__Impl7030 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__Group__2__Impl_in_rule__Tarea__Group__27060 = new BitSet(new long[]{0x0020000000000000L}); public static final BitSet FOLLOW_rule__Tarea__Group__3_in_rule__Tarea__Group__27063 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__TareaRealizadaAssignment_2_in_rule__Tarea__Group__2__Impl7090 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__Group__3__Impl_in_rule__Tarea__Group__37120 = new BitSet(new long[]{0x00000007FF800020L}); public static final BitSet FOLLOW_rule__Tarea__Group__4_in_rule__Tarea__Group__37123 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_53_in_rule__Tarea__Group__3__Impl7151 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__Group__4__Impl_in_rule__Tarea__Group__47182 = new BitSet(new long[]{0x0000040000000000L}); public static final BitSet FOLLOW_rule__Tarea__Group__5_in_rule__Tarea__Group__47185 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__DuracionAssignment_4_in_rule__Tarea__Group__4__Impl7212 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__Tarea__Group__5__Impl_in_rule__Tarea__Group__57242 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_42_in_rule__Tarea__Group__5__Impl7270 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioLiteral__Group__0__Impl_in_rule__HorarioLiteral__Group__07313 = new BitSet(new long[]{0x0040000000000000L}); public static final BitSet FOLLOW_rule__HorarioLiteral__Group__1_in_rule__HorarioLiteral__Group__07316 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioLiteral__HoraAssignment_0_in_rule__HorarioLiteral__Group__0__Impl7343 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioLiteral__Group__1__Impl_in_rule__HorarioLiteral__Group__17373 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioLiteral__Group_1__0_in_rule__HorarioLiteral__Group__1__Impl7400 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioLiteral__Group_1__0__Impl_in_rule__HorarioLiteral__Group_1__07435 = new BitSet(new long[]{0x0000001800000000L}); public static final BitSet FOLLOW_rule__HorarioLiteral__Group_1__1_in_rule__HorarioLiteral__Group_1__07438 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_54_in_rule__HorarioLiteral__Group_1__0__Impl7466 = new BitSet(new long[]{0x0000000000000002L}); 
public static final BitSet FOLLOW_rule__HorarioLiteral__Group_1__1__Impl_in_rule__HorarioLiteral__Group_1__17497 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioLiteral__FraccionHoraAssignment_1_1_in_rule__HorarioLiteral__Group_1__1__Impl7524 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioNumerico__Group__0__Impl_in_rule__HorarioNumerico__Group__07558 = new BitSet(new long[]{0x0080000000000000L}); public static final BitSet FOLLOW_rule__HorarioNumerico__Group__1_in_rule__HorarioNumerico__Group__07561 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioNumerico__HoraAssignment_0_in_rule__HorarioNumerico__Group__0__Impl7588 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioNumerico__Group__1__Impl_in_rule__HorarioNumerico__Group__17618 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioNumerico__Group_1__0_in_rule__HorarioNumerico__Group__1__Impl7645 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioNumerico__Group_1__0__Impl_in_rule__HorarioNumerico__Group_1__07680 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__HorarioNumerico__Group_1__1_in_rule__HorarioNumerico__Group_1__07683 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_55_in_rule__HorarioNumerico__Group_1__0__Impl7711 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioNumerico__Group_1__1__Impl_in_rule__HorarioNumerico__Group_1__17742 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorarioNumerico__MinutosAssignment_1_1_in_rule__HorarioNumerico__Group_1__1__Impl7769 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_rule__TareaRevisionProductos__Group__0__Impl_in_rule__TareaRevisionProductos__Group__07803 = new BitSet(new long[]{0x0000000000006000L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group__1_in_rule__TareaRevisionProductos__Group__07806 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_56_in_rule__TareaRevisionProductos__Group__0__Impl7834 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group__1__Impl_in_rule__TareaRevisionProductos__Group__17865 = new BitSet(new long[]{0x0000000000006000L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group__2_in_rule__TareaRevisionProductos__Group__17868 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group_1__0_in_rule__TareaRevisionProductos__Group__1__Impl7895 = new BitSet(new long[]{0x0000000000006002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group__2__Impl_in_rule__TareaRevisionProductos__Group__27926 = new BitSet(new long[]{0x0000000000000010L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group__3_in_rule__TareaRevisionProductos__Group__27929 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Alternatives_2_in_rule__TareaRevisionProductos__Group__2__Impl7956 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group__3__Impl_in_rule__TareaRevisionProductos__Group__37986 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__ProductosRevisadosAssignment_3_in_rule__TareaRevisionProductos__Group__3__Impl8013 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group_1__0__Impl_in_rule__TareaRevisionProductos__Group_1__08051 = new BitSet(new 
long[]{0x0000000000000010L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group_1__1_in_rule__TareaRevisionProductos__Group_1__08054 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Alternatives_1_0_in_rule__TareaRevisionProductos__Group_1__0__Impl8081 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group_1__1__Impl_in_rule__TareaRevisionProductos__Group_1__18111 = new BitSet(new long[]{0x0000800000000000L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group_1__2_in_rule__TareaRevisionProductos__Group_1__18114 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_1_in_rule__TareaRevisionProductos__Group_1__1__Impl8141 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaRevisionProductos__Group_1__2__Impl_in_rule__TareaRevisionProductos__Group_1__28171 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_47_in_rule__TareaRevisionProductos__Group_1__2__Impl8199 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaArqueoCaja__Group__0__Impl_in_rule__TareaArqueoCaja__Group__08236 = new BitSet(new long[]{0x0200000000000000L}); public static final BitSet FOLLOW_rule__TareaArqueoCaja__Group__1_in_rule__TareaArqueoCaja__Group__08239 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaArqueoCaja__Group__1__Impl_in_rule__TareaArqueoCaja__Group__18297 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_57_in_rule__TareaArqueoCaja__Group__1__Impl8325 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaLimpiezaLocal__Group__0__Impl_in_rule__TareaLimpiezaLocal__Group__08360 = new BitSet(new 
long[]{0x0400000000000000L}); public static final BitSet FOLLOW_rule__TareaLimpiezaLocal__Group__1_in_rule__TareaLimpiezaLocal__Group__08363 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__TareaLimpiezaLocal__Group__1__Impl_in_rule__TareaLimpiezaLocal__Group__18421 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_58_in_rule__TareaLimpiezaLocal__Group__1__Impl8449 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_0__0__Impl_in_rule__LapsoTiempoNumerico__Group_0__08484 = new BitSet(new long[]{0x0800000000000000L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_0__1_in_rule__LapsoTiempoNumerico__Group_0__08487 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__MinutosAssignment_0_0_in_rule__LapsoTiempoNumerico__Group_0__0__Impl8514 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_0__1__Impl_in_rule__LapsoTiempoNumerico__Group_0__18544 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_59_in_rule__LapsoTiempoNumerico__Group_0__1__Impl8572 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1__0__Impl_in_rule__LapsoTiempoNumerico__Group_1__08607 = new BitSet(new long[]{0x0000000000600000L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1__1_in_rule__LapsoTiempoNumerico__Group_1__08610 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__HorasAssignment_1_0_in_rule__LapsoTiempoNumerico__Group_1__0__Impl8637 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1__1__Impl_in_rule__LapsoTiempoNumerico__Group_1__18667 = new BitSet(new long[]{0x0040000000000000L}); public 
static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1__2_in_rule__LapsoTiempoNumerico__Group_1__18670 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Alternatives_1_1_in_rule__LapsoTiempoNumerico__Group_1__1__Impl8697 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1__2__Impl_in_rule__LapsoTiempoNumerico__Group_1__28727 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__0_in_rule__LapsoTiempoNumerico__Group_1__2__Impl8754 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__0__Impl_in_rule__LapsoTiempoNumerico__Group_1_2__08791 = new BitSet(new long[]{0x0000000000000020L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__1_in_rule__LapsoTiempoNumerico__Group_1_2__08794 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_54_in_rule__LapsoTiempoNumerico__Group_1_2__0__Impl8822 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__1__Impl_in_rule__LapsoTiempoNumerico__Group_1_2__18853 = new BitSet(new long[]{0x0800000000000000L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__2_in_rule__LapsoTiempoNumerico__Group_1_2__18856 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__MinutosAssignment_1_2_1_in_rule__LapsoTiempoNumerico__Group_1_2__1__Impl8883 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoNumerico__Group_1_2__2__Impl_in_rule__LapsoTiempoNumerico__Group_1_2__28913 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_59_in_rule__LapsoTiempoNumerico__Group_1_2__2__Impl8941 = new BitSet(new long[]{0x0000000000000002L}); public 
static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group__0__Impl_in_rule__LapsoTiempoLiteral__Group__08978 = new BitSet(new long[]{0x0000000000600000L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group__1_in_rule__LapsoTiempoLiteral__Group__08981 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__HorasAssignment_0_in_rule__LapsoTiempoLiteral__Group__0__Impl9008 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group__1__Impl_in_rule__LapsoTiempoLiteral__Group__19038 = new BitSet(new long[]{0x0040000000000000L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group__2_in_rule__LapsoTiempoLiteral__Group__19041 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Alternatives_1_in_rule__LapsoTiempoLiteral__Group__1__Impl9068 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group__2__Impl_in_rule__LapsoTiempoLiteral__Group__29098 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group_2__0_in_rule__LapsoTiempoLiteral__Group__2__Impl9125 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group_2__0__Impl_in_rule__LapsoTiempoLiteral__Group_2__09162 = new BitSet(new long[]{0x0000001800000000L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group_2__1_in_rule__LapsoTiempoLiteral__Group_2__09165 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_54_in_rule__LapsoTiempoLiteral__Group_2__0__Impl9193 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__LapsoTiempoLiteral__Group_2__1__Impl_in_rule__LapsoTiempoLiteral__Group_2__19224 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_1_in_rule__LapsoTiempoLiteral__Group_2__1__Impl9251 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleCliente_in_rule__Verduleria__ClientesAssignment_09290 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleProductoConPrecio_in_rule__Verduleria__ProductosAssignment_19321 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleVenta_in_rule__Verduleria__VentasAssignment_29352 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTarea_in_rule__Verduleria__TareasAssignment_39383 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_ID_in_rule__ClienteConDeuda__NameAssignment_19414 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMontoDinero_in_rule__ClienteConDeuda__MontoDeudaAssignment_39445 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_ID_in_rule__ClienteAlDia__NameAssignment_19476 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_ID_in_rule__ClienteConCredito__NameAssignment_19507 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMontoDinero_in_rule__ClienteConCredito__MontoCreditoAssignment_39538 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePrecioPorPeso_in_rule__ProductoConPrecio__PrecioAssignment_29569 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_ID_in_rule__Producto__NameAssignment_19600 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMontoDinero_in_rule__PrecioPorPeso__PrecioAssignment_09631 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePesoMagnitudVariable_in_rule__PrecioPorPeso__PesajeAssignment_1_0_19662 = new BitSet(new 
long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePesoMagnitudFija_in_rule__PrecioPorPeso__PesajeAssignment_1_1_19693 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_INT_in_rule__MontoDinero__PrecioAssignment_09724 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_INT_in_rule__PesoMagnitudVariable__MagnitudAssignment_09755 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMedidaPeso_in_rule__PesoMagnitudVariable__MedidaPesoAssignment_19786 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_60_in_rule__PesoMagnitudFija__PesaMedioKiloAssignment_09822 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_61_in_rule__PesoMagnitudFija__PesaCuartoKiloAssignment_1_19866 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_19_in_rule__PesoMagnitudFija__PesaUnKiloAssignment_2_19910 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_ID_in_rule__Venta__CompradorAssignment_09953 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleItemVenta_in_rule__Venta__ItemsVendidosAssignment_29988 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleItemVenta_in_rule__Venta__ItemsVendidosAssignment_3_110019 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMontoDinero_in_rule__Venta__TotalRedondeadoAssignment_4_1_110050 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMontoDinero_in_rule__Venta__TotalPagadoAssignment_5_1_110081 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleMontoDinero_in_rule__Venta__TotalDebiendoAssignment_6_1_110112 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rulePeso_in_rule__ItemVenta__CantidadAssignment_010143 = new 
BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_ID_in_rule__ItemVenta__ProductoAssignment_210178 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleHorario_in_rule__Tarea__HorarioAssignment_110213 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleTipoTarea_in_rule__Tarea__TareaRealizadaAssignment_210244 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleLapsoTiempo_in_rule__Tarea__DuracionAssignment_410275 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleHorasLiteral_in_rule__HorarioLiteral__HoraAssignment_010306 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleFraccionHoraLiteral_in_rule__HorarioLiteral__FraccionHoraAssignment_1_110337 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_INT_in_rule__HorarioNumerico__HoraAssignment_010368 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_INT_in_rule__HorarioNumerico__MinutosAssignment_1_110399 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_ID_in_rule__TareaRevisionProductos__ProductosRevisadosAssignment_1_110434 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_ID_in_rule__TareaRevisionProductos__ProductosRevisadosAssignment_310473 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_INT_in_rule__LapsoTiempoNumerico__MinutosAssignment_0_010508 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_INT_in_rule__LapsoTiempoNumerico__HorasAssignment_1_010539 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_RULE_INT_in_rule__LapsoTiempoNumerico__MinutosAssignment_1_2_110570 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet 
FOLLOW_ruleHorasLiteral_in_rule__LapsoTiempoLiteral__HorasAssignment_010601 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_ruleFraccionHoraLiteral_in_rule__LapsoTiempoLiteral__FraccionHoraAssignment_2_110632 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__HorasLiteral__LiteralAlternatives_0_in_rule__HorasLiteral__LiteralAssignment10663 = new BitSet(new long[]{0x0000000000000002L}); public static final BitSet FOLLOW_rule__FraccionHoraLiteral__LiteralAlternatives_0_in_rule__FraccionHoraLiteral__LiteralAssignment10696 = new BitSet(new long[]{0x0000000000000002L}); }<file_sep>/** */ package org.xtext.example.mydsl.textualVerduler; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Cliente Al Dia</b></em>'. * <!-- end-user-doc --> * * * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getClienteAlDia() * @model * @generated */ public interface ClienteAlDia extends Cliente { } // ClienteAlDia <file_sep>/** */ package org.xtext.example.mydsl.textualVerduler.impl; import org.eclipse.emf.common.notify.Notification; import org.eclipse.emf.common.notify.NotificationChain; import org.eclipse.emf.ecore.EClass; import org.eclipse.emf.ecore.InternalEObject; import org.eclipse.emf.ecore.impl.ENotificationImpl; import org.xtext.example.mydsl.textualVerduler.FraccionHoraLiteral; import org.xtext.example.mydsl.textualVerduler.HorarioLiteral; import org.xtext.example.mydsl.textualVerduler.HorasLiteral; import org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage; /** * <!-- begin-user-doc --> * An implementation of the model object '<em><b>Horario Literal</b></em>'. 
* <!-- end-user-doc --> * <p> * The following features are implemented: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.HorarioLiteralImpl#getHora <em>Hora</em>}</li> * <li>{@link org.xtext.example.mydsl.textualVerduler.impl.HorarioLiteralImpl#getFraccionHora <em>Fraccion Hora</em>}</li> * </ul> * * @generated */ public class HorarioLiteralImpl extends HorarioImpl implements HorarioLiteral { /** * The cached value of the '{@link #getHora() <em>Hora</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getHora() * @generated * @ordered */ protected HorasLiteral hora; /** * The cached value of the '{@link #getFraccionHora() <em>Fraccion Hora</em>}' containment reference. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @see #getFraccionHora() * @generated * @ordered */ protected FraccionHoraLiteral fraccionHora; /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ protected HorarioLiteralImpl() { super(); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override protected EClass eStaticClass() { return TextualVerdulerPackage.Literals.HORARIO_LITERAL; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public HorasLiteral getHora() { return hora; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetHora(HorasLiteral newHora, NotificationChain msgs) { HorasLiteral oldHora = hora; hora = newHora; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.HORARIO_LITERAL__HORA, oldHora, newHora); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setHora(HorasLiteral newHora) { if (newHora != hora) { NotificationChain msgs = null; if (hora != null) msgs = ((InternalEObject)hora).eInverseRemove(this, 
EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.HORARIO_LITERAL__HORA, null, msgs); if (newHora != null) msgs = ((InternalEObject)newHora).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.HORARIO_LITERAL__HORA, null, msgs); msgs = basicSetHora(newHora, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.HORARIO_LITERAL__HORA, newHora, newHora)); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public FraccionHoraLiteral getFraccionHora() { return fraccionHora; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public NotificationChain basicSetFraccionHora(FraccionHoraLiteral newFraccionHora, NotificationChain msgs) { FraccionHoraLiteral oldFraccionHora = fraccionHora; fraccionHora = newFraccionHora; if (eNotificationRequired()) { ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA, oldFraccionHora, newFraccionHora); if (msgs == null) msgs = notification; else msgs.add(notification); } return msgs; } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ public void setFraccionHora(FraccionHoraLiteral newFraccionHora) { if (newFraccionHora != fraccionHora) { NotificationChain msgs = null; if (fraccionHora != null) msgs = ((InternalEObject)fraccionHora).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA, null, msgs); if (newFraccionHora != null) msgs = ((InternalEObject)newFraccionHora).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA, null, msgs); msgs = basicSetFraccionHora(newFraccionHora, msgs); if (msgs != null) msgs.dispatch(); } else if (eNotificationRequired()) eNotify(new ENotificationImpl(this, Notification.SET, TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA, newFraccionHora, newFraccionHora)); } /** 
* <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) { switch (featureID) { case TextualVerdulerPackage.HORARIO_LITERAL__HORA: return basicSetHora(null, msgs); case TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA: return basicSetFraccionHora(null, msgs); } return super.eInverseRemove(otherEnd, featureID, msgs); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public Object eGet(int featureID, boolean resolve, boolean coreType) { switch (featureID) { case TextualVerdulerPackage.HORARIO_LITERAL__HORA: return getHora(); case TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA: return getFraccionHora(); } return super.eGet(featureID, resolve, coreType); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eSet(int featureID, Object newValue) { switch (featureID) { case TextualVerdulerPackage.HORARIO_LITERAL__HORA: setHora((HorasLiteral)newValue); return; case TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA: setFraccionHora((FraccionHoraLiteral)newValue); return; } super.eSet(featureID, newValue); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public void eUnset(int featureID) { switch (featureID) { case TextualVerdulerPackage.HORARIO_LITERAL__HORA: setHora((HorasLiteral)null); return; case TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA: setFraccionHora((FraccionHoraLiteral)null); return; } super.eUnset(featureID); } /** * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */ @Override public boolean eIsSet(int featureID) { switch (featureID) { case TextualVerdulerPackage.HORARIO_LITERAL__HORA: return hora != null; case TextualVerdulerPackage.HORARIO_LITERAL__FRACCION_HORA: return fraccionHora != null; } return super.eIsSet(featureID); } } //HorarioLiteralImpl <file_sep>/** */ package 
org.xtext.example.mydsl.textualVerduler; import org.eclipse.emf.ecore.EObject; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>Monto Dinero</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * </p> * <ul> * <li>{@link org.xtext.example.mydsl.textualVerduler.MontoDinero#getPrecio <em>Precio</em>}</li> * </ul> * * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getMontoDinero() * @model * @generated */ public interface MontoDinero extends EObject { /** * Returns the value of the '<em><b>Precio</b></em>' attribute. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Precio</em>' attribute isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Precio</em>' attribute. * @see #setPrecio(int) * @see org.xtext.example.mydsl.textualVerduler.TextualVerdulerPackage#getMontoDinero_Precio() * @model * @generated */ int getPrecio(); /** * Sets the value of the '{@link org.xtext.example.mydsl.textualVerduler.MontoDinero#getPrecio <em>Precio</em>}' attribute. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @param value the new value of the '<em>Precio</em>' attribute. * @see #getPrecio() * @generated */ void setPrecio(int value); } // MontoDinero
0b110618836bd0e3b7c5c345870afaf01936a7bc
[ "Markdown", "Java", "INI" ]
24
Java
unq-objetos3-alumnos/unq-obj3-s1-2015-grupo7-tp3
d3655c9399754fbdcc4fbefecfaae3bf81e144b9
e4dccd2e99719b06330767e0c9c0dde99ee1c962
refs/heads/master
<file_sep>import axios from 'axios'; import { YOUTUBE_SEARCH_API_URL } from './endpoints'; import { YOUTUBE_API_KEY } from './api-keys'; export const fetchSearchResults = async query => { try { const response = axios.get(YOUTUBE_SEARCH_API_URL, { params: { part: 'snippet', q: query, key: YOUTUBE_API_KEY, type: 'video', }, }); return response; } catch (err) { return { error: true }; } }; export const fetchTopVideos = async () => { try { const response = await axios.get(YOUTUBE_SEARCH_API_URL, { params: { part: 'snippet', key: YOUTUBE_API_KEY, chart: 'mostPopular', regionCode: 'PK', maxResults: 20, }, }); return response; } catch (err) { return { error: true }; } }; export const fetchRelatedVideos = async videoId => { try { const response = await axios.get(YOUTUBE_SEARCH_API_URL, { params: { part: 'snippet', key: YOUTUBE_API_KEY, type: 'video', relatedToVideoId: videoId, }, }); return response; } catch (err) { return { error: true }; } }; <file_sep>export { VideoDetailPage } from './video-detail-page'; <file_sep>import React from 'react'; import { Link } from 'react-router-dom'; import { propTypes } from './prop-types'; import { StyledThumbnailContainer, StyledImage, StyledImageContainer, StyledPlayIconContainer, StyledTitle, } from './styled'; export const VideoThumbnail = ({ title, imageUrl, flexDirection, width, videoId }) => ( <StyledThumbnailContainer width={width} flexDirection={flexDirection}> <StyledImageContainer> <StyledImage src={imageUrl} alt='some' /> <Link to={`/watch/${videoId}/${title}`}> <StyledPlayIconContainer> <i className='fa fa-play fa-3x' /> </StyledPlayIconContainer> </Link> </StyledImageContainer> <StyledTitle>{title}</StyledTitle> </StyledThumbnailContainer> ); VideoThumbnail.propTypes = propTypes; <file_sep>import React from 'react'; import { ThemeProvider } from 'styled-components'; import { BrowserRouter as Router } from 'react-router-dom'; // components import { Routes } from './routes'; // constants import { THEME } from 
'./styles/theme'; export const App = () => ( <ThemeProvider theme={THEME}> <Router> <Routes /> </Router> </ThemeProvider> ); <file_sep>import React, { useEffect, useState } from 'react'; import { Header, VideoThumbnail, ErrorMessage } from '../../components'; import { propTypes } from './prop-types'; import { StyledWrapper } from './styled'; import { fetchTopVideos } from '../../api'; export const HomePage = () => { const [topVideosList, setTopVideosList] = useState([]); const [error, setError] = useState(false); const getTopVideosList = async () => { const response = await fetchTopVideos(); if (response.error) { setError(true); } else { setTopVideosList(response.data.items); } }; useEffect(() => { getTopVideosList(); }, []); if (error) { return <ErrorMessage />; } return ( <div> <Header /> <StyledWrapper> {topVideosList.map(({ snippet, etag, id }) => ( <VideoThumbnail key={etag} flexDirection='column' width='20%' videoId={id.videoId} imageUrl={snippet.thumbnails.medium.url} title={snippet.title} /> ))} </StyledWrapper> </div> ); }; HomePage.propTypes = propTypes; <file_sep>export { VideoThumbnail } from './video-thumbnail'; <file_sep>import styled from 'styled-components'; export const StyledWrapper = styled.div` display: flex; flex-direction: row; margin: 20px; padding: 0 60px; `; export const StyledSuggestionsContainer = styled.div` flex-basis: 40%; height: 100vh; margin: 0 30px; `; export const StyledVideoWrapper = styled.div` height: 0; padding-bottom: 56.25%; /* 16:9 */ position: relative; width: 50vw; `; export const StyledIframe = styled.iframe` height: 100%; left: 0; position: absolute; top: 0; width: 100%; `; <file_sep>export { fetchSearchResults, fetchTopVideos, fetchRelatedVideos } from './youtube-api'; <file_sep>export { SearchResultsPage } from './search-results-page'; <file_sep>export const YOUTUBE_SEARCH_API_URL = 'https://www.googleapis.com/youtube/v3/search'; <file_sep>import { string } from 'prop-types'; export const propTypes = { flexDirection: 
string.isRequired, imageUrl: string.isRequired, title: string.isRequired, videoId: string.isRequired, width: string.isRequired, }; <file_sep>import styled from 'styled-components'; export const StyledErrorPageContainer = styled.div` color: #ff9494; padding: 40px; `; <file_sep>import styled from 'styled-components'; export const StyledThumbnailContainer = styled.div` display: flex; flex-basis: ${({ width }) => width}; flex-direction: ${({ flexDirection }) => flexDirection}; margin: 20px; max-width: ${({ width }) => width}; padding: 10px; `; export const StyledImageContainer = styled.div` flex: 1; float: left; position: relative; `; export const StyledTitle = styled.p` flex: 1; margin: 0 20px; `; export const StyledPlayIconContainer = styled.div` align-items: center; color: transparent; display: flex; height: 100%; justify-content: center; left: 0px; margin: 0 auto; position: absolute; right: 0px; top: 0px; visibility: visible; width: 100%; &:hover { color: white; cursor: pointer; } `; export const StyledImage = styled.img` height: auto; width: 100%; &:hover { cursor: pointer; } `; <file_sep>import React, { useState, useEffect } from 'react'; import { Header, VideoThumbnail, ErrorMessage } from '../../components'; import { StyledWrapper, StyledSuggestionsContainer, StyledIframe, StyledVideoWrapper, } from './styled'; import { fetchRelatedVideos } from '../../api'; import { YOUTUBE_EMBED_URL } from '../../utils'; import { propTypes } from './prop-types'; export const VideoDetailPage = ({ match: { params } }) => { const [relatedVideosList, setRelatedVideosList] = useState([]); const [error, setError] = useState(false); const { videoId, title } = params; const getRelatedVideosList = async () => { const response = await fetchRelatedVideos(videoId); if (response.error) { setError(true); } else { setRelatedVideosList(response.data.items); } }; useEffect(() => { getRelatedVideosList(); }, []); if (error) { return <ErrorMessage />; } return ( <div> <Header /> <StyledWrapper> 
<div> <StyledVideoWrapper> <StyledIframe title='videPlayer' src={`${YOUTUBE_EMBED_URL}/${videoId}`} frameBorder='0' allow='accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture' allowFullScreen /> </StyledVideoWrapper> <h3>{title}</h3> </div> <StyledSuggestionsContainer> {relatedVideosList.map(({ snippet, id }) => ( <VideoThumbnail flexDirection='row' width='100%' key={id.videoId} videoId={id.videoId} imageUrl={snippet.thumbnails.medium.url} title={snippet.title} /> ))} </StyledSuggestionsContainer> </StyledWrapper> </div> ); }; VideoDetailPage.propTypes = propTypes; <file_sep>import React, { useEffect, useState } from 'react'; import { Header, VideoThumbnail, ErrorMessage } from '../../components'; import { propTypes } from './prop-types'; import { StyledWrapper } from './styled'; import { fetchSearchResults } from '../../api'; export const SearchResultsPage = ({ match: { params } }) => { const [searchResultsList, setSearchResultsList] = useState([]); const [error, setError] = useState(false); const { query } = params; const getSearchResultsList = async () => { const response = await fetchSearchResults(query); if (response.error) { setError(true); } else { setSearchResultsList(response.data.items); } }; useEffect(() => { getSearchResultsList(); }, []); if (error) { return <ErrorMessage />; } return ( <div> <Header /> <StyledWrapper> {searchResultsList.map(({ snippet, id }) => ( <VideoThumbnail flexDirection='row' width='80%' key={id.videoId} videoId={id.videoId} imageUrl={snippet.thumbnails.medium.url} title={snippet.title} /> ))} </StyledWrapper> </div> ); }; SearchResultsPage.propTypes = propTypes; <file_sep>import React, { useState } from 'react'; import { Link } from 'react-router-dom'; import { StyledBrandName, StyledBrandNameSuperscript, StyledButton, StyledForm, StyledHeader, StyledInput, StyledLinkWrapper, StyledLogoContainer, } from './styled'; export const Header = () => { const [query, setQuery] = useState(''); const 
handleInputChange = event => { setQuery(event.target.value); }; return ( <StyledHeader> <StyledLinkWrapper> <Link to='/'> <StyledLogoContainer> <i className='fa fa-youtube-play fa-4x' /> <StyledBrandName> VideoTube <StyledBrandNameSuperscript>PK</StyledBrandNameSuperscript> </StyledBrandName> </StyledLogoContainer> </Link> </StyledLinkWrapper> <StyledForm> <StyledInput value={query} onChange={handleInputChange} placeholder='search' type='text' /> <Link to={`/results/${query}`}> <StyledButton> <i className='fa fa-search' /> </StyledButton> </Link> </StyledForm> </StyledHeader> ); }; <file_sep>import styled from 'styled-components'; export const StyledHeader = styled.header` align-items: center; border-bottom: 1px solid #999999; display: flex; text-align: center; `; export const StyledLinkWrapper = styled.div` a { color: black; text-decoration: none; } `; export const StyledLogoContainer = styled.div` align-items: center; display: flex; font-family: sans-serif; justify-self: flex-start; list-style: none; text-decoration: none; `; export const StyledBrandName = styled.span` font-size: 20px; font-weight: bolder; margin: 0 10px; `; export const StyledBrandNameSuperscript = styled.sup` font-size: 10px; font-weight: lighter; margin: 0; `; export const StyledForm = styled.form` color: 'red'; flex: 2; `; export const StyledInput = styled.input` padding: 5px; width: 30%; `; export const StyledButton = styled.button` background-color: transparent; border: 1px solid black; padding: 5px 20px; `; <file_sep>import { object, shape } from 'prop-types'; export const propTypes = { match: shape({ params: object.isRequired, }).isRequired, };
371dfd6edc6c2831f43c7673dd043c4a9f3a48d5
[ "JavaScript" ]
18
JavaScript
Touseef-ahmad/mock-youtube
e94c706d3c1deaf0f6c392ad82394bc395e04aee
39d2ab840a5895e7d2e58174c1990a0b2414b3a8
refs/heads/master
<file_sep>//commands to run drunk_turtle: cd catkin_ws source devel/setup.bash //to produce a random stumbling motion, enter: roslaunch drunk_turtle stumble.launch //OR //to produce a swaying stumble, enter: roslaunch drunk_turtle stumble.launch swag:=sway //to monitor turtle's position, open a new terminal window and type: cd catkin_ws rostopic echo turtle1/pose <file_sep>#!/usr/bin/env python import rospy import numpy as np from turtlesim.msg import Pose def callback(data): rospy.set_param('x',data.x) rospy.set_param('y',data.y) rospy.set_param('theta',data.theta) def listener(): rospy.init_node('walkie_talkie',anonymous=True) rospy.Subscriber('turtle1/pose',Pose,callback) rospy.spin() if __name__ == '__main__': listener() <file_sep>#!/usr/bin/env python import rospy import numpy as np from std_msgs.msg import String from geometry_msgs.msg import Twist from geometry_msgs.msg import Vector3 def talker(): pub = rospy.Publisher('turtle1/cmd_vel', Twist, queue_size=10) rospy.init_node('turtle_uber', anonymous=True) rate = rospy.Rate(1) # 1hz while not rospy.is_shutdown(): t = rospy.Time.now().secs if rospy.get_param('stumble_style')=="sway": if rospy.get_param('x')>=11 or rospy.get_param('x')==0 or rospy.get_param('y')>=11 or rospy.get_param('y')==0: if rospy.get_param('x')>=11: desired_theta = np.pi elif rospy.get_param('x')==0: desired_theta = 0 elif rospy.get_param('y')>=11: desired_theta = 3*np.pi/2 elif rospy.get_param('y')==0: desired_theta = np.pi/2 while rospy.get_param('theta') < desired_theta - np.pi/6 or rospy.get_param('theta') > desired_theta + np.pi/6: ang = Vector3(0,0,1) lin = Vector3(0,0,0) stumble = Twist(lin,ang) pub.publish(stumble) rate.sleep() ang = Vector3(0,0,0) lin = Vector3(.5,0,0) stumble = Twist(lin,ang) pub.publish(stumble) rate.sleep() else: lin = Vector3(1,0,0) ang = Vector3(0,0,np.pi*np.sin(2*t)) stumble = Twist(lin,ang) pub.publish(stumble) rate.sleep() else: lin = Vector3(np.random.rand(1,1)[0][0],0,0) ang = 
Vector3(0,0,(np.random.choice([-1,1],1)[0])*3*np.random.rand(1,1)[0][0]) stumble = Twist(lin,ang) pub.publish(stumble) rate.sleep() if __name__ == '__main__': try: talker() except rospy.ROSInterruptException: pass
27203c857ed73f9bdd4f5ec98b7ab006e716d67a
[ "Python", "Text" ]
3
Text
ilsemae/DrunkTurtle
9853151b961eb949d29cef54d392696951a8588a
9ea2eae3aabcfdc288a4668c3a52c16ca393685d
refs/heads/main
<file_sep>self.__precacheManifest = (self.__precacheManifest || []).concat([ { "revision": "c13c761fd919658b19f0d9410c234b90", "url": "/index.html" }, { "revision": "0d03d4b988e7a8d6aa39", "url": "/static/css/2.66b7f74f.chunk.css" }, { "revision": "cae61da56edfea7a1c4d", "url": "/static/css/main.4e622f55.chunk.css" }, { "revision": "0d03d4b988e7a8d6aa39", "url": "/static/js/2.ea4a5471.chunk.js" }, { "revision": "e88a3e95b5364d46e95b35ae8c0dc27d", "url": "/static/js/2.ea4a5471.chunk.js.LICENSE.txt" }, { "revision": "cae61da56edfea7a1c4d", "url": "/static/js/main.eb9bb03a.chunk.js" }, { "revision": "01fefba12108911d1878", "url": "/static/js/runtime-main.28f5bb8f.js" } ]);
4c41d060e4bb6ab3ef76d344c20c051bc9858899
[ "JavaScript" ]
1
JavaScript
forlediska/webservice
e5d8e48ca3f4f6d12f6bb1ea32bff94f7d4ecb05
9c3893d058f738961749805bd460bac0233595f8
refs/heads/master
<file_sep>const paths = { home: '/', notFound: '/not-found', editTask: '/edit-task/:id', } export default paths <file_sep>import { TaskStatus } from '../types' /** * Status */ export const STATUS_TO_DO = 'ToDo' export const STATUS_IN_PROGRESS = 'InProgress' export const STATUS_BLOCKED = 'Blocked' export const STATUS_IN_QA = 'InQA' export const STATUS_DONE = 'Done' export const STATUS_DEPLOYED = 'Deployed' export const STATUS_LABEL_MAP: { [key in TaskStatus]: string } = { [STATUS_TO_DO]: 'To Do', [STATUS_IN_PROGRESS]: 'In Progress', [STATUS_BLOCKED]: 'Blocked', [STATUS_IN_QA]: 'In Qa', [STATUS_DONE]: 'Done', [STATUS_DEPLOYED]: 'Deployed', } <file_sep>/** * Validation Rules */ export const VALIDATION_TITLE_MIN_LENGTH = 5 export const VALIDATION_DESCRIPTION_MIN_LENGTH = 20 <file_sep>import { STATUS_LABEL_MAP } from 'constants/status' import { TaskStatus } from 'types' export const getStatusLabel = (status: TaskStatus): string => { return STATUS_LABEL_MAP[status] } <file_sep>import { createTheme } from '@material-ui/core/styles' const theme = createTheme({ palette: { primary: { main: '#1775b9', light: '#a2ceed', }, }, shape: { borderRadius: 8, }, }) export default theme <file_sep>import { getStatusLabel } from './get-status-label' test('getPossibleTaskStatus should return correct value for next status', () => { expect(getStatusLabel('ToDo')).toBe('To Do') expect(getStatusLabel('InProgress')).toBe('In Progress') expect(getStatusLabel('Blocked')).toBe('Blocked') expect(getStatusLabel('InQA')).toBe('In Qa') expect(getStatusLabel('Done')).toBe('Done') expect(getStatusLabel('Deployed')).toBe('Deployed') }) <file_sep># Changelog All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
### [0.1.2](https://gitlab.com/idin.khayami/rechat-frontend-challenge/compare/v0.1.1...v0.1.2) (2021-08-20) ### Features - add delete into context and component ([fb3653b](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/fb3653beea97bcc76bce7685e5033b32a0191f5b)) - add delete into home and implement search ([4096ec7](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/4096ec7526b8b9009e27cb6b404896fe18993c09)) ### 0.1.1 (2021-08-09) ### Features - add button component ([f21967e](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f21967e10a88f148e1fc56b8a4c91eeea9281576)) - add classname util ([5dc5027](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/5dc50273e0bf6e8ae4c2e50d10d58220b4cec9d0)) - add commitlint and husky ([8ee4ab8](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/8ee4ab84e5590a3b62f4fbcc7e21796eba798708)) - add custom theme color ([1a720fd](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/1a720fd4937a04a7b5c563d075b9b6fe80aaf296)) - add eslint and prettier ([633b768](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/633b7681df05e0e16c98b349a1275fe948351f9d)) - add import/order into lint ([da63c20](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/da63c20eec76dad4df8cee53718c59191def4f69)) - add new readme ([3262d8a](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/3262d8ab43234b80af6263bd228841efe08ef79d)) - add new rule for import alias ([84a3230](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/84a32305b2bc2af05fad28c7d0e319d22c936451)) - add no result component ([acadd13](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/acadd13acd993dab71a2e544037299472a30ddf7)) - add page header in the top of pages ([4c6b160](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/4c6b160bcc766a3d0c8d87d4323fc6985ba5e59f)) - add pages 
([faf03ba](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/faf03ba69fe248b2de200b5a7c98a3ff231a4a4d)) - add react router and implement paths ([a14f37e](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/a14f37ef7a33587f9525212a6987a55b5045e578)) - add react-testing-library ([185ffe1](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/185ffe10e435bb56fa9f5da5a50d590450beb743)) - add select form control ([f48b4a0](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f48b4a058f954e6e2595a75d78fd2acce9a6c543)) - add splash screen component and suspense ([6d8acbf](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/6d8acbf5c6857e293c17e2389e0d03bb5dc8448a)) - add standard version ([a3b1d78](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/a3b1d78050c5e9747f1b08de79a413dfb6d9dff7)) - add task card component ([a6dee0a](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/a6dee0a3f632e3d4c17a5a81561273bd9b4e706b)) - add text field component ([417a13b](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/417a13ba42c38e02605a8ffdf271b3dea405c2f6)) - add type files ([48697ce](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/48697cea2816f1f4f2175354ac0f9c4d6f68c7d9)) - add type for status ([7773a18](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/7773a185090b04cc0649749c3f7ccae9115e61aa)) - change status type and use getPossibleTaskStatus and mapStatusStateToName ([e2e0622](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/e2e0622b53d87559c49b7cd9f6688151510de7a2)) - implement add new task component ([a73552b](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/a73552bdd2d1703895c61d8dc51cfac977b5d780)) - implement add new task components ([1066dd0](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/1066dd09756aad515f72e9101621f89007fadfd5)) - implement add task and taskslist with context 
([9a8d579](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/9a8d5795196761d236da5ab2fb7e0191c4277215)) - implement add/edit form test with valid/invalid data ([3aa8e16](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/3aa8e167b112ac2d2220996b9294879302dd10b2)) - implement add/edit task form component ([1bf512d](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/1bf512d79b2dc360d5e98867036a04d4bec153ae)) - implement edit task route and page ([de37e00](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/de37e00e6d17142b147252d381cf30a29253c605)) - implement get possible task status function ([2e4d4c3](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/2e4d4c34a4bb16cfeaa377ea8fdcd459d197c9d7)) - implement get possible task status test ([7fd0125](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/7fd012525ff082f3f991cd9e3d945679424db6aa)) - implement get status label test ([4c2dd31](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/4c2dd3115018a13910fec4d878c662b16bb19614)) - implement map for mapping status state to name ([3bb2af8](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/3bb2af878ab994949f9173a73f4f55831197c4e1)) - implement page and container and wrong page components ([c6e91d3](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/c6e91d396ed6ae6b9a87ca98bb3ce04691a99053)) - implement page header component ([0f0576b](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/0f0576bf0a1eca6127f7f969ae63f5714f5c698b)) - implement splash screen page ([c10116a](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/c10116aaa0edc1d44d7b1ef0c094d4acefc7da35)) - implement task context and provider ([74c25a6](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/74c25a63303fa9aaa2616ff2105577b38b84ed86)) - implement task list component 
([33f0797](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/33f0797a74e825b2770162bc404d63922450fb03)) - implement taskslist components ([53948db](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/53948db68ab04487de849a8acfa9c3bce03b70b6)) - implement ui for task card ([f664672](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f66467263e43a8ddc80e0966c76117533b91bbd7)) - implement update task function inside context ([5b68628](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/5b686289197cd7496bfbd6612cfedb6d7e265e7f)) - install classnames package and use it in components style ([31a8a80](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/31a8a80f07cfa6aead0ec70716c55cf35c8c21f8)) - install material ui core and makeStyles ([42d5cf2](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/42d5cf2247d0abbccb1b83c883e3af18b02424d9)) - install react hooks eslint plugin ([615771f](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/615771fa2657dfbf09ce556539f4a5506a807e25)) - install react icons ([87d4bd4](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/87d4bd413d70170636b39476cb62125c75f5f715)) - install react-hook-form ([8337f27](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/8337f272ea937a39316dd47293f0c83518432e90)) - install uuid package for generate id ([887202c](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/887202c446b21a0bb81f16293f22806f8910003a)) - move onSubmitTask into edit task component ([0de4331](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/0de43312575f5c54602ef12dfb78204937bf697a)) - use mapStatusStateToStatusName inside components ([99c02d7](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/99c02d7628324fbeedd8315cc9e9cf1fd2131392)) - use task form in edit task and add task components 
([595ff8e](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/595ff8e2994f52394b97b2ba64e1f93f6de6dc95)) ### Bug Fixes - add custom scroll style and fix reset form after submit ([f015ca1](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f015ca17301a0063b4130dc6b359b1cfb1da0d48)) - add into begin of tasks array ([a98078f](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/a98078f5e32656961280cf4f9fc33060d584b083)) - add size to button ([367af47](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/367af472a4561d3354a3dab87a72ab67c2e75eed)) - add storage util with test ([f04734f](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f04734fc798c4187a146d9e42655c0ec6ac3a912)) - change chevron icon and use material icons ([bdf67cd](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/bdf67cd4bd7b6286ff30d1022bfc51f57a113197)) - change home tasklist style and add scroll inside box ([1d8c530](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/1d8c530f4446dd996179fba9440517a30d2239e7)) - change import url and remove unused \*/../ ([639d500](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/639d5006116c4ad312f3f9eca55f71c1de214bc6)) - change link href for cancel button ([019a285](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/019a285850ff4b4d2ce00c91e294a8de6acdcbf9)) - change mobile 340px with task card ([f1c35ad](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f1c35ad5c40e1f5ed784ad06668818649d6b68e0)) - change not found page ([9c0c5ab](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/9c0c5abf10990a08a639bbf7b21f8c0cd5d26177)) - change prop types in page header component ([f684a90](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f684a90a2e8f619de818cc34f58851d21d1f6f17)) - change select id 
([f45b0da](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f45b0da8606ea667b2434f78c80a88a373b17c77)) - change status service folders ([7e01763](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/7e01763ab7fb0ca1632ba203b3886babe588e098)) - change task card max width ([4fed617](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/4fed61760acd82c9bdd4222d5908ceb7a45baaf5)) - change task form validation rule ([f2e5427](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f2e54274ab25c2fb5d925c086e4de004ae4d9837)) - change theme str ([f8d7034](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f8d70346130cc0a36b4f709933b3904da4d51ece)) - change update task argumants and add id ([06b8a8b](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/06b8a8b6d0d093349783db36088ddc719087f7fc)) - change wrongpage component props ([4c4f19e](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/4c4f19e2e592d463191e9f653b7759d384bddb7e)) - clean home style ([42b69ca](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/42b69cace6bc8b7663473ad97a6b06e5f1e19b47)) - clean task context types ([0314721](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/03147215c6038febe8809605c6c0c82184852b4c)) - disable typescript-eslint/explicit-module-boundary-types ([e4823c9](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/e4823c9b92fbb3f1e06d0bba60f529e3697fb766)) - fix edit button wrapper in tasks list component ([98c026d](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/98c026d8579d63ba4b06a114b09fb18006d5f2cb)) - fix task card grid in tasks list ([f7c3e8f](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f7c3e8f955169e9a1b001042f10106ee6fed6a90)) - handle select status using react hook form ([a638721](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/a638721d785ceef4faaf93399206c121a2543f48)) - implement task card 
small devices ui ([f420052](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f4200525c8c3995c9640d1c4e7e6aa3010f5ebd6)) - import routes with lazy ([b8b3b22](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/b8b3b22c315f11af8826af2f7d188dc401eb62ee)) - improve styles ([f9ff205](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/f9ff205b53716fd77223ca539b5b8699a3f092c0)) - move status list object into constants ([a5b432e](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/a5b432e6a2758518e5cf10efe6825e25685084b5)) - refactor add and edit task folders ([4f81a60](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/4f81a603787470a02720db17e50b3fb9abec15e4)) - refactor add and update task functions ([34d7ab1](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/34d7ab180e88d2ee2c836d1b85ac2020d916d62c)) - refactor get/set storage functions ([7a78764](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/7a78764eb20869d5ba12de5c525eb3b08dbcc7f6)) - refactor splash to error and add ...otherProps ([1075639](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/10756396855a1e9a8af39b20146e404a0eb34b11)) - refactor style names and attributes ([4028632](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/40286326f6d402b37918f246aecda17b6b532976)) - refctor get status functions ([e2961bd](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/e2961bdf91cd2cc19c710c003cfe0a0acb18ab9d)) - remove log ([7115249](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/7115249e0b0c9773de57be438bc0616f62bc6ea5)) - remove log ([ba9fcb5](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/ba9fcb53c5a682f064c9cac6e9dace709f56746b)) - remove page header unused semicolon ([91dc648](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/91dc64833ded0f8f5908901b21ca0b5c4b18a432)) - remove react icons and install material icons 
([6fc3b95](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/6fc3b9538bf2be3252cd259836e8b40d1962cee8)) - remove storage utils ([5a102bb](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/5a102bbce101ae816a345d6102be187bbe15cf24)) - remove storage utils ([61d0237](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/61d02372ac7c33afbed42cd07b4367299d69f4b2)) - remove unused semicolon in add components ([5a88bec](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/5a88beccc9976210f53da0b454445f11ad96f854)) - remove unused semicolon in page header component ([4b61c1a](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/4b61c1aab90addbf9e354f17d459f057cd7952dc)) - remove unused test ([860cbc9](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/860cbc9095ebe73240c1550815ad0fc6147b4bae)) - rename taskslist route to home ([c97c31b](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/c97c31b59821443c82595e160e1a5383bb4e2a24)) - resolve conflicts and remove yarn.lock ([a36259f](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/a36259f652efa9da5109ec4ba5534340f51f892c)) - resolve prettierrc and eslintrc conflict about semicolon ([8e268b7](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/8e268b7daf450e7a8edfe9e02bb3336c548c1fb4)) - use container component instead of box ([aca417b](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/aca417b79cb44838ccd42b656db5937ebf824bd2)) - use new components and remove duplicate styles ([46ef890](https://gitlab.com/idin.khayami/rechat-frontend-challenge/commit/46ef8905f15baf19f1a1d460e8ad0ae52f8543da)) <file_sep># ToDoList Project ## Getting Started First install project dependencies as below: ```bash $: npm install ``` Then for dev purpose launch Vite dev server: ```bash $: npm start ``` for build: ```bash $: npm build ``` After, You can use build folder to publish it. 
## Things that may have skipped: - Using production and develop environments - Better style - More test <file_sep>export type UUID = string export type TaskStatus = | 'ToDo' | 'InProgress' | 'Blocked' | 'InQA' | 'Done' | 'Deployed' interface BaseTask { title: string description: string } export interface Task extends BaseTask { id: UUID status: TaskStatus } export type CreateTaskInput = BaseTask export type UpdateTaskInput = Task export type TaskInput = CreateTaskInput | UpdateTaskInput export type DeleteFunction = (id: UUID) => void <file_sep>import { Task } from 'types' import { storageKeys } from './storage-keys' export const getTasksList = (): Task[] => JSON.parse(localStorage.getItem(storageKeys.tasksList) ?? '[]') export const setTasksList = (data: Task[]): void => localStorage.setItem(storageKeys.tasksList, JSON.stringify(data)) <file_sep>import { TaskStatus } from 'types' const TASK_STATUS_MAP: { [key in TaskStatus]: TaskStatus[] } = { ToDo: ['ToDo', 'InProgress'], InProgress: ['InProgress', 'Blocked', 'InQA'], Blocked: ['Blocked', 'ToDo'], InQA: ['InQA', 'ToDo', 'Done'], Done: ['Done', 'Deployed'], Deployed: ['Deployed'], } export function getPossibleTaskStatus(status: TaskStatus): TaskStatus[] { return TASK_STATUS_MAP[status] } <file_sep>import { getPossibleTaskStatus } from './get-possible-task-status' test('getPossibleTaskStatus should return correct value for next status', () => { expect(getPossibleTaskStatus('ToDo')).toEqual(['ToDo', 'InProgress']) expect(getPossibleTaskStatus('InProgress')).toEqual([ 'InProgress', 'Blocked', 'InQA', ]) expect(getPossibleTaskStatus('Blocked')).toEqual(['Blocked', 'ToDo']) expect(getPossibleTaskStatus('InQA')).toEqual(['InQA', 'ToDo', 'Done']) expect(getPossibleTaskStatus('Done')).toEqual(['Done', 'Deployed']) expect(getPossibleTaskStatus('Deployed')).toEqual(['Deployed']) }) <file_sep>import { lazy } from 'react' import paths from './paths' const Home = lazy(() => import('../pages/Home')) const NotFound = lazy(() => 
import('../pages/NotFound')) const EditTask = lazy(() => import('../pages/EditTask')) const routes = [ { component: Home, path: paths.home, }, { component: NotFound, path: paths.notFound, exact: false, }, { component: EditTask, path: paths.editTask, }, ] export default routes <file_sep>/** * Please don't include any utils storage keys here */ export const storageKeys = { tasksList: 'tasksList', }
c91799eb753257e1103a38a7866046cc7ceb1b89
[ "Markdown", "TypeScript" ]
14
TypeScript
idin-khayami/Todo-list
b974ad124e483bae5c17dc8bc80c9f053e33940e
3b5714087bb34b1467292cc47c69c5ffe7dbe729
refs/heads/master
<repo_name>TylerCreator/react-redux-boilerplate<file_sep>/android/settings.gradle rootProject.name = 'NiceRadio' include ':app' <file_sep>/app/index.js import React, { Component } from "react"; import { AppRegistry, View, Text } from "react-native"; import { Provider } from "react-redux"; import configureStore from "./store/configureStore"; global.store = configureStore(); /* eslint-disable */ // Enable debugging network calls from inside Chrome Dev Tools. // http://www.preslav.me/2017/03/26/debugging-network-calls-in-react-native-using-the-chrome-debugger/ XMLHttpRequest = GLOBAL.originalXMLHttpRequest ? GLOBAL.originalXMLHttpRequest : GLOBAL.XMLHttpRequest; /* eslint-enable */ console.ignoredYellowBox = ["Remote debugger"]; class NiceRadio extends Component { render() { return ( <Provider store={global.store}> <View style={{ flex: 1 }} /> </Provider> ); } } AppRegistry.registerComponent("NiceRadio", () => NiceRadio);
1b5227e4ebf07f4dad59ce7014306ff0ecadbf84
[ "JavaScript", "Gradle" ]
2
Gradle
TylerCreator/react-redux-boilerplate
1aed34cb2b8551bee255e12884d709eb0153c64f
1a526f80f7e9feadf9d924befc31a32210b902ad
refs/heads/master
<repo_name>smithjw/2016_JNUC_Security_Reporting_Compliance<file_sep>/1_Set_Organization_Priorities.sh #!/bin/bash #################################################################################################### # # Copyright (c) 2016, Jamf, LLC. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the JAMF Software, LLC nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY JAMF SOFTWARE, LLC "AS IS" AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL JAMF SOFTWARE, LLC BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # #################################################################################################### # written by <NAME>, Jamf October 2016 # github.com/jamfprofessionalservices # USAGE # Admins set organizational compliance for each listed item, which gets written to plist. 
# Values default to "true," and must be commented to "false" to disregard as an organizational priority. # Writes to /Library/Application Support/SecurityScoring/org_security_score.plist by default. # Create the Scoring file destination directory if it does not already exist dir="/Library/Application Support/CAmperIT/SecurityScoring" if [[ ! -e "$dir" ]]; then mkdir "$dir" fi plistlocation="$dir/org_security_score.plist" ################################################################## ############### ADMINS DESIGNATE ORG VALUES BELOW ################ ################################################################## # 1.1 Verify all Apple provided software is current OrgScore1_1="true" # OrgScore1_1="false" # 1.2 Enable Auto Update OrgScore1_2="true" # OrgScore1_2="false" # 1.3 Enable app update installs OrgScore1_3="true" # OrgScore1_3="false" # 1.4 Enable system data files and security update installs OrgScore1_4="true" # OrgScore1_4="false" # 1.5 Enable OS X update installs OrgScore1_5="true" # OrgScore1_5="false" # 2.1.1 Turn off Bluetooth, if no paired devices exist OrgScore2_1_1="true" # OrgScore2_1_1="false" # 2.1.3 Show Bluetooth status in menu bar # OrgScore2_1_3="true" OrgScore2_1_3="false" # 2.2.2 Ensure time set is within appropriate limits OrgScore2_2_2="true" # OrgScore2_2_2="false" # 2.3.1 Set an inactivity interval of 20 minutes or less for the screen saver OrgScore2_3_1="true" # OrgScore2_3_1="false" # 2.3.2 Secure screen saver corners OrgScore2_3_2="true" # OrgScore2_3_2="false" # 2.3.4 Set a screen corner to Start Screen Saver OrgScore2_3_4="true" # OrgScore2_3_4="false" # 2.4.1 Disable Remote Apple Events OrgScore2_4_1="true" # OrgScore2_4_1="false" # 2.4.2 Disable Internet Sharing OrgScore2_4_2="true" # OrgScore2_4_2="false" # 2.4.3 Disable Screen Sharing # OrgScore2_4_3="true" OrgScore2_4_3="false" # 2.4.5 Disable Remote Login # OrgScore2_4_5="true" OrgScore2_4_5="false" # 2.4.7 Disable Bluetooth Sharing OrgScore2_4_7="true" # 
OrgScore2_4_7="false" # 2.4.8 Disable File Sharing # OrgScore2_4_8="true" OrgScore2_4_8="false" # 2.4.9 Disable Remote Management # OrgScore2_4_9="true" OrgScore2_4_9="false" # 2.5.1 Disable "Wake for network access" OrgScore2_5_1="true" # OrgScore2_5_1="false" # 2.5.2 Disable sleeping the computer when connected to power OrgScore2_5_2="true" # OrgScore2_5_2="false" # 2.6.3 Enable Firewall # OrgScore2_6_3="true" OrgScore2_6_3="false" # 2.6.4 Enable Firewall Stealth Mode # OrgScore2_6_4="true" OrgScore2_6_4="false" # 2.6.5 Review Application Firewall Rules # OrgScore2_6_5="true" OrgScore2_6_5="false" # 2.8 Pair the remote control infrared receiver if enabled # OrgScore2_8="true" OrgScore2_8="false" # 2.9 Enable Secure Keyboard Entry in terminal.app # OrgScore2_9="true" OrgScore2_9="false" # 2.10 Java 6 is not the default Java runtime OrgScore2_10="true" # OrgScore2_10="false" # 3.1.1 Retain system.log for 90 or more days OrgScore3_1_1="true" # OrgScore3_1_1="false" # 3.1.3 Retain authd.log for 90 or more days OrgScore3_1_3="true" # OrgScore3_1_3="false" # 3.5 Retain install.log for 365 or more days OrgScore3_5="true" # OrgScore3_5="false" # 4.1 Disable Bonjour advertising service # OrgScore4_1="true" OrgScore4_1="false" # 4.2 Enable "Show Wi-Fi status in menu bar" OrgScore4_2="true" # OrgScore4_2="false" # 4.4 Ensure http server is not running # OrgScore4_4="true" OrgScore4_4="false" # 4.5 Ensure ftp server is not running # OrgScore4_5="true" OrgScore4_5="false" # 4.6 Ensure nfs server is not running # OrgScore4_6="true" OrgScore4_6="false" # 5.1.1 Secure Home Folders OrgScore5_1_1="true" # OrgScore5_1_1="false" # 5.1.2 Check System Wide Applications for appropriate permissions # OrgScore5_1_2="true" OrgScore5_1_2="false" # 5.1.3 Check System folder for world writable files # OrgScore5_1_3="true" OrgScore5_1_3="false" # 5.1.4 Check Library folder for world writable files # OrgScore5_1_4="true" OrgScore5_1_4="false" # 5.3 Reduce the sudo timeout period # 
OrgScore5_3="true" OrgScore5_3="false" # 5.4 Automatically lock the login keychain for inactivity OrgScore5_4="true" # OrgScore5_4="false" # 5.7 Do not enable the "root" account # OrgScore5_7="true" OrgScore5_7="false" # 5.8 Disable automatic login OrgScore5_8="true" # OrgScore5_8="false" # 5.9 Require a password to wake the computer from sleep or screen saver OrgScore5_9="true" # OrgScore5_9="false" # 5.10 Require an administrator password to access system-wide preferences OrgScore5_10="true" # OrgScore5_10="false" # 5.18 System Integrity Protection status OrgScore5_18="true" # OrgScore5_18="false" # 6.1.4 Disable "Allow guests to connect to shared folders" OrgScore6_1_4="true" # OrgScore6_1_4="false" # 6.2 Turn on filename extensions OrgScore6_2="true" # OrgScore6_2="false" # 6.3 Disable the automatic run of safe files in Safari OrgScore6_3="true" # OrgScore6_3="false" ################################################################## ############# DO NOT MODIFY ANYTHING BELOW THIS LINE ############# ################################################################## # Write org_security_score values to local plist echo "<?xml version=\"1.0\" encoding=\"UTF-8\"?> <!DOCTYPE plist PUBLIC \"-//Apple//DTD PLIST 1.0//EN\" \"http://www.apple.com/DTDs/PropertyList-1.0.dtd\"> <plist version=\"1.0\"> <dict> <key>OrgScore1_1</key> <${OrgScore1_1}/> <key>OrgScore1_2</key> <${OrgScore1_2}/> <key>OrgScore1_3</key> <${OrgScore1_3}/> <key>OrgScore1_4</key> <${OrgScore1_4}/> <key>OrgScore1_5</key> <${OrgScore1_5}/> <key>OrgScore2_1_1</key> <${OrgScore2_1_1}/> <key>OrgScore2_1_3</key> <${OrgScore2_1_3}/> <key>OrgScore2_2_2</key> <${OrgScore2_2_2}/> <key>OrgScore2_3_1</key> <${OrgScore2_3_1}/> <key>OrgScore2_3_2</key> <${OrgScore2_3_2}/> <key>OrgScore2_3_4</key> <${OrgScore2_3_4}/> <key>OrgScore2_4_1</key> <${OrgScore2_4_1}/> <key>OrgScore2_4_2</key> <${OrgScore2_4_2}/> <key>OrgScore2_4_3</key> <${OrgScore2_4_3}/> <key>OrgScore2_4_5</key> <${OrgScore2_4_5}/> 
<key>OrgScore2_4_7</key> <${OrgScore2_4_7}/> <key>OrgScore2_4_8</key> <${OrgScore2_4_8}/> <key>OrgScore2_4_9</key> <${OrgScore2_4_9}/> <key>OrgScore2_5_1</key> <${OrgScore2_5_1}/> <key>OrgScore2_5_2</key> <${OrgScore2_5_2}/> <key>OrgScore2_6_3</key> <${OrgScore2_6_3}/> <key>OrgScore2_6_4</key> <${OrgScore2_6_4}/> <key>OrgScore2_6_5</key> <${OrgScore2_6_5}/> <key>OrgScore2_8</key> <${OrgScore2_8}/> <key>OrgScore2_9</key> <${OrgScore2_9}/> <key>OrgScore2_10</key> <${OrgScore2_10}/> <key>OrgScore3_1_1</key> <${OrgScore3_1_1}/> <key>OrgScore3_1_3</key> <${OrgScore3_1_3}/> <key>OrgScore3_5</key> <${OrgScore3_5}/> <key>OrgScore4_1</key> <${OrgScore4_1}/> <key>OrgScore4_2</key> <${OrgScore4_2}/> <key>OrgScore4_4</key> <${OrgScore4_4}/> <key>OrgScore4_5</key> <${OrgScore4_5}/> <key>OrgScore4_6</key> <${OrgScore4_6}/> <key>OrgScore5_1_1</key> <${OrgScore5_1_1}/> <key>OrgScore5_1_2</key> <${OrgScore5_1_2}/> <key>OrgScore5_1_3</key> <${OrgScore5_1_3}/> <key>OrgScore5_1_4</key> <${OrgScore5_1_4}/> <key>OrgScore5_3</key> <${OrgScore5_3}/> <key>OrgScore5_4</key> <${OrgScore5_4}/> <key>OrgScore5_7</key> <${OrgScore5_7}/> <key>OrgScore5_8</key> <${OrgScore5_8}/> <key>OrgScore5_9</key> <${OrgScore5_9}/> <key>OrgScore5_10</key> <${OrgScore5_10}/> <key>OrgScore5_18</key> <${OrgScore5_18}/> <key>OrgScore6_1_4</key> <${OrgScore6_1_4}/> <key>OrgScore6_2</key> <${OrgScore6_2}/> <key>OrgScore6_3</key> <${OrgScore6_3}/> </dict> </plist>" > "$plistlocation" <file_sep>/2_Security_Audit_Compliance.sh #!/bin/bash #################################################################################################### # # Copyright (c) 2016, Jamf, LLC. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. 
# * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the JAMF Software, LLC nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY JAMF SOFTWARE, LLC "AS IS" AND ANY # EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL JAMF SOFTWARE, LLC BE LIABLE FOR ANY # DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # #################################################################################################### # written by <NAME>, Jamf October 2016 # github.com/jamfprofessionalservices # USAGE # Reads from plist at /Library/Application Support/SecurityScoring/org_security_score.plist by default. # For "true" items, runs query for current computer/user compliance. # Non-compliant items are logged to /Library/Application Support/SecurityScoring/org_audit plistlocation="/Library/Application Support/SecurityScoring/org_security_score.plist" auditfilelocation="/Library/Application Support/SecurityScoring/org_audit" if [[ ! 
-e $plistlocation ]]; then echo "No scoring file present" exit 0 fi # Cleanup audit file to start fresh rm "$auditfilelocation" touch "$auditfilelocation" # Other variables currentUser=$(ls -l /dev/console | cut -d " " -f4) systemprofiled="/tmp/systemprofiled.txt" #Profile The system for only the Values needed, add them to a temp file echo "Creating A System Profile Relavent to this Audit" /usr/sbin/system_profiler SPHardwareDataType SPBluetoothDataType SPUSBDataType > "$systemprofiled" hardwareUUID=$(cat "$systemprofiled" | grep "Hardware UUID" | awk -F ": " '{print $2}' | xargs) # 1.1 Verify all Apple provided software is current # Verify organizational score Audit1_1="$(defaults read "$plistlocation" OrgScore1_1)" # If organizational score is 1 or true, check status of client if [ "$Audit1_1" = "1" ]; then countAvailableSUS="$(softwareupdate -l | grep "*" | wc -l)" # If client fails, then note category in audit file if [ $countAvailableSUS = "0" ]; then echo "1.1 passed"; else echo "* 1.1 Verify all Apple provided software is current" >> "$auditfilelocation" fi fi # 1.2 Enable Auto Update # Verify organizational score Audit1_2="$(defaults read "$plistlocation" OrgScore1_2)" # If organizational score is 1 or true, check status of client if [ "$Audit1_2" = "1" ]; then automaticUpdates="$(defaults read /Library/Preferences/com.apple.SoftwareUpdate AutomaticCheckEnabled)" # If client fails, then note category in audit file if [ $automaticUpdates = "1" ]; then echo "1.2 passed"; else echo "* 1.2 Enable Auto Update" >> "$auditfilelocation" fi fi # 1.3 Enable app update installs # Verify organizational score Audit1_3="$(defaults read "$plistlocation" OrgScore1_3)" # If organizational score is 1 or true, check status of client if [ "$Audit1_3" = "1" ]; then automaticAppUpdates="$(defaults read /Library/Preferences/com.apple.commerce AutoUpdate)" # If client fails, then note category in audit file if [ $automaticAppUpdates = "1" ]; then echo "1.3 passed"; else echo "* 
1.3 Enable app update installs" >> "$auditfilelocation" fi fi # 1.4 Enable system data files and security update installs # Verify organizational score Audit1_4="$(defaults read "$plistlocation" OrgScore1_4)" # If organizational score is 1 or true, check status of client if [ "$Audit1_4" = "1" ]; then criticalUpdates="$(defaults read /Library/Preferences/com.apple.SoftwareUpdate ConfigDataInstall)" # If client fails, then note category in audit file if [ $criticalUpdates = "1" ]; then echo "1.4 passed"; else echo "* 1.4 Enable system data files and security update installs" >> "$auditfilelocation" fi fi # 1.5 Enable OS X update installs # Verify organizational score Audit1_5="$(defaults read "$plistlocation" OrgScore1_5)" # If organizational score is 1 or true, check status of client if [ "$Audit1_5" = "1" ]; then updateRestart="$(defaults read /Library/Preferences/com.apple.commerce AutoUpdateRestartRequired)" # If client fails, then note category in audit file if [ $updateRestart = "1" ]; then echo "1.5 passed"; else echo "* 1.5 Enable OS X update installs" >> "$auditfilelocation" fi fi # 2.1.1 Turn off Bluetooth, if no paired devices exist # Verify organizational score Audit2_1_1="$(defaults read "$plistlocation" OrgScore2_1_1)" # If organizational score is 1 or true, check status of client if [ "$Audit2_1_1" = "1" ]; then btPowerState="$(defaults read /Library/Preferences/com.apple.Bluetooth ControllerPowerState)" # If client fails, then note category in audit file if [ $btPowerState = "0" ]; then echo "2.1.1 passed"; else connectable=$(cat "$systemprofiled" | grep "Bluetooth:" -A 20 | grep Connectable | awk '{print $2}' | head -1) if [ "$connectable" = "Yes" ]; then echo "2.1.1 passed"; else echo "* 2.1.1 Turn off Bluetooth, if no paired devices exist" >> "$auditfilelocation" fi fi fi # 2.1.3 Show Bluetooth status in menu bar # Verify organizational score Audit2_1_3="$(defaults read "$plistlocation" OrgScore2_1_3)" # If organizational score is 1 or true, check 
status of client if [ "$Audit2_1_3" = "1" ]; then btMenuBar="$(defaults read /Users/$currentUser/Library/Preferences/com.apple.systemuiserver menuExtras | grep -c Bluetooth.menu)" # If client fails, then note category in audit file if [ $btMenuBar = "0" ]; then echo "* 2.1.3 Show Bluetooth status in menu bar" >> "$auditfilelocation"; else echo "2.1.3 passed" fi fi # 2.2.2 Ensure time set is within appropriate limits # Not audited - only enforced if identified as priority # Verify organizational score Audit2_2_2="$(defaults read "$plistlocation" OrgScore2_2_2)" # If organizational score is 1 or true, check status of client # if [ "$Audit2_2_2" = "1" ]; then # sync time # fi # 2.3.1 Set an inactivity interval of 20 minutes or less for the screen saver # Verify organizational score Audit2_3_1="$(defaults read "$plistlocation" OrgScore2_3_1)" # If organizational score is 1 or true, check status of client if [ "$Audit2_3_1" = "1" ]; then screenSaverTime="$(defaults read /Users/$currentUser/Library/Preferences/ByHost/com.apple.screensaver.$hardwareUUID.plist idleTime)" # If client fails, then note category in audit file if [ "$screenSaverTime" -le "1200" ]; then echo "2.3.1 passed"; else echo "* 2.3.1 Set an inactivity interval of 20 minutes or less for the screen saver" >> "$auditfilelocation" fi fi # 2.3.2 Secure screen saver corners # Verify organizational score Audit2_3_2="$(defaults read "$plistlocation" OrgScore2_3_2)" # If organizational score is 1 or true, check status of client if [ "$Audit2_3_2" = "1" ]; then bl_corner=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.dock wvous-bl-corner) tl_corner=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.dock wvous-tl-corner) tr_corner=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.dock wvous-tr-corner) br_corner=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.dock wvous-br-corner) # If client fails, then note category in audit file if [ 
"$bl_corner" = "6" ] || [ "$tl_corner" = "6" ] || [ "$tr_corner" = "6" ] || [ "$br_corner" = "6" ]; then echo "* 2.3.2 Secure screen saver corners" >> "$auditfilelocation"; else echo "2.3.2 passed" fi fi # 2.3.4 Set a screen corner to Start Screen Saver # Verify organizational score Audit2_3_4="$(defaults read "$plistlocation" OrgScore2_3_4)" # If organizational score is 1 or true, check status of client if [ "$Audit2_3_4" = "1" ]; then # If client fails, then note category in audit file bl_corner=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.dock wvous-bl-corner) tl_corner=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.dock wvous-tl-corner) tr_corner=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.dock wvous-tr-corner) br_corner=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.dock wvous-br-corner) if [ "$bl_corner" = "5" ] || [ "$tl_corner" = "5" ] || [ "$tr_corner" = "5" ] || [ "$br_corner" = "5" ]; then echo "2.3.4 passed"; else echo "* 2.3.4 Set a screen corner to Start Screen Saver" >> "$auditfilelocation" fi fi # 2.4.1 Disable Remote Apple Events # Verify organizational score Audit2_4_1="$(defaults read "$plistlocation" OrgScore2_4_1)" # If organizational score is 1 or true, check status of client if [ "$Audit2_4_1" = "1" ]; then remoteAppleEvents=$(systemsetup -getremoteappleevents | awk '{print $4}') # If client fails, then note category in audit file if [ "$remoteAppleEvents" = "Off" ]; then echo "2.4.1 passed"; else echo "* 2.4.1 Disable Remote Apple Events" >> "$auditfilelocation" fi fi # 2.4.2 Disable Internet Sharing # Verify organizational score Audit2_4_2="$(defaults read "$plistlocation" OrgScore2_4_2)" # If organizational score is 1 or true, check status of client # If client fails, then note category in audit file if [ "$Audit2_4_2" = "1" ]; then natAirport=$(/usr/libexec/PlistBuddy -c "print :NAT:AirPort:Enabled" 
/Library/Preferences/SystemConfiguration/com.apple.nat.plist) natEnabled=$(/usr/libexec/PlistBuddy -c "print :NAT:Enabled" /Library/Preferences/SystemConfiguration/com.apple.nat.plist) natPrimary=$(/usr/libexec/PlistBuddy -c "print :NAT:PrimaryInterface:Enabled" /Library/Preferences/SystemConfiguration/com.apple.nat.plist) if [ "$natAirport" = "0" ] && [ "$natEnabled" = "0" ] && [ "$natPrimary" = "0" ]; then echo "2.4.2 passed"; else echo "* 2.4.2 Disable Internet Sharing" >> "$auditfilelocation" fi fi # 2.4.3 Disable Screen Sharing # Verify organizational score Audit2_4_3="$(defaults read "$plistlocation" OrgScore2_4_3)" # If organizational score is 1 or true, check status of client if [ "$Audit2_4_3" = "1" ]; then # If client fails, then note category in audit file screenSharing=$(defaults read /System/Library/LaunchDaemons/com.apple.screensharing Disabled) if [ "$screenSharing" = "1" ]; then echo "2.4.3 passed"; else echo "* 2.4.3 Disable Screen Sharing" >> "$auditfilelocation" fi fi # 2.4.5 Disable Remote Login # Verify organizational score Audit2_4_5="$(defaults read "$plistlocation" OrgScore2_4_5)" # If organizational score is 1 or true, check status of client if [ "$Audit2_4_5" = "1" ]; then remoteLogin=$(systemsetup -getremotelogin | awk '{print $3}') # If client fails, then note category in audit file if [ "$remoteLogin" = "Off" ]; then echo "2.4.5 passed"; else echo "* 2.4.5 Disable Remote Login" >> "$auditfilelocation" fi fi # 2.4.7 Disable Bluetooth Sharing # Verify organizational score Audit2_4_7="$(defaults read "$plistlocation" OrgScore2_4_7)" # If organizational score is 1 or true, check status of client and user if [ "$Audit2_4_7" = "1" ]; then btSharing=$(/usr/libexec/PlistBuddy -c "print :PrefKeyServicesEnabled" /Users/$currentUser/Library/Preferences/ByHost/com.apple.Bluetooth.$hardwareUUID.plist) # If client fails, then note category in audit file if [ "$btSharing" = "false" ]; then echo "2.4.7 passed"; else echo "* 2.4.7 Disable Bluetooth 
Sharing" >> "$auditfilelocation" fi fi # 2.4.8 Disable File Sharing # Verify organizational score Audit2_4_8="$(defaults read "$plistlocation" OrgScore2_4_8)" # If organizational score is 1 or true, check status of client if [ "$Audit2_4_8" = "1" ]; then afpEnabled=$(launchctl list | egrep AppleFileServer) smbEnabled=$(launchctl list | egrep smbd) # If client fails, then note category in audit file if [ "$afpEnabled" = "" ] && [ "$smbEnabled" = "" ]; then echo "2.4.8 passed"; else echo "* 2.4.8 Disable File Sharing" >> "$auditfilelocation" fi fi # 2.4.9 Disable Remote Management # Verify organizational score Audit2_4_9="$(defaults read "$plistlocation" OrgScore2_4_9)" # If organizational score is 1 or true, check status of client if [ "$Audit2_4_9" = "1" ]; then remoteManagement=$(ps -ef | egrep ARDAgent | grep -c "/System/Library/CoreServices/RemoteManagement/ARDAgent.app/Contents/MacOS/ARDAgent") # If client fails, then note category in audit file if [ "$remoteManagement" = "1" ]; then echo "2.4.9 passed"; else echo "* 2.4.9 Disable Remote Management" >> "$auditfilelocation" fi fi # 2.5.1 Disable "Wake for network access" # Verify organizational score Audit2_5_1="$(defaults read "$plistlocation" OrgScore2_5_1)" # If organizational score is 1 or true, check status of client if [ "$Audit2_5_1" = "1" ]; then wompEnabled=$(pmset -g | grep womp | awk '{print $2}') # If client fails, then note category in audit file if [ "$wompEnabled" = "0" ]; then echo "2.5.1 passed"; else echo "* 2.5.1 Disable Wake for network access" >> "$auditfilelocation" fi fi # 2.5.2 Disable sleeping the computer when connected to power # Verify organizational score Audit2_5_2="$(defaults read "$plistlocation" OrgScore2_5_2)" # If organizational score is 1 or true, check status of client if [ "$Audit2_5_2" = "1" ]; then disksleepEnabled=$(pmset -g | grep disksleep | awk '{print $2}') # If client fails, then note category in audit file if [ "$disksleepEnabled" = "0" ]; then echo "2.5.2 passed"; 
else echo "* 2.5.2 Disable sleeping the computer when connected to power" >> "$auditfilelocation" fi fi # 2.6.3 Enable Firewall # Verify organizational score Audit2_6_3="$(defaults read "$plistlocation" OrgScore2_6_3)" # If organizational score is 1 or true, check status of client if [ "$Audit2_6_3" = "1" ]; then firewallEnabled=$(defaults read /Library/Preferences/com.apple.alf globalstate) # If client fails, then note category in audit file if [ "$firewallEnabled" = "0" ]; then echo "* 2.6.3 Enable Firewall" >> "$auditfilelocation"; else echo "2.6.3 passed" fi fi # 2.6.4 Enable Firewall Stealth Mode # Verify organizational score Audit2_6_4="$(defaults read "$plistlocation" OrgScore2_6_4)" # If organizational score is 1 or true, check status of client if [ "$Audit2_6_4" = "1" ]; then stealthEnabled=$(/usr/libexec/ApplicationFirewall/socketfilterfw --getstealthmode | awk '{print $3}') # If client fails, then note category in audit file if [ "$stealthEnabled" = "enabled" ]; then echo "2.6.4 passed"; else echo "* 2.6.4 Enable Firewall Stealth Mode" >> "$auditfilelocation" fi fi # 2.6.5 Review Application Firewall Rules # Verify organizational score Audit2_6_5="$(defaults read "$plistlocation" OrgScore2_6_5)" # If organizational score is 1 or true, check status of client if [ "$Audit2_6_5" = "1" ]; then appsInbound=$(/usr/libexec/ApplicationFirewall/socketfilterfw --listapps | grep ALF | awk '{print $7}') # If client fails, then note category in audit file if [ "$appsInbound" -le "10" ]; then echo "2.6.5 passed"; else echo "* 2.6.5 Review Application Firewall Rules" >> "$auditfilelocation" fi fi # 2.8 Pair the remote control infrared receiver if enabled # Verify organizational score Audit2_8="$(defaults read "$plistlocation" OrgScore2_8)" # If organizational score is 1 or true, check status of client if [ "$Audit2_8" = "1" ]; then IRPortDetect=$(cat "$systemprofiled" | egrep "IR Receiver" -c) # If client fails, then note category in audit file if [ "$IRPortDetect" = 
"0" ]; then echo "2.8 passed"; else echo "* 2.8 Pair the remote control infrared receiver if enabled" >> "$auditfilelocation" fi fi # 2.9 Enable Secure Keyboard Entry in terminal.app # Verify organizational score Audit2_9="$(defaults read "$plistlocation" OrgScore2_9)" # If organizational score is 1 or true, check status of client if [ "$Audit2_9" = "1" ]; then currentUser=$(ls -l /dev/console | cut -d " " -f4) secureKeyboard=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.Terminal SecureKeyboardEntry) # If client fails, then note category in audit file if [ "$secureKeyboard" = "1" ]; then echo "2.9 passed"; else echo "* 2.9 Enable Secure Keyboard Entry in terminal.app" >> "$auditfilelocation" fi fi # 2.10 Java 6 is not the default Java runtime # Verify organizational score Audit2_10="$(defaults read "$plistlocation" OrgScore2_10)" # If organizational score is 1 or true, check status of client if [ "$Audit2_10" = "1" ]; then # If client fails, then note category in audit file if [ -f "/Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Enabled.plist" ] ; then javaVersion=$( defaults read "/Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Enabled.plist" CFBundleVersion ) javaMajorVersion=$(echo "$javaVersion" | awk -F'.' '{print $2}') if [ $javaMajorVersion -lt "7" ]; then echo "* 2.10 Java 6 is not the default Java runtime" >> "$auditfilelocation"; else echo "2.10 passed" fi fi if [ ! 
-f "/Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Enabled.plist" ] ; then echo "2.10 passed" fi fi # 3.1.1 Retain system.log for 90 or more days # Verify organizational score Audit3_1_1="$(defaults read "$plistlocation" OrgScore3_1_1)" # If organizational score is 1 or true, check status of client if [ "$Audit3_1_1" = "1" ]; then sysRetention=$(grep -i ttl /etc/asl.conf | awk -F'ttl=' '{print $2}') # If client fails, then note category in audit file if [ "$sysRetention" -lt "90" ]; then echo "* 3.1.1 Retain system.log for 90 or more days" >> "$auditfilelocation"; else echo "3.1.1 passed" fi fi # 3.1.3 Retain authd.log for 90 or more days # Verify organizational score Audit3_1_3="$(defaults read "$plistlocation" OrgScore3_1_3)" # If organizational score is 1 or true, check status of client if [ "$Audit3_1_3" = "1" ]; then authdRetention=$(grep -i ttl /etc/asl/com.apple.authd | awk -F'ttl=' '{print $2}') # If client fails, then note category in audit file if [ "$authdRetention" = "" ] || [ "$authdRetention" -lt "90" ]; then echo "* 3.1.3 Retain authd.log for 90 or more days" >> "$auditfilelocation"; else echo "3.1.3 passed" fi fi # 3.5 Retain install.log for 365 or more days # Verify organizational score Audit3_5="$(defaults read "$plistlocation" OrgScore3_5)" # If organizational score is 1 or true, check status of client if [ "$Audit3_5" = "1" ]; then installRetention=$(grep -i ttl /etc/asl/com.apple.install | awk -F'ttl=' '{print $2}') # If client fails, then note category in audit file if [ "$installRetention" = "" ] || [ "$installRetention" -lt "365" ]; then echo "* 3.5 Retain install.log for 365 or more days" >> "$auditfilelocation"; else echo "3.5 passed" fi fi # 4.1 Disable Bonjour advertising service # Verify organizational score Audit4_1="$(defaults read "$plistlocation" OrgScore4_1)" # If organizational score is 1 or true, check status of client if [ "$Audit4_1" = "1" ]; then bonjourAdvertise=$(defaults read /Library/Preferences/com.apple.alf 
globalstate) # If client fails, then note category in audit file if [ "$bonjourAdvertise" = "0" ]; then echo "* 4.1 Disable Bonjour advertising service" >> "$auditfilelocation"; else echo "4.1 passed" fi fi # 4.2 Enable "Show Wi-Fi status in menu bar" # Verify organizational score Audit4_2="$(defaults read "$plistlocation" OrgScore4_2)" # If organizational score is 1 or true, check status of client if [ "$Audit4_2" = "1" ]; then wifiMenuBar="$(defaults read com.apple.systemuiserver menuExtras | grep -c AirPort.menu)" # If client fails, then note category in audit file if [ $wifiMenuBar = "0" ]; then echo "* 4.2 Enable Show Wi-Fi status in menu bar" >> "$auditfilelocation"; else echo "4.2 passed" fi fi # 4.4 Ensure http server is not running # Verify organizational score Audit4_4="$(defaults read "$plistlocation" OrgScore4_4)" # If organizational score is 1 or true, check status of client # Code fragment from https://github.com/krispayne/CIS-Settings/blob/master/ElCapitan_CIS.sh if [ "$Audit4_4" = "1" ]; then if /bin/launchctl list | egrep httpd > /dev/null; then echo "* 4.4 Ensure http server is not running" >> "$auditfilelocation"; else echo "4.4 passed" fi fi # 4.5 Ensure ftp server is not running # Verify organizational score Audit4_5="$(defaults read "$plistlocation" OrgScore4_5)" # If organizational score is 1 or true, check status of client if [ "$Audit4_5" = "1" ]; then ftpEnabled=$(launchctl list | egrep ftp | grep -c "com.apple.ftpd") # If client fails, then note category in audit file if [ "$ftpEnabled" -lt "1" ]; then echo "4.5 passed"; else echo "* 4.5 Ensure ftp server is not running" >> "$auditfilelocation" fi fi # 4.6 Ensure nfs server is not running # Verify organizational score Audit4_6="$(defaults read "$plistlocation" OrgScore4_6)" # If organizational score is 1 or true, check status of client if [ "$Audit4_6" = "1" ]; then # If client fails, then note category in audit file if [ -e /etc/exports ]; then echo "4.6 Ensure nfs server is not running" 
>> "$auditfilelocation"; else echo "4.6 passed" fi fi # 5.1.1 Secure Home Folders # Verify organizational score Audit5_1_1="$(defaults read "$plistlocation" OrgScore5_1_1)" # If organizational score is 1 or true, check status of client if [ "$Audit5_1_1" = "1" ]; then homeFolders=$(find /Users -mindepth 1 -maxdepth 1 -type d -perm -1 | grep -v "Shared" | grep -v "Guest" | wc -l | xargs) # If client fails, then note category in audit file if [ "$homeFolders" = "0" ]; then echo "5.1.1 passed"; else echo "* 5.1.1 Secure Home Folders" >> "$auditfilelocation" fi fi # 5.1.2 Check System Wide Applications for appropriate permissions # Verify organizational score Audit5_1_2="$(defaults read "$plistlocation" OrgScore5_1_2)" # If organizational score is 1 or true, check status of client if [ "$Audit5_1_2" = "1" ]; then appPermissions=$(find /Applications -iname "*\.app" -type d -perm -2 -ls | wc -l | xargs) # If client fails, then note category in audit file if [ "$appPermissions" = "0" ]; then echo "5.1.2 passed"; else echo "* 5.1.2 Check System Wide Applications for appropriate permissions" >> "$auditfilelocation" fi fi # 5.1.3 Check System folder for world writable files # Verify organizational score Audit5_1_3="$(defaults read "$plistlocation" OrgScore5_1_3)" # If organizational score is 1 or true, check status of client if [ "$Audit5_1_3" = "1" ]; then sysPermissions=$(find /System -type d -perm -2 -ls | grep -v "Public/Drop Box" | wc -l | xargs) # If client fails, then note category in audit file if [ "$sysPermissions" = "0" ]; then echo "5.1.3 passed"; else echo "* 5.1.3 Check System folder for world writable files" >> "$auditfilelocation" fi fi # 5.1.4 Check Library folder for world writable files # Verify organizational score Audit5_1_4="$(defaults read "$plistlocation" OrgScore5_1_4)" # If organizational score is 1 or true, check status of client if [ "$Audit5_1_4" = "1" ]; then libPermissions=$(find /Library -type d -perm -2 -ls | grep -v Caches | wc -l | xargs) # 
If client fails, then note category in audit file if [ "$libPermissions" = "0" ]; then echo "5.1.4 passed"; else echo "* 5.1.4 Check Library folder for world writable files" >> "$auditfilelocation" fi fi # 5.3 Reduce the sudo timeout period # Verify organizational score Audit5_3="$(defaults read "$plistlocation" OrgScore5_3)" # If organizational score is 1 or true, check status of client if [ "$Audit5_3" = "1" ]; then sudoTimeout=$(cat /etc/sudoers | grep timestamp) # If client fails, then note category in audit file if [ "$sudoTimeout" = "" ]; then echo "* 5.3 Reduce the sudo timeout period" >> "$auditfilelocation"; else echo "5.3 passed" fi fi # 5.4 Automatically lock the login keychain for inactivity # Verify organizational score Audit5_4="$(defaults read "$plistlocation" OrgScore5_4)" # If organizational score is 1 or true, check status of client if [ "$Audit5_4" = "1" ]; then keyTimeout=$(security show-keychain-info /Users/$currentUser/Library/Keychains/login.keychain 2>&1 | awk '{print $3}') # If client fails, then note category in audit file if [ "$keyTimeout" = "no-timeout" ]; then echo "* 5.4 Automatically lock the login keychain for inactivity" >> "$auditfilelocation"; else echo "5.4 passed" fi fi # 5.7 Do not enable the "root" account # Verify organizational score Audit5_7="$(defaults read "$plistlocation" OrgScore5_7)" # If organizational score is 1 or true, check status of client if [ "$Audit5_7" = "1" ]; then rootEnabled=$(dscl . 
-read /Users/root AuthenticationAuthority 2>&1 | grep -c "No such key") # If client fails, then note category in audit file if [ "$rootEnabled" = "1" ]; then echo "5.7 passed"; else echo "* 5.7 Do not enable the root account" >> "$auditfilelocation" fi fi # 5.8 Disable automatic login # Verify organizational score Audit5_8="$(defaults read "$plistlocation" OrgScore5_8)" # If organizational score is 1 or true, check status of client if [ "$Audit5_8" = "1" ]; then autologinEnabled=$(defaults read /Library/Preferences/com.apple.loginwindow | grep autoLoginUser) # If client fails, then note category in audit file if [ "$autologinEnabled" = "" ]; then echo "5.8 passed"; else echo "* 5.8 Disable automatic login" >> "$auditfilelocation" fi fi # 5.9 Require a password to wake the computer from sleep or screen saver # Verify organizational score Audit5_9="$(defaults read "$plistlocation" OrgScore5_9)" # If organizational score is 1 or true, check status of client # If client fails, then note category in audit file if [ "$Audit5_9" = "1" ]; then screensaverPwd=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.screensaver askForPassword) if [ "$screensaverPwd" = "1" ]; then echo "5.9 passed"; else echo "* 5.9 Require a password to wake the computer from sleep or screen saver" >> "$auditfilelocation" fi fi # 5.10 Require an administrator password to access system-wide preferences # Verify organizational score Audit5_10="$(defaults read "$plistlocation" OrgScore5_10)" # If organizational score is 1 or true, check status of client if [ "$Audit5_10" = "1" ]; then adminSysPrefs=$(security authorizationdb read system.preferences 2> /dev/null | grep -A1 shared | grep -E '(true|false)' | grep -c "true") # If client fails, then note category in audit file if [ "$adminSysPrefs" = "1" ]; then echo "* 5.10 Require an administrator password to access system-wide preferences" >> "$auditfilelocation"; else echo "5.10 passed" fi fi # 5.18 System Integrity Protection status # 
Verify organizational score Audit5_18="$(defaults read "$plistlocation" OrgScore5_18)" # If organizational score is 1 or true, check status of client if [ "$Audit5_18" = "1" ]; then sipEnabled=$(/usr/bin/csrutil status | awk '{print $5}') # If client fails, then note category in audit file if [ "$sipEnabled" = "enabled." ]; then echo "5.18 passed"; else echo "* 5.18 System Integrity Protection status" >> "$auditfilelocation" fi fi # 6.1.4 Disable "Allow guests to connect to shared folders" # Verify organizational score Audit6_1_4="$(defaults read "$plistlocation" OrgScore6_1_4)" # If organizational score is 1 or true, check status of client if [ "$Audit6_1_4" = "1" ]; then afpGuestEnabled=$(defaults read /Library/Preferences/com.apple.AppleFileServer guestAccess) smbGuestEnabled=$(defaults read /Library/Preferences/SystemConfiguration/com.apple.smb.server AllowGuestAccess) # If client fails, then note category in audit file if [ "$afpGuestEnabled" = "1" ] || [ "$smbGuestEnabled" = "1" ]; then echo "* 6.1.4 Disable Allow guests to connect to shared folders" >> "$auditfilelocation"; else echo "6.1.4 passed" fi fi # 6.2 Turn on filename extensions # Verify organizational score Audit6_2="$(defaults read "$plistlocation" OrgScore6_2)" # If organizational score is 1 or true, check status of client if [ "$Audit6_2" = "1" ]; then filenameExt=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.finder AppleShowAllExtensions) # If client fails, then note category in audit file if [ "$filenameExt" = "1" ]; then echo "6.2 passed"; else echo "* 6.2 Turn on filename extensions" >> "$auditfilelocation" fi fi # 6.3 Disable the automatic run of safe files in Safari # Verify organizational score Audit6_3="$(defaults read "$plistlocation" OrgScore6_3)" # If organizational score is 1 or true, check status of client if [ "$Audit6_3" = "1" ]; then safariSafe=$(defaults read /Users/$currentUser/Library/Preferences/com.apple.Safari AutoOpenSafeDownloads) # If client fails, 
then note category in audit file if [ "$safariSafe" = "1" ]; then echo "* 6.3 Disable the automatic run of safe files in Safari" >> "$auditfilelocation"; else echo "6.3 passed" fi fi exit 0
e087335a504437f7fcff6379f7bdd9c46df5d731
[ "Shell" ]
2
Shell
smithjw/2016_JNUC_Security_Reporting_Compliance
2ba2960d233d3db1b34cf88d23491da610c5889c
e8fd0971b46798b64f66a38b65037ea49f5d6bc4
refs/heads/master
<repo_name>Delphineray/CsvToReferenceEntity<file_sep>/README.md # CsvToRefenceEntity > Migrate data from [CustomEntityBundle](https://github.com/akeneo-labs/CustomEntityBundle) to Akeneo Reference Entities (_available since 3.0_), using CSV file import. # Installation ```bash git clone <EMAIL>:akeneo/CsvToRefenceEntity.git csv_to_reference_entity cd csv_to_reference_entity composer install ``` # Setup Note: to setup this tool, you'll need a valid **API Client ID** and its **API Client secret** from your Akeneo PIM instance. Read the dedicated documentation to proceed: https://api.akeneo.com/getting-started-admin.html Back in tool, you need to copy the [.env](https://symfony.com/doc/current/components/dotenv.html) file: ```bash cp .env .env.local ``` Then open `.env.local` to define the needed configuration vars: ``` AKENEO_API_BASE_URI=http://your-akeneo-pim-instance.com AKENEO_API_CLIENT_ID=123456789abcdefghijklmnopqrstuvwxyz AKENEO_API_CLIENT_SECRET=<KEY> AKENEO_API_USERNAME=admin AKENEO_API_PASSWORD=<PASSWORD> ``` # How to Use ## 1) Create Reference Entities in your PIM instance In your PIM instance, you will need to create your structure for your records. In short, you'll need to create your reference entities first, to define their attributes, if they have a value per channel/locale, etc. ## 2) Generate your .csv file The only **required field** is the `code`. Regarding attributes, it depends on whether they have a value per channel/locale (_we use the same structure as for products_): - For attribute without value per channel/locale: - `<attribute_code>`, eg. `description` - For attribute with value per channel: - `<attribute_code>-<channel_code>`, eg. `description-ecommerce` - For attribute with value per locale - `<attribute_code>-<locale_code>`, eg. `description-en_US` - For attribute with value per channel and per locale: - `<attribute_code>-<locale_code>-<channel_code>`, eg. 
`description-en_US-mobile` (_locale first_) So, let's imagine this structure for the `brand` reference entity: - A code - A description with one value per locale - Some tags (an attribute with multiple options) This would be a valid file: ```csv code;description-en_US;description-fr_FR;tags ikea;A famous scandinavian brand;Une célèbre marque scandinave;family,nordic made.com;A famous english brand;Une célèbre marque anglaise;design,online ``` ## 3) Import your file Once you have your .csv file, you can import it with this syntax: ```bash php bin/console app:import <csv_file_path> <reference_entity_code> ``` So if you want to import your records in the .csv file located in `/tmp/file.csv` for your `brand` reference entity: ```bash php bin/console app:import /tmp/file.csv brand ``` <file_sep>/src/FileLogger.php <?php declare(strict_types=1); namespace App; use Monolog\Handler\StreamHandler; use Monolog\Logger; use Psr\Log\LoggerInterface; use Symfony\Component\DependencyInjection\ParameterBag\ParameterBagInterface; /** * Wraps the Monolog logger to have a dedicated log file per import. 
* * @author <NAME> <<EMAIL>> * @copyright 2019 Akeneo SAS (https://www.akeneo.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ class FileLogger { public $numSkipped = 0; public $numCreated = 0; public $numUpdated = 0; /** @var LoggerInterface */ private $logger; /** @var ParameterBagInterface */ private $params; /** @var string */ private $logFilePath; public function __construct(LoggerInterface $logger, ParameterBagInterface $params) { $this->logger = $logger; $this->params = $params; } public function warning(string $message, array $context = []): void { $this->logger->warning($message, $context); } public function info(string $message, array $context = []): void { $this->logger->info($message, $context); } public function skip(string $message, array $context = []): void { $this->logger->warning($message, $context); $this->numSkipped++; } public function startLogging() { $this->logFilePath = $this->generateLogFilePath(); $this->logger->pushHandler(new StreamHandler($this->logFilePath, Logger::DEBUG, false)); } public function logResponses(array $responses) { foreach ($responses as $response) { $statusCode = $response['status_code']; switch ($statusCode) { case 201: $this->numCreated++; break; case 204: $this->numUpdated++; break; default: $this->skip( sprintf('Skipped record "%s", an error occured during import: %s', $response['code'], json_encode($response['errors'])) ); } } } public function getLogFilePath(): string { return $this->logFilePath; } private function generateLogFilePath(): string { return sprintf( '%s/import-%s.log', $this->params->get('kernel.logs_dir'), date('Y-m-d-H-i-s') ); } }
ba6f4e51ec476f51f111528cecb7bff8e8af7915
[ "Markdown", "PHP" ]
2
Markdown
Delphineray/CsvToReferenceEntity
263f3319b9b2737aec502624ad4aea1f78fa8da1
2499b38e3549bafacf80d6be7d95d9df24e330e0
refs/heads/master
<repo_name>pesonainformatika/simple-password-generator<file_sep>/main.py data_dict = { 'username': ['pery', 'bayu', 'fajar', 'iqbal'] } def passMaker(dict_keys): password_list = [] username_list = [] for username in dict_keys: generate_password = sum(ord(c) for c in username) password_list.append(f"{username}{generate_password}") username_list.append(username) # append result named_dict = { "username": username_list, "generate_password": password_list } return named_dict print(passMaker(data_dict['username']))
f904a25b8ee0d84e16463660d4cb23023a3e8437
[ "Python" ]
1
Python
pesonainformatika/simple-password-generator
18157803b34a54c2c3a52f5de5c1413f74bf6974
8dbda38049fa838a2ba94ce1d0e716ddbd276673
refs/heads/master
<repo_name>Rutgaizerrino/1000-7<file_sep>/script.js var button= document.getElementById('button'); var display= document.getElementById('display'); button.addEventListener('click', kopikaka) function kopikaka(){ for (let i=1000; i>=0;i= i-7){ var kop=i//,+'-7'); do { display.textContent=kop; } while (kop>=0) { } // console.log(kop,'-7') } }
74b2ae55f5dc9c4a5d6d8ac5f68a4f05ad6d1913
[ "JavaScript" ]
1
JavaScript
Rutgaizerrino/1000-7
1272bf58191beb4ffba321c6a2d4f00c67ba890b
67912b3fab60d4c67308b367a15fdb3af2cfed5e
refs/heads/master
<file_sep># Changelog This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). ## [1.0.0] - 2020-02-17 ### Added - initial commit - added service traefik ## [1.0.1] - 2020-02-17 ### Added - added service portainer ## [1.0.2] - 2020-02-17 ### Changed - renamed LICENSE.md to LICENSE ## [1.1.0] - 2020-06-06 ### Changed - added the option to configure both main ports in .env. <file_sep># traefik A lightweight HTTP reverse proxy, based on [Traefik](https://traefik.io/). ## Usage Copy `.env.example` to `.env` and set a new value for `COMPOSE_PROJECT_NAME` if needed. Run `docker-compose`. ``` docker-compose up ``` Run `docker-compose` with the `-d` (detached) parameter if you want the service to run in the background. ``` docker-compose up -d ``` Usually you do not need more than one Traefik instance running on the same machine because Traefik ties directly into Docker to automatically handle all running containers. ## Configuring access to the Traefik public network In order for your other services to gain access to the generated public network, through which Traefik is listening for incoming requests, you need to configure them properly. First add the following service level `networks` configuration to each of the services that will be using Traefik. ``` networks: - public ``` After that add a top level `networks` section which will allow containers to gain access to the public Traefik network. ``` networks: public: external: NETWORK_NAME driver: bridge ``` The value for `NETWORK_NAME` should be replaced with whatever `COMPOSE_PROJECT_NAME_public` yields in this project (for the default value of `COMPOSE_PROJECT_NAME` it would be `docker-services-traefik_public`). Finally, explicitely allow your services to access Traefik by adding the following line to the `labels` section. ``` labels: - "traefik.enable=true" ``` Here is an example of of a `docker-compose.yml` file with the above configuration. 
``` version: '3.2' services: my-awesome-service: networks: - public labels: - "traefik.enable=true" networks: public: external: docker-services-traefik_public driver: bridge ``` ## Default container restart policy Please note that the `traefik` service has been configured with the `unless-stopped` restart policy which means that it will continue to run until manually stopped, even after the machine has been restarted. If you do not want the service to restart automatically you can disable that behavior by simply removing the `restart: unless-stopped` line from `docker-compose.yml` or by choosing a different restart policy from the list which you can find [here](https://docs.docker.com/compose/compose-file/#restart). ## Requirements Docker 17.12.0 or newer. ## License This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. <file_sep># portainer [Portainer](https://www.portainer.io/) is a web based tool for maintaining and managing Docker environments. ## Usage Copy `.env.example` to `.env` and set a new value for `COMPOSE_PROJECT_NAME` if needed. Run `docker-compose` with the `-d` (detached) argument to make the container run in the background. ``` docker-compose up -d ``` If you do not want to run Portainer in the background simply omit the `-d` argument. ``` docker-compose up ``` **NOTE:** you only need one running Portainer instance per machine since it ties directly into Docker, which allows to control all the images, containers etc. from one instance. ## Default container restart policy Please note that the `portainer` service has been configured with an `unless-stopped` restart policy which means that it will continue to run until manually stopped, even after the machine has been restarted. 
If you do not want the service to restart automatically you can disable that behavior by simply removing the `restart: unless-stopped` line from `docker-compose.yml` or by choosing a different restart policy from the list which you can find [here](https://docs.docker.com/compose/compose-file/#restart). ## Requirements Docker 17.04.0 or newer. ## License This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details. <file_sep>version: '3.5' services: traefik: image: traefik:latest container_name: "${COMPOSE_PROJECT_NAME}_traefik" command: - "--log.level=ERROR" - "--api.insecure=true" - "--providers.docker=true" - "--providers.docker.exposedbydefault=false" - "--entrypoints.web.address=:80" networks: - public ports: - "80:80" - "8080:8080" volumes: - /var/run/docker.sock:/var/run/docker.sock restart: unless-stopped networks: public: name: "${COMPOSE_PROJECT_NAME}_public" driver: bridge <file_sep># docker-services A collection of Docker services, meant for quick deployment with Docker Compose. ## Usage Clone the repository to a place of your choosing. ``` git clone https://github.com/brezanac/docker-services.git docker-services ``` For specific details on how to run and use each of the services please consult their respective README files. ## List of available services ### Traefik [Traefik](https://docs.traefik.io/) is a lightweight HTTP reverse proxy and load balancer which is especially suitable for deploying microservices through infrastructure like Docker, Swarm mode, Kubernetes, Amazon ECS etc. ### Portainer [Portainer](https://www.portainer.io) is a management tool for Docker environments, offering full control over Docker hosts and Swarm clusters through an easily accessible web interface. lightweight management UI which allows you to easily manage your different Docker environments (Docker hosts or Swarm clusters). 
## License This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.<file_sep>COMPOSE_PROJECT_NAME=docker-services-traefik <file_sep># Docker compose project name. # It will be used as prefix for generated images, running containers, volumes and network names. COMPOSE_PROJECT_NAME=docker-services-portainer # Docker host port that will be connecting to the exposed Portainer service port. # Please note that the Portainer container itself will still expose the default port 9000. # However, since port 9000 is often used for by other apps, the default here is set to 9090. PORTAINER_GENERAL_HOST_PORT=9090 # Docker host port for the Portainer EDGE agent that is used for reverse tunneling. # Please note that the Portainer container itself will still expose the default Edge agent port 8000. PORTAINER_EDGE_HOST_PORT=8000
698ea9528f0a1a1d0a04120ddd5db5ccde5f9bb2
[ "Markdown", "YAML", "Shell" ]
7
Markdown
brezanac/docker-services
2a3e766f8136053344cda16b0ea8de776d04a269
9b502611f450ca2a499f61f9ebf4db4a51638c2c
refs/heads/master
<repo_name>Anthony-Wilson/Douban-RN<file_sep>/component/books/BookDetail.js import React, { Component } from "react"; import { StyleSheet, View, Text, ScrollView, Button, Platform, BackHandler } from "react-native"; import Service from "./../common/Service"; import Common from './../common/Common' import Header from './../common/Header'; import Icon from "react-native-vector-icons/FontAwesome"; export default class BookDetial extends Component { constructor(props) { super(props); this.state = { bookData: null //图书对象详细信息 }; } static navigationOptions = { header: () => null } _backhandler = () => { const { navigation } = this.props; navigation.goBack(); return true; } componentWillMount() { if (Platform.OS === 'android') { BackHandler.addEventListener('hardwareBackPress', this._backhandler); } } componentWillUnmount() { if (Platform.OS === 'android') { BackHandler.removeEventListener('hardwareBackPress', this._backhandler); } } getData() { const { state } = this.props.navigation; var that = this; var url = Service.book_detial_id + state.params.bookId; // alert(url) Common.getRequset(url, (data) => { // 请求成功回调函数 that.setState({ bookData: data }) }, (error) => { // 请求失败回调函数 // ToastAndroid.show(error, ToastAndroid.SHORT) 无法使用,第一个参数只能是字符串 alert(error) }) } render() { const { navigation } = this.props; return (<View style={styles.container}> { this.state.bookData ? 
<View> <Header navigation = {navigation} initObj={{ backName:navigation.state.params.backName, barTitle:this.state.bookData.title }} /> <ScrollView> <View> <Text style={styles.title}>图书简介</Text> <Text style={styles.text}>{this.state.bookData.summary}</Text> </View> <View style={{ marginTop: 10 }}> <Text style={styles.title}>作者简介</Text> <Text style={styles.text}>{this.state.bookData.author_intro}</Text> </View> <View style={{ height: 60 }}></View> </ScrollView> </View> : Common.loading } </View>); } componentDidMount() { // 请求数据 this.getData(); } } const styles = StyleSheet.create({ container: { flex: 1, backgroundColor: "#fff" }, bookTitle: { fontSize: 24, marginTop: 10, marginLeft: 10, marginBottom: 10, color: 'blue', alignSelf: 'center', fontWeight: "bold" }, title: { fontSize: 16, marginTop: 10, marginLeft: 10, marginBottom: 10, fontWeight: "bold" }, text: { marginHorizontal: 10, color: "#000" } })<file_sep>/component/common/Service.js /* 接口Api 给予豆瓣开放的Api图书与电影等 */ var BaseURL = "https://api.douban.com/v2/"; var DoubanApi = { /* 图书搜索 image 图书缩略图 title 图书名称 publish 出版社 price 价格 pages 页数 */ book_search: BaseURL + "book/search", /* 图书详情 image 图书缩略图 title 图书名称 publish 出版社 price 价格 pages 页数 summary 图书简介 author_intro 作者简介 */ book_detial_id: BaseURL + "book/", /* 电影搜索 images.medium 电影图像 title 电影名称 casts 电影演员 tating.average 电影评分 year 上映时间 genres 电影标签 mobile_url 电影详情URL */ movie_search: BaseURL + "movie/search" }; export default DoubanApi;<file_sep>/README.md # Douban-RN this is the DOUBAN API react-navive demo git克隆下面的地址: https://github.com/Anthony-Wilson/Douban-RN.git 此版本为 react-native 0.45.1版本 执行: > npm init 连接手机并执行: > react-native run-android 具体的图片信息 这是apk图标 ![Alt text](http://mdpic-1253455210.cosgz.myqcloud.com/Douban-RN/1.png) 进去后的界面: ![Alt text](http://mdpic-1253455210.cosgz.myqcloud.com/Douban-RN/2.png) 一个图书详情的页面: ![Alt text](http://mdpic-1253455210.cosgz.myqcloud.com/Douban-RN/3.png) 电影列表: ![Alt text](http://mdpic-1253455210.cosgz.myqcloud.com/Douban-RN/4.png) 
电影加载页面: ![Alt text](http://mdpic-1253455210.cosgz.myqcloud.com/Douban-RN/5.png) 电影加载成功页面: ![Alt text](http://mdpic-1253455210.cosgz.myqcloud.com/Douban-RN/6.png) <file_sep>/component/movies/MovieWebView.js import React, { Component } from "react"; import { View, Text, WebView, Platform, BackHandler } from "react-native"; import Header from './../common/Header'; export default class MovieWebView extends Component { static navigationOptions = { header: () => null } _backhandler = () => { const { navigation } = this.props; navigation.goBack(); return true; } componentWillMount() { if (Platform.OS === 'android') { BackHandler.addEventListener('hardwareBackPress', this._backhandler); } } componentWillUnmount() { if (Platform.OS === 'android') { BackHandler.removeEventListener('hardwareBackPress', this._backhandler); } } render() { const {navigation} = this.props; return (<View style={{ backgroundColor: "#fff", flex: 1 }}> <Header navigation={navigation} initObj={{ backName:navigation.state.params.backName, barTitle:navigation.state.params.title }}/> <WebView startInLoadingState={true} contentInset={{ top: -44, bottom: -120 }} source={{ uri: navigation.state.params.url }} /> </View>); } }<file_sep>/component/books/BookList.js import React, { Component } from "react"; import { StyleSheet, View, Text, FlatList, ToastAndroid, Dimensions, Platform, BackHandler } from "react-native"; import Common from "./../common/Common"; import SearchBar from "./../common/SearchBar"; import Service from "./../common/Service"; import BookItems from './BookItems'; export default class BookList extends Component { constructor(props) { super(props) this.state = { show: false, keyWords: '么么哒', dataSource: [], count: 20, } } static navigationOptions = { header: () => null } // 物理返回键的代码 _backhandler = () => { if (this.lastBackPressed && this.lastBackPressed + 2000 >= Date.now()) { BackHandler.exitApp(); } this.lastBackPressed = Date.now(); ToastAndroid.show('再按一次退出应用', ToastAndroid.SHORT) return 
true // 取消默认行为 } componentWillMount() { if (Platform.OS === 'android') { BackHandler.addEventListener('hardwareBackPress', this._backhandler); } } componentWillUnmount() { if (Platform.OS === 'android') { BackHandler.removeEventListener('hardwareBackPress', this._backhandler); } } getData() { // 发起网络请求开启loading this.setState({ show: false }) // 请求数据 var that = this; var url = Service.book_search + "?count=" + this.state.count + "&q=" + this.state.keyWords; // console.log(url) Common.getRequset(url, (data) => { // 请求成功回调函数 // {"count":"0","start":"0","total":"0","books":[]} // 如果this.books不存在或者长度为0则显示 if (!data.books || data.books.length == 0) { ToastAndroid.show("未找到相关书籍", ToastAndroid.SHORT) // console.log("未找到相关书籍") } that.setState({ show: true, dataSource: data.books }) }, (error) => { // 请求失败回调函数 // ToastAndroid.show(error, ToastAndroid.SHORT) 无法使用,第一个参数只能是字符串 alert(error); }) } _renderItem = ({ item }) => { const { navigate } = this.props.navigation; return <BookItems book={item} onPress={() => navigate('BookDetail', { bookId: item.id, backName: '图书' })} /> } _changeText = (text) => { this.setState({ keyWords: text, count: 20 }) } _searchText = () => { this.getData(); if (this.list) { this.list.scrollToOffset({ x: 0, y: 0 }) } } _onEndReached = () => { this.setState({ count: this.state.count + 10 }) this.getData(); } render() { return (<View style={{ backgroundColor: '#fff' }}> <SearchBar placeholder="请输入图书名字" onPress={this._searchText} onChangeText={this._changeText} /> <FlatList style={{ height: Dimensions.get('window').height - 140 }} data={this.state.dataSource} renderItem={this._renderItem} keyExtractor={(item, index) => index} onEndReachedThreshold={0.01} onEndReached={this._onEndReached} getItemLayout={(data, index) => ({ length: 120, offset: 120 * index, index })} initialScrollIndex={0} ref={list => this.list = list} /> </View>) } componentDidMount() { // 请求数据 this.getData(); } }<file_sep>/component/movies/MovieItem.js import React, { Component } from 
"react"; import { StyleSheet, View, Text, Image, TouchableOpacity } from "react-native"; export default class MovieItem extends Component { render() { var movie = this.props.movie; var actors = []; for (let i in movie.casts) { actors.push(movie.casts[i].name); } return (<TouchableOpacity style={styles.container} {...this.props}> {/*电影图像*/} <View style={styles.imageContainer}> <Image style={styles.image} source={{uri:movie.images.medium}}/> </View> {/*电影信息*/} <View style={styles.contentContainer}> <View style={styles.textContainer}> <Text NumberOfLines={1}>{movie.title}</Text> </View> <View style={styles.textContainer}> <Text style={styles.casts_year}>{actors}</Text> </View> <View style={styles.textContainer}> <Text style={styles.casts_year}>{movie.year}</Text> </View> <View style={{flexDirection:"row",alignItems:"center"}}> <Text style={styles.genres}>{movie.genres}</Text> <Text style={styles.ratingAverage}>{movie.rating.average}分</Text> </View> </View> </TouchableOpacity>); } } const styles = StyleSheet.create({ container: { flexDirection: "row", height: 120, padding: 10 }, imageContainer: { justifyContent: "center", alignItems: "center" }, image: { width: 80, height: 100 }, contentContainer: { flex: 1, marginLeft: 15 }, textContainer: { flex: 1, justifyContent: "center" }, casts_year: { color: "#a3a3a3", fontSize: 13 }, genres: { color: "#2BB2A3", fontSize: 16 }, ratingAverage: { marginLeft: 10, color: "#A70A0A" } })<file_sep>/component/common/Header.js /* 封装 Header 头部组件,在头部显示标题和返回按钮 包含组件: 外部传入的值: navigator 点击返回上一级页面 initObj(backName,title) */ import React, { Component } from "react"; import { StyleSheet, View, Text, TouchableOpacity } from "react-native"; import Icon from "react-native-vector-icons/FontAwesome"; export default class Header extends Component { _pop = () => { this.props.navigation.goBack() } render() { // 获取对象,包括按钮名称barTitle与 标题barTitle let headerContent = this.props.initObj; return (<View style={styles.container}> <TouchableOpacity 
style={styles.headerBtn} onPress={this._pop}> <Icon name="chevron-left" size={20} color="#fff"/> <Text style={styles.headerBtnText}>{headerContent.backName}</Text> </TouchableOpacity> <View style={styles.headerTitle}> <Text style={styles.headerTitleText} NumberOfLines={1}>{headerContent.barTitle}</Text> </View> </View>); } } const styles = StyleSheet.create({ container: { flexDirection: "row", height: 40, backgroundColor: "#1CF529" }, headerBtn: { width: 70, height: 40, flexDirection: "row", justifyContent: "center", alignItems: "center" }, headerBtnText: { marginLeft: 10, color: "white", fontSize: 16 }, headerTitle: { flex: 1, height: 40, justifyContent: "center", alignItems: "center" }, headerTitleText: { color: "white", fontSize: 16 } })<file_sep>/component/common/SearchBar.js import React, { Component } from "react"; import { StyleSheet, View, Text, TextInput, TouchableOpacity } from "react-native"; export default class SearchBar extends Component { render() { return (<View style={styles.container}> <TextInput {...this.props} underlineColorAndroid="transparent" multiline={true} style={styles.searchTnput}/> <TouchableOpacity {...this.props} style={styles.searchBtn}> <Text style={styles.searchBtnText}>搜索</Text> </TouchableOpacity> </View>); } } const styles = StyleSheet.create({ container: { height: 40, flexDirection: "row", marginHorizontal: 20, marginTop: 10, marginBottom: 10 }, searchTnput: { flex: 1, height: 40, borderWidth: StyleSheet.hairlineWidth, borderColor: "gray", borderRadius: 5, paddingLeft: 10 }, searchBtn: { width: 50, height: 40, backgroundColor: "#1CF529", justifyContent: "center", alignItems: "center", borderRadius: 5, marginLeft: 10 }, searchBtnText: { color: "white", fontSize: 16 } })
f204664108778b6e5fc1e09cfedeed1f25b75c78
[ "JavaScript", "Markdown" ]
8
JavaScript
Anthony-Wilson/Douban-RN
5a20d83c82d542a861ff8337d69151a75226b0ac
b9f7ede120238bde812bce34a128a5f504aa6139
refs/heads/master
<file_sep>function startApp() { $("#menuLogout").click(logoutUser); showHideMenuLinks(); $("#formLogin").submit(loginUser); $("#formRegister").submit(registerUser); $("#loadingBox").hide(); $("#infoBox").hide(); $("#errorBox").hide(); function showHideMenuLinks() { if (sessionStorage.getItem('username')) { // We have logged in user $("#menuProfile").show(); $("#menuHome").show(); $("#menuInfo").show(); $("#menuSchedule").show(); $("#menuBooks").show(); $("#menuRents").show(); $("#menuEvents").show(); $("#menuContacts").show(); $("#menuLogin").hide(); $("#menuRegister").hide(); $("#menuLogout").show(); } else { // No logged in user $("#menuProfile").hide(); $("#menuHome").hide(); $("#menuInfo").hide(); $("#menuSchedule").hide(); $("#menuBooks").hide(); $("#menuRents").hide(); $("#menuEvents").hide(); $("#menuContacts").hide(); $("#menulogin").show(); $("#menuRegister").show(); $("#menuLogout").hide(); } } $("form").submit(function (e) { e.preventDefault() }); const kinveyBaseUrl = "https://baas.kinvey.com/"; const kinveyAppKey = "<KEY>"; const kinveyAppSecret = "02a1f9e33f4d43a1829550f91dd1a4d3"; const kinveyAppAuthHeaders = { 'Authorization': "Basic " + btoa(kinveyAppKey + ":" + kinveyAppSecret), }; function showInfo(message) { $('#infoBox').text(message); $('#infoBox').show(); setTimeout(function() { $('#infoBox').fadeOut(); }, 3000); } function showError(errorMsg) { $('#errorBox').text(errorMsg); $('#errorBox').show(); } function handleAjaxError(response) { let errorMsg = JSON.stringify(response); if (response.readyState === 0) errorMsg = "Cannot connect due to network error."; if (response.responseJSON && response.responseJSON.description) errorMsg = response.responseJSON.description; showError(errorMsg); if(errorMsg == "Invalid credentials. 
Please retry your request with correct credentials") { showError("Невалидно потребителско име или парола") } } function loginUser() { event.preventDefault(); let userData = { username: $('#formLogin input[name=username]').val(), password: $('#formLogin input[name=password]').val() }; $.ajax({ method: "POST", url: kinveyBaseUrl + "user/" + kinveyAppKey + "/login", headers: kinveyAppAuthHeaders, data: userData, success: loginSuccess, error: handleAjaxError }); function loginSuccess(userInfo) { saveAuthInSession(userInfo); showHideMenuLinks(); showInfo('Успешен вход.'); window.setTimeout(function() { window.location.href = 'index.html'; }, 2000); } } function logoutUser() { $.ajax({ method: "POST", url: kinveyBaseUrl + "user/" + kinveyAppKey + "/_logout", headers: getKinveyUserAuthHeaders() }); sessionStorage.clear(); showHideMenuLinks(); showInfo('Успешен изход.'); } function registerUser() { event.preventDefault(); let userData = { username: $('#formRegister input[name=username]').val(), password: $('#formRegister input[name=password]').val(), name: $('#formRegister input[name=fname]').val(), sname: $('#formRegister input[name=sname]').val(), studentID: $('#formRegister input[name=studentID]').val(), email: $('#formRegister input[name=email]').val() }; $.ajax({ method: "POST", url: kinveyBaseUrl + "user/" + kinveyAppKey + "/", headers: kinveyAppAuthHeaders, data: userData, success: registerSuccess, error: handleAjaxError }); function registerSuccess(userInfo) { showInfo('Успешна регистрация.'); } } function saveAuthInSession(userInfo) { let userAuth = userInfo._kmd.authtoken; sessionStorage.setItem('authToken', userAuth); let userId = userInfo._id; sessionStorage.setItem('userId', userId); let username = userInfo.username; sessionStorage.setItem('username', username); $('#profile h2').text( "Здравей, " + userInfo.name + ""); } function getKinveyUserAuthHeaders() { return { 'Authorization': "Kinvey " + sessionStorage.getItem('authtoken'), }; } }
06e86906484f252c60b8f5919a7789ac721364e7
[ "JavaScript" ]
1
JavaScript
MuPowe/UNIcorn
3bd47b55dc133b36a8471e53781d3876ba6ec621
0b481fb458558cbeb995d0b424fe55431433ca58
refs/heads/master
<repo_name>andrerds/capacitor-plugin-facebook-analytics<file_sep>/src/web.ts import { WebPlugin } from '@capacitor/core'; import { FacebookAnalyticsPlugin } from './definitions'; export class FacebookAnalyticsWeb extends WebPlugin implements FacebookAnalyticsPlugin { constructor() { super({ name: 'FacebookAnalytics', platforms: ['web'], }); } logEvent(options: { event: string, valueToSum?: number, params?: any }): Promise<string> { // TODO: Implement. return Promise.resolve(options.event); } logPurchase(options: { amount: number, params?: any, currency: string }): Promise<string> { // TODO: Implement. return Promise.resolve(JSON.stringify(options)); } logAddPaymentInfo(options: {success: number}): Promise<string> { // TODO: Implement. return Promise.resolve(JSON.stringify(options)); } logAddToCart(options: {amount: number, currency: string, params?: any}): Promise<string> { // TODO: Implement. return Promise.resolve(JSON.stringify(options)); } logCompleteRegistration(options: {params?: any}): Promise<string> { // TODO: Implement. return Promise.resolve(JSON.stringify(options)); } logInitiatedCheckout(options: {amount: number, params?: any}): Promise<string> { // TODO: Implement. return Promise.resolve(JSON.stringify(options)); } } const FacebookAnalytics = new FacebookAnalyticsWeb(); export { FacebookAnalytics }; import { registerWebPlugin } from '@capacitor/core'; registerWebPlugin(FacebookAnalytics);
fb0aeb8168e602513114aa48f2109d94cf82e388
[ "TypeScript" ]
1
TypeScript
andrerds/capacitor-plugin-facebook-analytics
a307bf9d8c5b7b2b3fd3efcc3228b44295b2b1f2
35d2fef3c70b794847cc0d5954df8b8b4cd7410b
refs/heads/main
<repo_name>picrossin/roll-a-ball-galaxy<file_sep>/Assets/Scripts/PlayerController.cs using TMPro; using UnityEngine; using Valve.VR; [RequireComponent(typeof(Rigidbody))] public class PlayerController : MonoBehaviour { [SerializeField] private bool controlWithKeyboard; [SerializeField] [Range(1f, 100f)] private float speed = 2f; [SerializeField] private TextMeshProUGUI countText; [SerializeField] private TextMeshProUGUI winText; [SerializeField] private TextMeshProUGUI pauseText; [SerializeField] private Transform gravityCenter; [SerializeField] [Range(0f, 100f)] private float gravityConstant = 9.81f; [SerializeField] private Transform movementHelper; [SerializeField] private AudioClip collectSound; [SerializeField] private AudioSource _audioSource; [SerializeField] private ParticleSystem _ps; private bool _isRunning; public bool IsRunning => _isRunning; private ParticleSystem.MainModule _psMain; private ParticleSystem.TrailModule _psTrails; private Vector2 _input; private Rigidbody _rigidbody; private int _count; private int _goalCount; private void Start() { _rigidbody = GetComponent<Rigidbody>(); _psMain = _ps.main; _psTrails = _ps.trails; SetCountText(); winText.text = ""; _isRunning = true; _goalCount = GameObject.FindGameObjectsWithTag("Pick Up").Length; } private void Update() { _input = new Vector2(Input.GetAxis("Horizontal"), Input.GetAxis("Vertical")); if (Input.GetKeyDown(KeyCode.Escape)) { _isRunning = !_isRunning; } pauseText.text = _isRunning ? "" : "Paused"; Time.timeScale = _isRunning ? 
1 : 0; AudioListener.pause = !_isRunning; } private void FixedUpdate() { // Gravity _rigidbody.AddForce((gravityCenter.position - transform.position).normalized * gravityConstant); // Movement Vector3 movementForce = Vector3.zero; float steering = 0f; if (controlWithKeyboard) { movementForce = movementHelper.forward * _input.y * speed; steering = _input.x; } else { movementForce = movementHelper.forward * SteamVR_Actions.buggy.Throttle[SteamVR_Input_Sources.Any].axis * speed; steering = SteamVR_Actions.buggy.Steering[SteamVR_Input_Sources.Any].axis.x; Debug.Log($"throttle: {SteamVR_Actions.buggy.Throttle[SteamVR_Input_Sources.Any].axis}, steering x: {SteamVR_Actions.buggy.Steering[SteamVR_Input_Sources.Any].axis.x}"); } movementHelper.position = transform.position; movementHelper.rotation = Quaternion.FromToRotation( movementHelper.up, (movementHelper.position - gravityCenter.position).normalized) * movementHelper.rotation * Quaternion.Euler(0, steering, 0); _rigidbody.AddForce(movementForce); } private void OnTriggerEnter(Collider other) { if (other.gameObject.CompareTag("Pick Up")) { other.gameObject.SetActive(false); _audioSource.PlayOneShot(collectSound, 0.5f); _count++; _psMain.maxParticles += 5; _psTrails.lifetimeMultiplier += 0.025f; SetCountText(); } } private void SetCountText() { countText.text = $"Count: {_count}"; if (_count == _goalCount) { winText.text = "You Win!"; } } }<file_sep>/Assets/Scripts/OrientToPlanet.cs using UnityEngine; public class OrientToPlanet : MonoBehaviour { [SerializeField] private GameObject planet; private void Update() { transform.up = (transform.position - planet.transform.position).normalized; } } <file_sep>/Assets/Scripts/LockPlayer.cs using UnityEngine; public class LockPlayer : MonoBehaviour { [SerializeField] private Transform ball; private void Update() { transform.position = ball.position; transform.rotation = ball.rotation; } } <file_sep>/Assets/Scripts/Rotator.cs using UnityEngine; public class Rotator : MonoBehaviour 
{ [SerializeField] private Vector3 rotationDirection; [SerializeField] [Range(1f, 100f)] private float speed; private void Update() { transform.Rotate(rotationDirection * speed * Time.deltaTime); } } <file_sep>/README.md # roll-a-ball-galaxy [https://picrossin.github.io/roll-a-ball-galaxy/](https://picrossin.github.io/roll-a-ball-galaxy/) <file_sep>/Assets/Scripts/Pacer.cs using UnityEngine; public class Pacer : MonoBehaviour { [SerializeField] private Vector3 rotationVector; [SerializeField] private PlayerController player; private GameObject _parent; private void Start() { _parent = new GameObject(); _parent.transform.position = Vector3.zero; transform.parent = _parent.transform; } private void Update() { if (player.IsRunning) { transform.parent.rotation *= Quaternion.Euler(rotationVector); } } }
6d850661e1a87b5ec1be07fe360412a130301647
[ "Markdown", "C#" ]
6
C#
picrossin/roll-a-ball-galaxy
0d96a30551a1e874375e6d57698c33c67e1f9d09
abb2852d7687866a0b358303f77bd6c2181ded17
refs/heads/master
<file_sep>#!/usr/bin/python from future_client import FutureClient, Game, MessageSlot import serial import serial.tools.list_ports as list_ports import time import struct import random import threading import logging illum_count = 25 led_count = 12 helices = { 'D':1,'C':2,'B':3,'A':4,'G':5,'F':6,'E':7 } boosters = { '1':24, '2':23, '3':22, '4':21, '5':20 } buttongame_map = { 'Dump Core':0, 'PURGE NOW':8, 'Elide Nesting':9, 'Enable Life Support':10, 'Semiaxis Out':12, 'Escape Timeline':13, 'Jump to Parallel Timeline':14, 'Accelerate Timeline':15, 'Advance Timeline':16, 'Halt Timeline':17, 'Reverse Timeline':18, 'RELOAD CORE':19, } class SingleButtonGame(Game): def __init__(self,c,id='single_button_game'): self.c = c super(SingleButtonGame, self).__init__(id) def play_game(self): (msg,target) = random.sample(buttongame_map.items(),1)[0] logging.debug('single button {}: {}'.format(msg,target)) self.update_message(msg) # randomize all buttons self.c.randomize_illuminated() # blink target self.c.set_illuminated(target,random.choice([2,3,2,3,4])) starttime = time.time() duration = 8.5 + 2.0 while self.is_running() and (time.time()-starttime) < duration: if not self.wait(0.05): return for i in self.c.get_keypresses(): if i == target: self.c.set_illuminated(i,1) self.finish(3) return self.finish(-5); class ButtonSetGame(Game): def __init__(self,c,id='button_set_game'): self.c = c super(ButtonSetGame, self).__init__(id) def make_indices_and_msg(self): pass def play_game(self): (superl,targetl,msg) = self.make_indices_and_msg() selected = set() target = set(targetl) superset = set(superl) for idx in superl: self.c.set_illuminated(idx,0) self.update_message(msg) starttime = time.time() duration = 8.5 + (2.0 * len(target)) while self.is_running() and (time.time()-starttime) < duration: if not self.wait(0.05): return for i in self.c.get_keypresses(): if i in selected: self.c.set_illuminated(i,0) selected.remove(i) elif i in superset: self.c.set_illuminated(i,1) selected.add(i) 
if target == selected: self.finish(5) return for idx in superl: self.c.set_illuminated(idx,2) self.finish(-5); class HelicesGame(ButtonSetGame): def __init__(self,c,id='helices_game'): super(HelicesGame, self).__init__(c,id) def make_indices_and_msg(self): k = random.randint(1,5) if k > 3: return (helices.values(),helices.values(), 'ACTIVATE ALL HELICES') else: elements = random.sample(helices.items(),k) msg = 'Activate Helix '+', '.join([x[0] for x in elements]) logging.debug("helix elements {}".format(elements)) return (helices.values(),[x[1] for x in elements],msg) class BoostersGame(ButtonSetGame): def __init__(self,c,id='boosters_game'): super(BoostersGame, self).__init__(c,id) def make_indices_and_msg(self): k = random.randint(1,8) if k >= 3: return (boosters.values(),boosters.values(), 'ENGAGE ALL BOOSTERS') else: elements = random.sample(boosters.items(),k) msg = 'Engage Booster '+', '.join([x[0] for x in elements]) logging.debug("booster elements {}".format(elements)) return (boosters.values(),[x[1] for x in elements],msg) class Controller: def __init__(self): ports={} for (name,_,_) in list_ports.grep('/dev/ttyACM*'): port = serial.Serial(name, timeout=3) port.write('I\n') teensyid = port.readline().strip() ports[teensyid] = port for (i,p) in ports.items(): print("Found {0}".format(i)) self.t=ports['teensy'] self.tpp=ports['teensypp'] # teensy 3 is unreliable; we're not using it right now anyway try: self.t3=ports['teensy3'] except: self.t3=None self.tlock = threading.RLock() self.tpplock = threading.RLock() self.t3lock = threading.RLock() # imap entries are (pressed, mode) self.imap = [(False,0)]*illum_count self.tlock.acquire() self.t.write('m\\x0cmBoot sequence\\ncomplete.\n') time.sleep(0.5) self.t.write('m\\x0c\n') for i in range(illum_count): self.set_illuminated(i,0) self.tlock.release() def get_knobs(self): self.t3lock.acquire() self.t3.write('r\n') knobs = self.t3.readline().strip() self.t3lock.release() return map(lambda 
x:map(int,x.split('/')),knobs.split()) def get_keypresses(self): ipressed = [] self.tpplock.acquire() self.tpp.write('r\n') keys = self.tpp.readline().strip() self.tpplock.release() for i in range(illum_count): newp = keys[i]=='1' (oldp,mode) = self.imap[i] if (newp and not oldp): # button down press ipressed.append(i) self.imap[i] = (newp,mode) return ipressed def set_illuminated(self,i,mode): self.tlock.acquire() self.t.write('i{0}:{1}\n'.format(i,mode)) self.tlock.release() (oldp, _) = self.imap[i] self.imap[i] = (oldp, mode) def randomize_illuminated(self): for idx in range(illum_count): if random.randint(0,2) == 1: self.set_illuminated(idx,random.choice([0,0,0,0,1,2,2,3,3,4])) for idx in range(led_count): if random.randint(0,2) == 1: self.set_led(idx,random.choice([0,0,0,0,1,2,2,3,3,4])) def set_led(self,i,mode): self.tlock.acquire() self.t.write('l{0}:{1}\n'.format(i,mode)) self.tlock.release() def set_light(self,colors): self.tlock.acquire() all_colors = list('rgb') colors = list(colors) for color in all_colors: m='p'+color if color in colors: m += '+\n' else: m += '-\n' self.t.write(m) self.tlock.release() def send_msg(self,msg,clear=True): if msg == None: msg = '' if clear: msg = '\x0c'+msg msg = msg.replace('\n','\\n') self.tlock.acquire() self.t.write('m{0}\n'.format(msg)) self.tlock.release() def attract(self): for i in range(illum_count): self.set_illuminated(i,0) for i in range(led_count): self.set_led(i,random.choice([0,2,3])) class PressBlinkersGame(Game): def __init__(self,c): super(PressBlinkersGame, self).__init__('blinkers','Disable blinking buttons') self.c = c self.candidates = set(range(illum_count)) self.candidates.remove(11) # #11 doesn't illuminate :( def make_blinkers(self): count = random.randint(4,10) self.blinkers=set(random.sample(self.candidates,count)) def play_game(self): self.make_blinkers() for i in range(illum_count): if i in self.blinkers: c.set_illuminated(i,4) else: c.set_illuminated(i,0) starttime = time.time() while 
self.is_running() and (time.time()-starttime) < 10.0: if not self.wait(0.05): return for i in c.get_keypresses(): if i in self.blinkers: c.set_illuminated(i,0) self.blinkers.remove(i) if len(self.blinkers) == 0: self.finish(5) return self.finish(-5); class SyncBlinkersGame(Game): def __init__(self,c): super(SyncBlinkersGame, self).__init__('synchronize','Synchronize blinking buttons') self.c = c self.candidates = set(range(illum_count)) self.candidates.remove(11) # #11 doesn't illuminate :( def make_blinkers(self): count = random.randint(6,14) part = count/2 self.a=set(random.sample(self.candidates,count)) self.b=set(random.sample(self.a,part)) self.a=self.a.difference(self.b) def play_game(self): self.make_blinkers() for i in range(illum_count): if i in self.a: c.set_illuminated(i,2) elif i in self.b: c.set_illuminated(i,3) else: c.set_illuminated(i,0) starttime = time.time() while self.is_running() and (time.time()-starttime) < 15.0: if not self.wait(0.05): return for i in c.get_keypresses(): if i in self.a: c.set_illuminated(i,3) self.a.remove(i) self.b.add(i) elif i in self.b: c.set_illuminated(i,2) self.b.remove(i) self.a.add(i) if (len(self.a) == 0) or (len(self.b) == 0): self.finish(5) return self.finish(-5); class LCDSlot(MessageSlot): def __init__(self, c, id=None, length=40): self.c = c super(LCDSlot, self).__init__(id,length) def on_message(self,text): self.c.send_msg(text) c = Controller() c.set_light('r') games = [ SingleButtonGame(c), HelicesGame(c), BoostersGame(c), # PressBlinkersGame(c), # SyncBlinkersGame(c) ] slots = [ ] class VidEditClient(FutureClient): def __init__(self,controller): self.c = controller super(VidEditClient,self).__init__(name='VidEditConsole') def on_session_start(self,message): c.set_light('b') def on_session_fail(self,message,score): c.set_light('r') c.attract() def on_session_success(self,message,score): c.set_light('g') c.attract() import sys if __name__ == '__main__' and len(sys.argv) == 1: fc = VidEditClient(c) 
fc.available_games = games fc.message_slots = slots fc.start() try: while True: time.sleep(0.05) except: fc.quit() else: # test mode for i in range(led_count): c.set_led(i,1) time.sleep(0.1) c.set_led(i,0) for i in range(illum_count): c.set_illuminated(i,1) time.sleep(0.1) c.set_illuminated(i,0) while True: for i in c.get_keypresses(): print i," ", for (a,b) in c.get_knobs(): print "{0}-{1} ".format(a,b), print "" <file_sep><pre> ___________________ ( Toy Piano Console ) ------------------- \ ^__^ \ (oo)\_______ (__)\ )\/\ ||----w | || || </pre> It's the toy piano that Shelby found in the street, wired up so each of its keys can transmit either a keyboard press or a MIDI note. See teensyduino code for key mapping. So far, it can only send keypresses. It has no lights, meters, etc. futurecrew_toypiano is the teensyduino sketch. It requires the bounce library from http://playground.arduino.cc/code/bounce toypiano.py is the console controller. Run it with `PYTHONPATH=.. python toypiano.py` feedback sounds in ogg format are in sounds/ folder. I found some useable placeholder sounds already oggified at http://www.acoustica.com/files/aclooplib/ especially http://www.acoustica.com/files/aclooplib/Sound%20Effects%20Tones/ Installation ============ Install the ToyPianoConsole (game) script: sudo cp ToyPianoConsole.init.d /etc/init.d/ToyPianoConsole sudo update-rc.d ToyPianoConsole defaults 99 Reboot: sudo reboot <file_sep>How to create a Future Crew console =================================== System requirements ------------------- A Future Crew node needs to run on some sort of basic computer; if you're implementing your console with Teensies or Arduinos they'll need to connect to a Raspberry Pi or other machine to act as the actual FC node. We have several RPis set aside for Future Crew use. Ask for help setting up a Raspberry Pi image if you need it. While developing your console, you may want to use your laptop or whatever until you're ready to install it on a Pi. 
Here's what you'll need for a node: - Python 2.6 or thereabouts - Python websocket-client package. There are many python websocket packages, so make sure to install the correct one: - pip install websocket-client - If you're using serial connections to connect to your microcontrollers, you'll need the python serial package (pyserial). - On debian: apt-get install python-serial - On openembedded: opkg install python-serial - Local network connection - The files in this repository Configuring Pi's ---------------- For a console: sudo apt-get install python-pip sudo pip install websocket-client pyserial git clone git://github.com/nycresistor/Future-Crew.git For a server: sudo apt-get update # get a coffee... sudo apt-get upgrade # Fly to Brazil to harvest a coffee crop sudo apt-get install python-pip python2.7-dev sudo apt-get install libevent-dev sudo pip install tornado sudo pip install websocket-client git clone git://github.com/nycresistor/Future-Crew.git For anything needing OpenGL, you'll need pogles. First, install SIP: apt-get install python-dev wget http://sourceforge.net/projects/pyqt/files/sip/sip-4.14.6/sip-4.14.6.tar.gz tar zxf sip-4.14.6.tar.gz cd sip-4.14.6/ python configure.py make sudo make install Then, install pogles: mkdir tmp cd tmp sudo pip install pogles # this step will fail! Don't freak out. cd build/pogles sudo vi setup.py Change this file so the lines: include_dirs = ['/opt/vc/include', '/opt/vc/include/interface/vcos/pthreads'] become: include_dirs = ['/opt/vc/include', '/opt/vc/include/interface/vmcs_host/linux', '/opt/vc/include/interface/vcos/pthreads'] Then finish the installation: sudo python setup.py install Console hardware ---------------- A console consists of two basic parts: - The "game" component, which is a set of switches, dials, plugs, etc. 
that the player has to manipulate to complete a task - The "message" component, which is a display terminal or other device capable of displaying ASCII text You should be able to communicate with both of these components from your node. Games and Message Slots ----------------------- A "game" in the context of Future Crew is a simple task that can be performed at a console. For instance, a game could be: * Push all the blinking buttons * Plug port A into port 7 * Turn dial X to 500 * Play "Ode to Joy" on a toy piano Games can be as complex or simple as you like; it's up to the node to implement them. The only restrictions that a game needs to have are: * The task needs to be described (in a goofy or simple way) in under 80 characters of text * It should be able to be accomplished fairly quickly * The console should be able to report success or failure A "message slot" is an area of the message component where a text message can be displayed. A console can have zero or more message slots. (A console could be composed entirely of message slots and have no games-- the player would just shout out instructions for other people! This could be used for a fast food restaurant simulator.) A message is displayed on a message slot until the server sends another message to replace it (or clears it). Outline of a console implementation ----------------------------------- The "future_client" python package provides the three classes you'll need to use to create a console: FutureClient, Game, and MessageSlot. You'll need to make sure the future_client.py file is in your PYTHONPATH; you can do this either by copying or linking the file into a directory in your python path (not recommended) or by adding the directory it's in to the PYTHONPATH variable at runtime. For instance, when I run the curses client from the TestClients directory, I invoke it like this: ```bash PYTHONPATH=.. python curses_client.py ``` You implement the console in python. 
You'll want to start out by importing the important bits of the interface, like so: ```python from future_client import FutureClient, Game, MessageSlot ``` You'll subclass the "Game" object to implement games, and the "MessageSlot" object to implement message slots. You generally won't need to subclass FutureClient. Creating a game --------------- To create a game, just subclass the "Game" class and override the play_game() method. Here's a very, very simple game: ```python class DeptOfMotorVehicles(Game): def __init__(self): super(DeptOfMotorVehicles, self).__init__( 'DMV','Please take a seat.') def play_game(self): self.wait(60*60*4) ``` When this game starts, it will do nothing but wait for four hours. Once the game is over, the player is assumed to have lost. It's not a very fun game, but neither is the real DMV. You'll notice that we used the call self.wait() instead of time.delay() or whatever. You should always use self.wait() to delay; that's because it will immediately exit if the game is cancelled at any time. You'll notice also that we've created a custom constructor here. It calls the parent constructor, which takes two arguments: the name of the game, and the message that should be sent to a console when the game starts. Obviously, we want to be able to win games once in a while. Here's a slightly less pointless one: ```python class FlipTheSwitch(Game): def __init__(self, whichSwitch): self.switch = whichSwitch super(FlipTheSwitch, self).__init__( 'FlipSwitch','Please flip switch '+whichSwitch) def play_game(self): starttime = time.time() while self.is_running() and (time.time() - starttime) < 10.0: self.wait(0.1) if flipped_switch(self.switch): self.finish(2) ``` This game will wait ten seconds for the user to flip the switch. If they do, it awards them two points using the self.finish() call. self.finish() sets the number of points earned for this game-- a positive number of points is a "win"; zero or fewer points is a "loss". 
self.is_running() returns false if the game has been cancelled or 'finished', so it will return false after self.finish() is called. Thus, the game ends right after the user flips the switch. self.wait() happens to return the value of self.is_running(), so we could make this even simpler: ```python def play_game(self): starttime = time.time() while self.wait(0.1) and (time.time() - starttime) < 10.0: if flipped_switch(): self.finish(2) ``` That's about all there is to creating a game! Go crazy. There are a few things to know before you go too crazy, though: * every game runs in its own thread. Be careful if you have code outside of play_game interacting with the variables in play_game! * make sure play_game will always terminate quickly when it is cancelled! Otherwise your console may end up in a bad place. * later, you'll see how to set the default message that is sent to a console when the game starts. However, you're not stuck with it forever-- you can send out updates while the game is running! Just use the self.update_message() call, like so: ```python self.update_message('Time is running out! Flip switch now!') ``` Creating a message slot ----------------------- Creating a message slot is similar: you just need to subclass a MessageSlot class and override the on_message() function. Here's an example message slot that just prints out the messages sent to it: ```python class SimpleMessageSlot(MessageSlot): def __init__(self, slotname): self.slotname = slotname super(SimpleMessageSlot,self).__init__() def on_message(self,text): if text: print "Simple Slot",self.slotname,"says:",text else: print "Simple Slot",self.slotname,"has been cleared!" ``` That's it! Creating a client ----------------- We're almost there! All we have to do is create a client object and tell it about the message slots and games available. 
Here's a quick example: ```python fc = FutureClient('ws://localhost:8888/socket','switchflipper') fc.available_games = [ FlipTheSwitch('A'), FlipTheSwitch('B') ] fc.message_slots = [ SimpleMessageSlot() ] fc.start() try: while True: time.sleep(1) except: pass fc.quit() ``` The FutureClient() initializer takes two arguments: the websocket URL of the server, and the name of the console. When fc.start() is called, the client starts. Since the client runs in its own thread, fc.start() returns immediately. You can then go into an infinite loop (as we do here), or do any task that your console requires (like checking for button presses). And that's it! Your console is ready to go. Pester me when you run into the inevitable problems! <file_sep><pre> __________________ ( Rotary Console ) ------------------ \ ^__^ \ (oo)\_______ (__)\ )\/\ ||----w | || || </pre> Please add data! Installation ============ Install the RotaryConsole (game) script: sudo cp RotaryConsole.init.d /etc/init.d/RotaryConsole sudo update-rc.d RotaryConsole defaults 99 Reboot: sudo reboot <file_sep>#!/bin/bash for node in {94..99}; do echo "Rebooting Future-Crew on 192.168.1.$node" ssh -i ~/pi-key [email protected].$node "sudo reboot" done <file_sep>from future_client import FutureClient, Game, MessageSlot import time import threading import curses stdscr = curses.initscr() stdscr.nodelay(True) curses.noecho() curses.cbreak() class PressGame(Game): def __init__(self,name,prefix,message,button): self.button = button self.prefix = prefix super(PressGame, self).__init__(name, prefix+": "+message) def play_game(self): if not self.wait(5): return self.update_message(self.prefix+': PRESS BUTTON '+self.button+' NOW!!!') if not self.wait(5): return self.finish(-5,self.prefix+":Too slow!") def on_keypress(self,key): if self.is_running() and key.lower() == self.button.lower(): self.finish(5,self.prefix+":Success") class PressMessageSlot(MessageSlot): def __init__(self, id=None, length=40, x=0, y=0): self.x = x 
self.y = y super(PressMessageSlot, self).__init__(id,length) def on_message(self,text): global stdscr stdscr.move(self.y,self.x) stdscr.clrtoeol() if (text): stdscr.addstr(self.y,self.x,text,curses.A_BLINK|curses.A_BOLD) slots = [ PressMessageSlot(1,50,10,2) ] class FC_curses(FutureClient): def on_drop(self): stdscr.addstr(0,0,"CONNECTION DROP",curses.A_BOLD) import sys if __name__ == '__main__': try: if len(sys.argv)>1: name=sys.argv[1] else: name='test' fc = FC_curses(name=name) games = [ PressGame('pg1',name,'Press button A.','A'), PressGame('pg2',name,'Press button B.','B') ] fc.available_games = games fc.message_slots = slots fc.start() stdscr.addstr(0,0,"Console "+name+" Client running; type 'q' to quit",curses.A_BOLD) while True: c = stdscr.getch() if c > 0: c = chr(c) stdscr.addstr(5,0,"last keypress: "+c) if c == 'q' or c == 'Q': break else: for game in games: game.on_keypress(c) time.sleep(0.05) fc.quit() finally: curses.nocbreak() curses.echo() curses.endwin() <file_sep>from websocket import create_connection, socket import json import time import threading from sys import argv from os import getenv urlstring = None if __name__=='__main__': if not urlstring: urlstring = getenv('SERVER_URL',"ws://localhost:2600/socket") try: socket = create_connection(urlstring,5.0) except socket.error: print "Could not connect to server. Trying again." 
time.sleep(1.5) msg = {} if argv[1] == 'start': msg = {'a':'session_start'} socket.send(json.dumps(msg)) elif argv[1] == 'abort' or argv[1] == 'stop': msg = {'a':'session_abort'} socket.send(json.dumps(msg)) <file_sep>#!/usr/bin/python # # Teletype and pushbutton interface # from future_client import FutureClient, Game, MessageSlot import serial import time import struct import threading import random import teletype_buttons port = serial.Serial("/dev/ttyACM0", timeout=0.01) class Controller: def __init__(self): self.cons = {} self.read = "" #self.port = serial.Serial("/dev/tty.usbmodem12341", timeout=1) def get_buttons(self): key = port.readline() if not key: return print "read: '" + key + "'" self.read = self.read + key port.write(key) return class PressButtonGame(Game): def __init__(self,c): super(PressButtonGame, self).__init__('pressbutton', time=5) self.c = c self.target = None def play_game(self): self.desired = random.choice(teletype_buttons.buttons) self.update_message("TX CODE " + self.desired) print "desired: " + self.desired self.desired = self.desired.lower() start_time = time.time() while self.is_running() and (time.time() - start_time < 10): if not self.c.read.endswith(self.desired + " "): self.wait(0.05) continue print "Button Success! (read '" + self.c.read + "')" self.c.read = "" self.finish(5) port.write(" OK\r\n") return print "Failure!" 
self.finish(-5) def on_start(self): t = threading.Thread(target = self.play_game) self.thread = t t.start() class TeletypeSlot(MessageSlot): def __init__(self, c, id=None, length=60): super(TeletypeSlot, self).__init__(id,length, slow=True) self.c = c def on_message(self,text): if (text): print "Teletyping: ", text port.write(' ' + text) else: print "Done" port.write('\r\n') def on_session_start(self,text): print "New session: ", text port.write("\r\n### New game ###\r\n") def on_session_fail(self,text): print "FAIL" port.write("\r\n### GAME LOST ###\r\n") def on_session_success(self,text): print "WIN" port.write("\r\n### YOUR FUTURE CREW HAS WON! ###\r\n") c = Controller() games = [ PressButtonGame(c), ] slots = [ TeletypeSlot(c), ] class TeletypeClient(FutureClient): def __init__(self,controller): self.c = controller super(TeletypeClient,self).__init__('ws://192.168.1.99:2600/socket', name='TeletypeConsole') def on_session_start(self,message): slots[0].on_session_start(message) def on_session_fail(self,message,score): slots[0].on_session_fail(message) def on_session_success(self,message,score): slots[0].on_session_success(message) import sys if __name__ == '__main__' and len(sys.argv) == 1: #fc = FutureClient('ws://192.168.1.99:2600/socket','TeletypeConsole') fc = TeletypeClient(c) fc.available_games = games fc.message_slots = slots fc.start() try: while True: c.get_buttons() #time.sleep(0.05) except: print "except" fc.quit() else: # test mode # do nothing print "test" <file_sep> ____ _ _ ____ _ | _ \ __ _| |_ ___| |__ | _ \ __ _ _ __ ___| | | |_) / _` | __/ __| '_ \| |_) / _` | '_ \ / _ \ | | __/ (_| | || (__| | | | __/ (_| | | | | __/ | |_| \__,_|\__\___|_| |_|_| \__,_|_| |_|\___|_| The patch panel console has 35 BNC connectors that can be "patched" with cables and eight switches that can be toggled. The firmware determines which ports are connected to which other ports and prints on the serial port messages of the form: [switches] [from:to] [from:to]... 
Sample output with four switches set and three cables plugged in: 17 A7:B2 B6:F7 F4:F5 The switches are an 8-bit bitmap. The names of the ports correspond to the IO port on the Teensy++ that is used. The front panel should have its names redone to be funnier. The games are: Activate [switch x]! Disable [switch x]! Wiggle [switch x]! (meaning toggle it, and then toggle it back) [verb] [input] to [other input]! Patch, connect, route, reroute, wire, introduce, bridge, hook up, plug, span, affix [verb] [input]! Sever, disconnect, pull the plug on, disable, eliminate, separate, bisect Disconnect all patches! Installation ============ Install the invasion (display) script: sudo cp ../VidEditConsole/invasion.init.d /etc/init.d/invasion sudo update-rc.d invasion defaults 99 Install the PatchConsole (game) script: sudo cp PatchConsole.init.d /etc/init.d/PatchConsole sudo update-rc.d PatchConsole defaults 99 Reboot: sudo reboot A A A A A A A A A A <file_sep><pre> __________________ ( Teletype Console ) ------------------ \ ^__^ \ (oo)\_______ (__)\ )\/\ ||----w | || || </pre> Please add data! Installation ============ Install the TeletypeConsole (game) script: sudo cp TeletypeConsole.init.d /etc/init.d/TeletypeConsole sudo update-rc.d TeletypeConsole defaults 99 Reboot: sudo reboot <file_sep>#!/usr/bin/python # toy piano Future Crew client! 
# # toy piano sends midi notes 48 - 72 (C - C - C) from future_client import FutureClient, Game, MessageSlot from matrix_orbital_lcd import MatrixOrbitalLCD import time import sys import pygame import pygame.midi import pygame.mixer # sound output from random import random class TinySongGame(Game): def __init__(self, controller, song_name, song_notes): super(TinySongGame, self).__init__( 'TinySongGame'+song_name, song_name + ' ON PIANO!') self.c = controller self.song_name = song_name self.song_notes = song_notes self.timeLimit = 20.0 self.warningTime = 17.0 self.GPO_BAD = 1 self.GPO_GOOD = 3 self.c.lcd.backlight(True) self.c.lcd.gpo(self.GPO_BAD,False) self.c.lcd.gpo(self.GPO_GOOD,False) def play_game(self): starttime = time.time() mistakes = 0 match_idx = 0 lost = False self.c.flushMidi() # make sure there's no old notes queued up self.c.lcd.backlight(True) # make sure LCD light is on and not blinking self.c.lcd.gpo(self.GPO_BAD,False) #self.c.lcd.gpo(self.GPO_GOOD,False) --- DON'T clear the GOOD lamp, let it keep blinking from previous success for a little while #self.c.lcd.brightness(128) while self.is_running(): if not self.wait(0.05): return if (self.c.midi.poll()): message = self.c.midi.read(1) if (self.c.matchNotes(self.song_notes[match_idx], message[0][0][1], 'octave')): match_idx += 1 if match_idx == len(self.song_notes): print 'YES' self.c.sound('yes') self.c.flushMidi() #self.c.lcd.brightness(255) self.c.lcd.gpoBlink(self.GPO_GOOD, 0.1, 0.55) self.finish(1) else: match_idx = 0 print 'NO' self.c.sound('no') #self.c.lcd.blink(0.1, 0.35) self.c.lcd.gpoBlink(1, 0.15, 0.4) mistakes += 1 self.c.flushMidi() if (mistakes > 3): self.finish(0) if ((time.time()-starttime) > self.warningTime): #self.c.lcd.blink(0.1) self.c.lcd.gpoBlink(self.GPO_BAD, 0.1) if (not lost and (time.time()-starttime) > self.timeLimit): print 'OUT OF TIME' lost = True self.c.sound('timeout') sys.stdout.flush() #self.c.lcd.backlight(False) self.c.lcd.gpo(self.GPO_BAD,False) if 
((time.time()-starttime) > self.timeLimit + 0.5): self.finish(0) class PlayChords(Game): def __init__(self, controller, whichChord, key): super(PlayChords, self).__init__( 'PlayChords'+str(whichChord), controller.chordName(whichChord, key) + ' CHORD ON PIANO!') self.controller = controller self.correctChord = self.controller.getChordSequence(whichChord, key) self.timeLimit = 8.0 self.warningTime = 4.0 self.GPO_BAD = 1 self.GPO_GOOD = 3 self.controller.lcd.backlight(True) self.controller.lcd.gpo(self.GPO_BAD,False) self.controller.lcd.gpo(self.GPO_GOOD,False) def play_game(self): starttime = time.time() mistakes = 0 lost = False self.controller.flushMidi() # make sure there's no old notes queued up self.controller.lcd.backlight(True) # make sure LCD light is on and not blinking self.controller.lcd.gpo(self.GPO_BAD,False) while self.is_running(): if not self.wait(0.05): return if (self.controller.midi.poll()): you_lost = False message = self.controller.midi.read(3) if len(message) == len(self.correctChord): chord = [message[0][0][1] , message[1][0][1], message[2][0][1]] if (self.controller.matchChords(self.correctChord, chord)): print 'YES' self.controller.sound('yes') self.controller.flushMidi() self.controller.lcd.gpoBlink(self.GPO_GOOD, 0.1, 0.55) self.finish(1) else: you_lost = True else: you_lost = True if you_lost == True: print 'NO' self.controller.sound('no') self.controller.lcd.gpoBlink(1, 0.15, 0.4) mistakes += 1 self.controller.flushMidi() if (mistakes > 3): self.finish(0) if ((time.time()-starttime) > self.warningTime): self.controller.lcd.gpoBlink(self.GPO_BAD, 0.1) if (not lost and (time.time()-starttime) > self.timeLimit): print 'OUT OF TIME' lost = True self.controller.sound('timeout') sys.stdout.flush() self.controller.lcd.gpo(self.GPO_BAD,False) if ((time.time()-starttime) > self.timeLimit + 0.5): self.finish(0) class PlayOneNote(Game): def __init__(self, controller, whichNote): super(PlayOneNote, self).__init__( 'PlayOneNote'+str(whichNote), 
controller.noteName(whichNote) + ' ON PIANO!') self.c = controller self.whichNote = whichNote self.timeLimit = 5.0 self.warningTime = 2.5 self.GPO_BAD = 1 self.GPO_GOOD = 3 self.c.lcd.backlight(True) self.c.lcd.gpo(self.GPO_BAD,False) self.c.lcd.gpo(self.GPO_GOOD,False) def play_game(self): starttime = time.time() mistakes = 0 lost = False self.c.flushMidi() # make sure there's no old notes queued up self.c.lcd.backlight(True) # make sure LCD light is on and not blinking self.c.lcd.gpo(self.GPO_BAD,False) #self.c.lcd.gpo(self.GPO_GOOD,False) --- DON'T clear the GOOD lamp, let it keep blinking from previous success for a little while #self.c.lcd.brightness(128) while self.is_running(): if not self.wait(0.05): return if (self.c.midi.poll()): message = self.c.midi.read(1) if (self.c.matchNotes(self.whichNote, message[0][0][1], 'octave')): print 'YES' self.c.sound('yes') self.c.flushMidi() #self.c.lcd.brightness(255) self.c.lcd.gpoBlink(self.GPO_GOOD, 0.1, 0.55) self.finish(1) else: print 'NO' self.c.sound('no') #self.c.lcd.blink(0.1, 0.35) self.c.lcd.gpoBlink(1, 0.15, 0.4) mistakes += 1 self.c.flushMidi() if (mistakes > 3): self.finish(0) if ((time.time()-starttime) > self.warningTime): #self.c.lcd.blink(0.1) self.c.lcd.gpoBlink(self.GPO_BAD, 0.1) if (not lost and (time.time()-starttime) > self.timeLimit): print 'OUT OF TIME' lost = True self.c.sound('timeout') sys.stdout.flush() #self.c.lcd.backlight(False) self.c.lcd.gpo(self.GPO_BAD,False) if ((time.time()-starttime) > self.timeLimit + 0.5): self.finish(0) class ToyPianoConsole: def __init__(self): pygame.init() pygame.midi.init() self.midi = pygame.midi.Input(3, 0) self.lcd = MatrixOrbitalLCD() pygame.mixer.init() self.soundList = [ ('yes', 'Alert Tone 22.ogg'), ('no', 'Exclamation Tone 32.ogg'), ('timeout', 'Error Tone 37.ogg') ] self.sounds = dict((n, pygame.mixer.Sound('sounds/'+f)) for (n,f) in self.soundList) self.whiteKeys = [0,2,4,5,7,9,11] self.allKeys = range(0, 12) self.noteNames = { 'terse': { 'sharps': 
['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'], 'flats' : ['C', 'Db', 'D', 'Eb', 'E', 'F', 'Gb', 'G', 'Ab', 'A', 'Bb', 'B'] }, 'verbose': { 'sharps': ['C', 'C SHARP', 'D', 'D SHARP', 'E', 'F', 'F SHARP', 'G', 'G SHARP', 'A', 'A SHARP', 'B'], 'flats': ['C', 'D FLAT', 'D', 'E FLAT', 'E', 'F', 'G FLAT', 'G', 'A FLAT', 'A', 'B FLAT', 'B'] } } # use up incoming notes def flushMidi(self): while (self.midi.poll()): self.midi.read(1) # name of note from MIDI note number def noteName(self, whichNote, accidentals='both', verbose=True): n = whichNote % 12 if (accidentals=='both'): if (random() > 0.5): accidentals='flats' else: accidentals='sharps' if (verbose): v = 'verbose' else: v = 'terse' return self.noteNames[v][accidentals][whichNote] def chordName(self, whichNote, key='both', accidentals='flats', verbose=False): note = self.noteName(whichNote, accidentals, verbose) if (key=='both'): if (random() > 0.5): key='MAJOR' else: key='MINOR' return (note + " " + key) # play a sound by key def sound(self, key): if (self.sounds[key]): self.sounds[key].play() def getChordSequence(self, whichChord, key): if key == 'MAJOR': chord_sequence = [whichChord, (whichChord+4) % 12, (whichChord+7) % 12] else: chord_sequence = [whichChord, (whichChord+3) % 12, (whichChord+7) % 12] return chord_sequence # check if note numbers match # options: exact, octave (match % 12) def matchNotes(self, n1, n2, options='octave'): if (options=='octave'): return ((n1 % 12) == (n2 % 12)) elif (options=='exact'): return (n1 == n2) else: return False def matchChords(self, list1, list2): for i in range(0,len(list2)): list2[i] = list2[i] % 12 return sorted(list1) == sorted(list2) class SimpleMessageSlot(MessageSlot): def __init__(self, slotname): self.slotname = slotname super(SimpleMessageSlot,self).__init__() def on_message(self,text): if text: print "Simple Slot",self.slotname,"says:",text else: print "Simple Slot",self.slotname,"has been cleared!" 
class LCDMessageSlot(MessageSlot): def __init__(self, slotname, lcd): self.slotname = slotname self.lcd = lcd; super(LCDMessageSlot,self).__init__() def on_message(self,text): if text: self.lcd.cls() self.lcd.lcdprintln(text) else: self.lcd.cls() controller = ToyPianoConsole() songs = [ #("ROW YOUR BOAT", [0, 0, 0, 2, 4]), # Row Row Row Your Boat - C C C D E ("E-I-E-I-O", [4, 4, 2, 2, 0]), # E-I-E-I-O - E E D D C ("<NAME>", [7, 9, 11, 7]), # <NAME> - G A B G ("TW<NAME>", [7, 7, 2, 2]), # Twinkle Twinkle Little Star - G G D D ("ALOUETTE", [0, 2, 4, 4]), # Alouette - C D E E ("BEETHOVEN", [9, 9, 9, 5]), # Beethoven's 5th - A A A F #("<NAME>", [2, 2, 4, 2, 7, 5]), # Happy Birthday - D D E D G F ] chords = [ [0, 'MAJOR'], [4, 'MAJOR'], [9, 'MAJOR'], [2, 'MINOR'], # [7, 'MINOR'], [5, 'MINOR'] ] fc = FutureClient(name="ToyPianoClient", urlstring="ws://192.168.1.99:2600/socket") fc.available_games = [PlayOneNote(controller, i) for i in controller.allKeys] fc.message_slots = [LCDMessageSlot('PrintSlot', controller.lcd)] fc.start() try: while 1: controller.lcd.update() time.sleep(0.05) except: fc.quit() <file_sep><pre> __________________ ( Octoscroller Console ) ------------------ \ ^__^ \ (oo)\_______ (__)\ )\/\ ||----w | || || </pre> Installation ============ # This is not accurate, since octoscroller is a beaglebone. # However, if you were to run it on a Pi that takes to the scroller # you can do this. 
Install the OctoscrollerConsole (game) script: sudo cp OctoscrollerConsole.init.d /etc/init.d/OctoscrollerConsole sudo update-rc.d OctoscrollerConsole defaults 99 Reboot: sudo reboot <file_sep>from future_client import FutureClient, Game, MessageSlot import time import threading count = 0 def next_id(): global count count += 1 return str(count) class NothingGame(Game): def __init__(self,name,message): super(NothingGame, self).__init__(name, message) def play_game(self): self.update_message(self.id+': message '+next_id()) if not self.wait(2): return self.update_message(self.id+': further message '+next_id()) if not self.wait(2): return self.finish(1) games = [ NothingGame('A','Initial Message A'), NothingGame('B','Initial Message B'), ] if __name__ == '__main__': fc = FutureClient(name='basic transmission client',max_games=3) fc.available_games = games fc.message_slots = [] fc.start() try: while True: time.sleep(0.05) finally: fc.quit() <file_sep>#!/usr/bin/python from subprocess import call import time def make_static(): call(['tvservice','--explicit=CEA 16']) def set_res(x,y): call(['fbset', '-xres', str(x), '-yres', str(y)]) def make_normal(x,y): call(['tvservice','--sdtv=NTSC 4:3']) set_res(x,y+1) set_res(x,y) print "Let them eat static." make_static() time.sleep(0.5) make_normal(320,200) <file_sep>// wire all 25 teensy 2.0 inputs as piano keys // -- sends keypresses or MIDI notes depending on whether MIDI_MODE is defined below. 
// requires debounce library // http://playground.arduino.cc/code/bounce #include <Bounce.h> // Fix bug with arduino IDE preprocessor - insert me on top of your arduino-code // From: http://www.a-control.de/arduino-fehler/?lang=en (now dead) // I found it at http://subethasoftware.com/2013/04/09/arduino-compiler-problem-with-ifdefs-solved/ #if 1 __asm volatile ("nop"); #endif // end of fix // comment out for keyboard mode, uncomment for midi mode #define MIDI_MODE #define NUM_KEYS 25 #define DEBOUNCE_MSEC 20 #ifdef MIDI_MODE int lownote = 48; #else char keys[NUM_KEYS] = { 'z','s','x','d','c', 'v','g','b','h','n','j','m', 'q','2','w','3','e', 'r','5','t','6','y','7','u','i' }; #endif Bounce *bounce[NUM_KEYS]; void setup() { for (int i=0; i<NUM_KEYS; i++) { pinMode(i, INPUT_PULLUP); bounce[i] = new Bounce( i, DEBOUNCE_MSEC ); } } void loop() { // Update the debouncers for (int i=0; i<NUM_KEYS; i++) { bounce[i]->update(); if (bounce[i]->fallingEdge()) { #ifdef MIDI_MODE usbMIDI.sendNoteOn(lownote+i, 127, 1); #else Keyboard.print(keys[i]); #endif } #ifdef MIDI_MODE if (bounce[i]->risingEdge()) { usbMIDI.sendNoteOff(lownote+i, 0, 1); } #endif } } <file_sep>The protocol for future crew is entirely unidirectional. Notation note: a 'game' is a task carried out on a console. A 'session' is what players think of as a game; a few people playing Future Crew together. Registration ============ Console to server. Must be the first transaction. ``` { 'a' : 'register' 'name' : _console name as string_ } ``` Unregistration ============== There is no unregistration message; the client merely closes the socket. Starting and Stopping: Session Control ====================================== Games are started or aborted with these messages. ``` { 'a':'session_start' } ``` ``` { 'a':'session_abort' } ``` Session update ============== Server to console. Sent when game is starting, game is over, or potentially for in-game events (like "Level 2!!!"). 
Whenever a session update is received, all in-progress games should
immediately be cancelled and all slots cleared.

```
{
    'a': 'session_update'
    'state': 'starting', 'won', 'lost', 'reset', or 'update'
    'message': _a message to display to all clients_
    'score': _total session score_
}
```

Status
======

Console to server. Report the console's current status. If a status
packet is not received within two seconds, the console is determined to
have timed out and is dropped (all in-progress games being decided
randomly).

```
Query:
{
    'a' : 'status'
    'avail_slots': _list of available message slots_
    'avail_games': _suggested games_
    'avail_glitches': _available glitches_
    'bored': _boolean; true if the console is waiting for a new game_
}
```

Glitch Slots
------------

The glitch slot describes a glitch that can run on the console.

```
{
    'glitchid': _identifier of the glitch_
    'difficulty': _0 is completely cosmetic, 10 is game-ruining_
    'duration': _glitch duration in seconds_
}
```

Message Slots
-------------

Message slot objects describe an available space on the console for
displaying a message. It is usually characterized by a length.

```
{
    'slotid': _identifier for this slot_
    'slow': _optional, True if this terminal is slow (like a teletype)_
    'len': _numeric width of available slot in characters_
}
```

Available Games
---------------

Available game objects represent potential games this console can play
at this time.

```
{
    'level': _optional; numeric difficulty_
    'short': _optional; should be true if this game is given less than 10 seconds_
    'time': _optional; maximum time to accomplish game_
    'gameid': _id of this game_
}
```

Messages
========

Server to console. Messages fill message slots. They can be posted to
fill slots that are currently empty, overwrite full slots, or release
slots.

```
{
    'a': 'message'
    'slotid': _identifier for this slot, as in message slot object_
    'text': _text to display in the slot; null to free slot_
    'level': _optional; numeric.
0 for normal message, negative for disposable, 1+ for ALERT_ 'success': _only displayed if this is a success/failure message; boolean value that is True if the game was won, False otherwise_ } ``` Announcements ============= Announcement for scroller or whatnot ``` { 'a': 'announcement' 'name': _name of console that announcement pertains to (optional)_ 'message': _text of message_ 'game_score': _number of points won in this game (negative for a loss)_ 'score': _current game score_ } ``` Glitches ======== Server to console. Run a glitch which disrupts gameplay to a greater or lesser extent. ``` { 'a': 'glitch' 'glitchid': _identifier for this glitch, as in glitch slot object_ } ``` Game Update =========== Console to server. Game updates are posted while a game is in progress, or after it has been won or lost. It can be used to update the message displayed on the remote console as well. ``` { 'a': 'update' 'gameid': _id of this game_ 'message': _replacement message text (alert! hurry up!)_ 'running': _boolean, true if game in progress; if false, result included_ 'result': _boolean; true if won, false if lost_ 'score': _numeric, optional; for spectacular win or massive fail_ } ``` Game Control ============ Server to console. A game control message is sent to start or cancel a game. 
``` { 'a': 'control' 'operation': _string, either 'start' or 'cancel'_ 'game': _entire game object as above_ } ``` <file_sep>from websocket import create_connection, socket, WebSocketException import json import time import threading import socket import sys import logging scriptname = sys.argv[0].replace('.py','') logging.basicConfig(filename='/var/log/fc/{}.log'.format(scriptname),format='%(asctime)s %(levelname)s:%(message)s',level=logging.DEBUG) from os import getenv class RegistrationError(Exception): pass def next_id(): '''Convenience for autogenerating IDs for lazy programmers''' id = next_id.nid next_id.nid += 1 return id next_id.nid = 0 class MessageSlot(object): '''A text display for a message from the server. Each must have a console-unique id.''' def __init__(self,id=None,length=40,slow=None): if id == None: id = next_id() self.id = id self.length = length self.in_use = False self.text = None self.slow = slow def message(self,text): self.in_use = bool(text) self.text = text self.on_message(text) def on_message(self,text): logging.info("MESSAGE: {}".format(text)) def jsonable(self): m = { 'id':self.id, 'len':self.length } if self.slow != None: m['slow'] = self.slow return m class Game(object): ''' A Game object represents a game that is played on the console. All you need to do to create a game is subclass Game and implement the play_game method. Games always run in their own threads. During the play_game method, you should check the self.running variable. If it has been set to False, it means that the game has been cancelled and the method should exit as quickly as possible. ''' def __init__(self, gameid, message=None, short=None, level=None, time=None): '''Create a game object. Games must have unique game ids as well as an initial message string. 
Games can change the displayed string after they start running.''' self.id = gameid self.thread = None self.score = None self.resultmsg = None self.start_time = 0 self.message = message self.short = short self.level = level self.time = time self.supress_msg = False self.exit_evt = threading.Event() self.exit_evt.set() def start(self,client): self.client = client self.start_time = time.time() t = threading.Thread(target = self.play_game_wrapper) self.thread = t t.start() def play_game_wrapper(self): self.exit_evt.clear() self.score = None if self.message: self.update_message(self.message) self.play_game() if self.score == None: self.score = 0 won = self.score > 0 msg = { 'a':'update', 'gameid': self.id, 'running': False, 'result': won, 'score': self.score } if self.resultmsg: msg['message'] = self.resultmsg if not self.supress_msg: try: self.client.socket.send(json.dumps(msg)) except: pass self.supress_msg = False self.exit_evt.set() def wait(self,how_long): return not self.exit_evt.wait(how_long) def is_running(self): return not self.exit_evt.is_set() def cancel(self): self.finish(0) def reset(self): if self.is_running(): self.supress_msg = True self.finish(0) def finish(self,score,resultmsg=None): self.resultmsg = resultmsg if self.score == None: self.score = score self.exit_evt.set() def play_game(self): ''' play_game should check self.is_running() to make sure it exits quickly once the game has been cancelled. Make sure your game eventually ends! If you need to wait for an set amount of time to pass while the game is running, use the self.wait() method instead of time.sleep() or similar functions. self.wait() will immediately return when the game is cancelled. It will also return True if the game is still running, or False if it has been cancelled. play_game should call self.finish(score) and return when the game is completed (one way or the other). Calling finish multiple times will result in the first indicated score being used. 
If finish is not called before the game ends, the score will be assumed to be 0 (a loss). ''' raise Exception("play_game must be implemented!") def jsonable(self): d = { 'gameid':self.id } if self.level != None: d['level'] = self.level if self.short != None: d['short'] = self.short if self.time != None: d['time'] = self.time return d def update_message(self, new_msg): msg = { 'a':'update', 'gameid': self.id, 'running': True, 'message': new_msg } self.client.socket.send(json.dumps(msg)) class FutureClient(object): """A FutureClient is a forward-thinking class ready to take on the risks and responsibilities that the future offers. Are you ready to extend FutureClient?""" def __init__(self,urlstring = None,name='Generic Client',max_games=1): logging.info("Initializing FutureClient.") if not urlstring: urlstring = getenv('SERVER_URL',"ws://192.168.1.99:2600/socket") self.name = name self.socket = None self.urlstring = urlstring self.message_slots = set() self.available_games = set() self.max_games = max_games self.started = False self.connect() def connect(self): while (self.socket == None): try: self.socket = create_connection(self.urlstring,5.0) except socket.error: logging.info("Could not connect to server. 
Trying again.") time.sleep(1.5) msg = {'a':'register','name':self.name} self.socket.send(json.dumps(msg)) self.state = 'reset' self.cmdmap = { 'message': self.on_message, 'control': self.on_control, 'session_update': self.on_session, 'announcement': self.on_announcement, } def on_message(self, msg): slotid = msg['slotid'] slot = next(s for s in self.message_slots if s.id == slotid) slot.message(msg['text']) def on_control(self, msg): gameid = msg['game']['gameid'] game = next(g for g in self.available_games if g.id == gameid) if msg['operation'] == 'start': game.start(self) elif msg['operation'] == 'cancel': game.cancel() def on_session_start(self,message): pass def on_session_fail(self,message,score): pass def on_session_success(self,message,score): pass def on_announcement(self,message): pass def on_drop(self): "Notification that server has dropped. Will reconnect after call." logging.info("Connection dropped; reconnecting.") def on_session(self, msg): "Respond to a session update by cancelling all games and clearing messages" for x in self.available_games: if x.is_running(): x.reset() for x in self.message_slots: x.message('') self.state = msg['state'] score = msg['score'] message = msg['message'] if self.state == 'starting': self.on_session_start(message) elif self.state == 'lost': self.on_session_fail(message,score) elif self.state == 'reset': self.on_session_fail(message,score) elif self.state == 'won': self.on_session_success(message,score) def status(self): running_games = [x.jsonable() for x in self.available_games if x.is_running()] msg = { 'a':'status', 'avail_slots':[x.jsonable() for x in self.message_slots if not x.in_use], 'avail_games':[x.jsonable() for x in self.available_games if not x.is_running()], 'bored':len(running_games) < self.max_games } if self.max_games <= len(running_games): msg['avail_games'] = [] self.socket.send(json.dumps(msg)) def poll(self,timeout=-1): if timeout != -1: self.socket.settimeout(timeout) try: msgs = self.socket.recv() 
if msgs: msg = json.loads(msgs) if self.cmdmap.has_key(msg['a']): self.cmdmap[msg['a']](msg) except socket.timeout: pass def start(self): self.thread = threading.Thread(target=self.run) self.thread.start() return self.thread def run(self): self.socket.settimeout(0.1) self.started = True while self.started: try: self.poll() self.status() except (WebSocketException, socket.error): # abort all games for game in self.available_games: game.cancel() self.on_drop() self.socket = None self.connect() def stop(self): self.started = False def quit(self): # abort all games for game in self.available_games: game.cancel() self.stop() if self.thread: self.thread.join() self.socket.close() <file_sep>#!/usr/bin/python from future_client import FutureClient, Game, MessageSlot from matrix_orbital_lcd import MatrixOrbitalLCD import time import sys import serial from random import random class OneDigitGame(Game): def __init__(self, controller, digit): super(OneDigitGame, self).__init__( 'OneDigitGame'+str(digit), 'Dial '+str(digit)+'!') self.c = controller self.digit = digit self.timeLimit = 7.0 self.warningTime = 5.0 self.GPO_BAD = 1 self.GPO_GOOD = 3 self.c.lcd.backlight(True) self.c.lcd.gpo(self.GPO_BAD,False) self.c.lcd.gpo(self.GPO_GOOD,False) self.c.lcd.brightness(128) def play_game(self): starttime = time.time() mistakes = 0 lost = False self.c.lcd.backlight(True) # make sure LCD light is on and not blinking self.c.lcd.gpo(self.GPO_BAD,False) while self.is_running(): if not self.wait(0.05): return input_digit = self.c.get_digit() if input_digit: if input_digit == self.digit: print 'YES' self.c.lcd.gpoBlink(self.GPO_GOOD, 0.1, 0.55) self.finish(1) else: print 'NO' self.c.lcd.gpoBlink(1, 0.15, 0.4) mistakes += 1 if (mistakes > 3): self.finish(0) if (not lost and (time.time()-starttime) > self.timeLimit): print 'OUT OF TIME' self.c.lcd.gpo(self.GPO_BAD,False) lost = True sys.stdout.flush() if ((time.time()-starttime) > self.timeLimit + 0.5): self.finish(0) class 
PhonebookGame(Game): def __init__(self, controller, person_name, person_number): super(PhonebookGame, self).__init__( 'PhonebookGame'+person_name, 'Dial '+ person_name + ' !') self.c = controller self.person_name = person_name self.person_number = person_number self.timeLimit = 12.0 self.warningTime = 10.0 self.GPO_BAD = 1 self.GPO_GOOD = 3 self.c.lcd.backlight(True) self.c.lcd.gpo(self.GPO_BAD,False) self.c.lcd.gpo(self.GPO_GOOD,False) self.c.lcd.brightness(128) def play_game(self): starttime = time.time() mistakes = 0 match_idx = 0 lost = False self.c.lcd.backlight(True) # make sure LCD light is on and not blinking self.c.lcd.gpo(self.GPO_BAD,False) while self.is_running(): if not self.wait(0.05): return input_digit = self.c.get_digit() if input_digit: if input_digit == self.person_number[match_idx]: match_idx += 1 if match_idx == len(self.person_number): print 'YES' self.c.lcd.gpoBlink(self.GPO_GOOD, 0.1, 0.55) self.finish(1) else: match_idx = 0 print 'NO' self.c.lcd.gpoBlink(1, 0.15, 0.4) mistakes += 1 if (mistakes > 3): self.finish(0) if (not lost and (time.time()-starttime) > self.timeLimit): print 'OUT OF TIME' self.c.lcd.gpo(self.GPO_BAD,False) lost = True sys.stdout.flush() if ((time.time()-starttime) > self.timeLimit + 0.5): self.finish(0) class RotaryConsole: def __init__(self): self.cons = {} self.port = serial.Serial("/dev/ttyUSB0", timeout=3) self.lcd = MatrixOrbitalLCD() def get_digit(self): digit = self.port.readline().strip() if not digit or not digit.isdigit(): return None return int(digit) class SimpleMessageSlot(MessageSlot): def __init__(self, slotname): self.slotname = slotname super(SimpleMessageSlot,self).__init__() def on_message(self,text): if text: print "Simple Slot",self.slotname,"says:",text else: print "Simple Slot",self.slotname,"has been cleared!" 
class LCDMessageSlot(MessageSlot): def __init__(self, slotname, lcd): self.slotname = slotname self.lcd = lcd; super(LCDMessageSlot,self).__init__() def on_message(self,text): if text: self.lcd.cls() self.lcd.lcdprintln(text) else: self.lcd.cls() controller = RotaryConsole() people = [("the President", [3, 4, 7]), ("LOL", [5, 6, 5]), ("WTF", [9, 8, 3]), ("the NSA", [6, 7, 2]), ("the FBI", [3, 2, 4]), ("the KGB", [5, 4, 2]), ("the A-Team", [6, 7, 8]), ("your lawyer", [5, 4, 3, 7]), ("<NAME>", [9, 4, 6, 2]), ("not found", [4, 0, 4]), ("<NAME>", [0, 0, 7]), ("your mother", [6, 6, 6]), ("Moviefone", [4, 1, 1]), ("Razor and Blade", [1, 3, 3, 7]), ("<NAME>", [2, 6, 0, 0]), ("the Devil", [6, 6, 6]), ("Jenny", [8, 6, 7, 5, 3, 0, 9]), ("Murder", [6]), ("Pi", [3, 1, 4]), ("the Ghostbusters", [6, 0, 2])] fc = FutureClient(name="RotaryConsole", urlstring="ws://192.168.1.99:2600/socket") fc.available_games = [OneDigitGame(controller, i) for i in range(1, 11)] + [PhonebookGame(controller, i[0], i[1]) for i in people] fc.message_slots = [LCDMessageSlot('PrintSlot', controller.lcd)] fc.start() try: while 1: controller.lcd.update() time.sleep(0.05) except: fc.quit() <file_sep>import LedStrips import optparse from array import array import time import serial import threading console_map = { 'ToyPianoClient':0, 'PatchConsole':1, 'VidEditConsole':2, 'TeletypeConsole':3, 'RotaryConsole':4 } def match_console(console): try: return console_map[console] except: return 0 #globals running = False mode = None queue = [] queueLock = threading.RLock() ###################################################### #### Strip pattern control functions (\/ below \/) ### #color_black = chr(0) + chr(0) + chr(0) #color_light_blue = chr(0) + chr(5) + chr(0) attract_pattern = [ 0,0,0, #black 0,5,0, #light blue 0,10,5, #light blue 5,20,5, #light blue 10,50,5, #blue 10,60,10, #light white 20,80,20, #light white 20,100,60, #light white 50,150,100, #white 20,100,60, #light white 20,80,20, #light white 10,60,10, 
#light white 10,50,5, #blue 5,20,5, #light blue 0,10,5, #light blue # 0,5,0, #light blue # 0,0,0, #black ] strip_length = 160 strip_count = 8 strip = None # intialize 'compiled' attract pattern compiled_attract = [] def make_frame(offset): frame = [] for row in range(0, 160 + len(attract_pattern)/3): for col in range(0, strip_count): start = ((row+offset)%(len(attract_pattern)/3))*3 frame += attract_pattern[start:start+3] return strip.compile(frame) redpat = array('B',[0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]) * strip_length whitepat = array('B',[0xff, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0xff, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0xff, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b]) * strip_length clearpat = array('B',[0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]) * strip_length attract_j = 0 def attract(): "Make a lovely wave pattern" global attract_j global mode if not strip: return patt_len = len(attract_pattern)/3 if not compiled_attract: for i in range(patt_len): compiled_attract.append(make_frame(i)) mode = 'attract' class AttractThread(threading.Thread): def run(self): global mode global attract_j while mode == 'attract': #start = (attract_j % patt_len)*strip_count*3 #bytelen = strip_length*strip_count*3 strip.fast_draw(compiled_attract[attract_j]) attract_j -= 1 # flow out if attract_j == -1: attract_j += patt_len time.sleep(0.02) at = AttractThread() at.start() # When a session starts, make a scorebar def session_begin(): if not strip: print "no strip available" return update_score(score) # A score of '0' will be indicated by a bar of 20 LED pixels # it will go up or down as the score changes #score = 20 #data = '' #for row in range(0, score): # for col in range(0, strip_count): # data += chr(75) # data += chr(75) # data += chr(75) #for row in range(score+1, strip_length): # for col in 
range(0, strip_count): # data += chr(0) # data += chr(0) # data += chr(0) #strip.draw(data) scorepixel = array('B',[0xff, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0xff, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0xff, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b, 0x4b]) darkpixel = array('B',[0xff,0,0,0,0,0,0,0, 0xff,0,0,0,0,0,0,0, 0xff,0,0,0,0,0,0,0]) def update_score(score): # Update score tower # currently: ignore score; dark tower data = darkpixel * strip_length strip.fast_draw(data) # Make strip blink red if consoles sends a miss def game_miss(console, score): if not strip: return console = match_console(console) score = score + 20 c = 1 << console consolepixel = array('B',[0xff,0,0,0,0,0,0,0, 0xff,c,c,c,c,c,c,c, 0xff,0,0,0,0,0,0,0]) data = (scorepixel * score) + (consolepixel * (strip_length - score)) strip.fast_draw(data) # Wait. time.sleep(.2) update_score(score) # Make strip blink white if console sends a hit #def hit(): def game_hit(console, score): if not strip: return console = match_console(console) score = score + 20 c = 1 << console consolepixel = array('B',[0xff,c,c,c,c,c,c,c, 0xff,c,c,c,c,c,c,c, 0xff,c,c,c,c,c,c,c]) data = (scorepixel * score) + (consolepixel * (strip_length - score)) strip.fast_draw(data) # Wait. 
time.sleep(.2) update_score(score) # Make all strips blink red if servers declares game is lost def session_lost(): if not strip: return k = 0 # directly create compiled representations: # compiled rep is GRB, each preceded by a 0xff while k < 6: strip.fast_draw(redpat) time.sleep(.3) strip.fast_draw(clearpat) time.sleep(.3) k = k + 1 queue_attract() # Make all strips blink white if servers declare game is won def session_won(): if not strip: return k = 0 while k < 6: # blink 5 times strip.fast_draw(whitepat) time.sleep(.3) strip.fast_draw(clearpat) time.sleep(.3) k = k + 1 queue_attract() # Return to idle/attract mode lightThread = None class TowerThread(threading.Thread): def run(self): global queue global queueLock global running global mode running = True print "Thread starting" while (running): while queue: mode = None queueLock.acquire() if queue: m = queue.pop(0) m() queueLock.release() time.sleep(0.05) # public functions below def queue_attract(): queueLock.acquire() queue.append(attract) queueLock.release() def queue_session_begin(): queueLock.acquire() queue.append(session_begin) queueLock.release() def queue_session_won(): queueLock.acquire() queue.append(session_won) queueLock.release() def queue_session_lost(): queueLock.acquire() queue.append(session_lost) queueLock.release() def queue_game_hit(console,score): queueLock.acquire() queue.append(lambda:game_hit(console,score)) queueLock.release() def queue_game_miss(console,score): queueLock.acquire() queue.append(lambda:game_miss(console,score)) queueLock.release() def stop(): global queue global mode global running queue = [] mode = None running = False def init(serialport): global strip strip=LedStrips.LedStrips(strip_count,0) strip.connect(serialport) print "Initialized strip" TowerThread().start() def shutdown(): stop() if strip: strip.draw([0]*(strip_length*strip_count*3)) #### Strip pattern control functions (/\ above /\) ### ###################################################### if __name__ == 
"__main__": parser = optparse.OptionParser() parser.add_option("-p", "--serialport", dest="serial_port", help="serial port (ex: /dev/ttyUSB0)", default="/dev/tty.usbmodel12341") parser.add_option("-l", "--length", dest="strip_length", help="length of the strip", default=160, type=int) (options, args) = parser.parse_args() strip_length = options.strip_length init(options.serial_port) try: game_miss('VidEditConsole',10) #session_lost() queue_attract() time.sleep(4) queue_game_hit('VidEditConsole',5); time.sleep(2) queue_game_miss('VidEditConsole',-5); time.sleep(2) queue_game_hit('VidEditConsole',5); queue_game_miss('VidEditConsole',-5); time.sleep(2) shutdown() finally: shutdown() <file_sep>#verb_patches = { #'0A': 'Invert', # 'Mid11', #'0C': 'Convert', # 'Mid21', #'19': 'Polarize', # 'Mid31', #'1A': 'Adjust', # 'Mid41', #'0B': 'Replicate', # 'Mid12', #'17': 'Modulate', # 'Mid22', #'18': 'Decimate', # 'Mid32', #'15': 'Reverse', # 'Mid13', #'16': 'Purge', # 'Mid23', #'09': 'Verify', # 'Mid33', #'21': 'Adjust', # 'R11', #'01': 'Polarize', #'R32', #'22': 'Modulate', # 'R41', #'02': 'Invert', # 'R22', #'00': 'Verify', # 'R23', #} noun_patches_rt = { '1D': 'Main servos', # 'Mid51', '1C': 'Secondary servos', # 'Mid61', '1E': 'Tertiary servos', # 'Mid71', '1F': 'Reserve servos', # 'Mid81', '1B': 'Time sensor', # 'Mid52', '06': 'Temperature sensor', # 'Mid62', '04': 'Altitude sensor', # 'Mid72', '20': 'Attitude sensor', #'Mid82', '08': 'Main thruster', # 'Mid53', '07': 'Lateral thruster', # 'Mid63', '05': 'Medial thruster', # 'Mid73', '03': 'Reverse thruster', # 'Mid83', } noun_patches_lt = { '11': '#1 Frombulator', # L 1,1 '12': '#2 Frombulator', # L 1,2 '13': '#3 Frombulator', # L 1,3 '14': '#4 Frombulator', # L 1,4 '10': '#1 Tranmogrifier', # L 2,1 '0F': '#2 Tranmogrifier', # L 2,2 '0E': '#3 Tranmogrifier', # L 2,3 '0D': '#4 Tranmogrifier', # L 2,4 '22': '#1 Perturbulator', '00': '#2 Perturbulator', '21': '#4 Perturbulator', '01': '#1 Magnifier', '02': '#3 Magnifier', } 
switches = ( 'Frappe', 'Liquefy', 'Mash', 'Grind', 'Shred', 'Blend', 'Grate', # 'Crush', ); <file_sep>#!/bin/bash cd /home/pi/Future-Crew/consoles/VidEditConsole script -e -c 'python invasion.py' /dev/null exit $? <file_sep>#!/usr/bin/python -u from future_client import FutureClient, Game, MessageSlot import serial #import serial.tools.list_ports as list_ports import time import struct import threading import random import patches from random import randint class Controller: def __init__(self): self.cons = {} self.switches = 0 self.port = serial.Serial("/dev/ttyACM0", timeout=3) def get_patches(self): keys = self.port.readline().strip() if not keys: return #print "Read:" + keys cons = keys.split(' ') self.switches = int(cons[0], 16) & ~0x80 con_map = {} for con in cons[1:]: fromto = con.split(':') #print fromto[0], '=>', fromto[1] con_map[fromto[0]] = fromto[1] con_map[fromto[1]] = fromto[0] self.cons = con_map return class PatchVerbGame(Game): def __init__(self,c): super(PatchVerbGame, self).__init__('a2b', None) self.c = c def play_game(self): self.patch_from = random.choice(patches.noun_patches_lt.keys()) self.patch_to = random.choice(patches.noun_patches_rt.keys()) #self.patch_from = '17' #always modulate msg = "Patch the " + patches.noun_patches_lt[self.patch_from] + " to the " + patches.noun_patches_rt[self.patch_to] self.update_message(msg) print "Sending: " + msg + "(" + self.patch_from + ":" + self.patch_to + ")" starttime = time.time() while self.is_running() and (time.time()-starttime) < 20.0: if (self.c.cons.get(self.patch_from,' ') != self.patch_to): self.wait(0.05) continue print "Success!" self.finish(5) return print "Failure!" 
self.finish(-5) def on_start(self): t = threading.Thread(target = self.play_game) self.thread = t t.start() # # Simple game to switch off all the blender modes # class AllOffGame(Game): def __init__(self,c): super(AllOffGame, self).__init__('alloff','Disable all blender switches!') self.c = c def play_game(self): if (self.c.switches == 0): self.finish(0) return starttime = time.time() while self.is_running() and (time.time()-starttime) < 10.0: if (self.c.switches != 0): self.wait(0.05) continue print "All switches off!" self.finish(5) return print "Failure!" self.finish(-5) def on_start(self): t = threading.Thread(target = self.play_game) self.thread = t t.start() class ToggleSwitchGame(Game): def __init__(self,c): super(ToggleSwitchGame, self).__init__('sw', None) self.c = c # Should randomly select from a range of choices # be sure that it is the opposite of the current value def operation(self,val): if (val): return "Disable" else: return "Engage" def play_game(self): self.sw_num = randint(0,6) self.start_value = self.c.switches & (1 << self.sw_num) sw_name = patches.switches[self.sw_num] print sw_name, ": ", str(self.start_value) self.update_message(self.operation(self.start_value) + ' ' + sw_name) starttime = time.time() while self.is_running() and (time.time()-starttime) < 10.0: if (self.c.switches & (1 << self.sw_num) == self.start_value): self.wait(0.05) continue print "Success!" 
self.finish(5) return def on_start(self): t = threading.Thread(target = self.play_game) self.thread = t t.start() class StdoutSlot(MessageSlot): def __init__(self, c, id=None, length=40): self.c = c super(StdoutSlot, self).__init__(id,length) def on_message(self,text): print "M: ", text c = Controller() games = [ PatchVerbGame(c), ToggleSwitchGame(c), AllOffGame(c) ] slots = [ #StdoutSlot(c), ] import sys if __name__ == '__main__' and len(sys.argv) == 1: fc = FutureClient('ws://192.168.1.99:2600/socket','PatchConsole') #fc = FutureClient('ws://localhost:2600/socket', name='PatchConsole') fc.available_games = games fc.message_slots = slots fc.start() try: while True: c.get_patches() #time.sleep(0.05) except: fc.quit() else: # test mode # do nothing print "test" <file_sep>#!/usr/bin/python # # Octoscroller message display # from future_client import FutureClient, Game, MessageSlot import serial import time import struct import threading import random import Image, ImageFont, ImageDraw import socket import time, datetime from colorsys import hsv_to_rgb sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) dest = ("localhost", 9999) width = 256 logo = Image.open("nycr.png") disp = Image.new("RGB", (256,16), "black") im = Image.new("RGB", (256,16), "black") im_draw = ImageDraw.Draw(im) font = ImageFont.truetype("spincycle.ttf", 24) timeout = 0 game_over = True scrolling = True scroll_offset = 128 rainbow_cycle = 0 def rainbow(i): rgb = [int(x*256) for x in hsv_to_rgb(i/1024.0,0.8,0.8)] return (rgb[0],rgb[1],rgb[2]) def show_message(c,s): im.paste("black", (0,0,width,16)) im_draw.text((0, -4), s, font=font, fill=c) def send_image(): global scroll_offset if scrolling: disp.paste(im.crop((0,0,scroll_offset,16)), (width-scroll_offset,0)) disp.paste(im.crop((scroll_offset+1,0,width-1,16)), (0,0)) scroll_offset = (scroll_offset + 1) % width else: disp.paste(im, (0,0)) sock.sendto(chr(1) + disp.tostring(), dest) #import console_locations class Controller: def __init__(self): 
self.cons = {} c = Controller() games = [] slots = [] class OctoscrollerClient(FutureClient): def __init__(self,controller): self.c = controller super(OctoscrollerClient,self).__init__('ws://192.168.1.99:2600/socket', name='OctoscrollerConsole') def on_session_start(self,message): #slots[0].on_session_start(message) global scrolling, game_over, timeout scrolling = True game_over = False timeout = 200 print("Received message: '" + message + "'") show_message("white", message) def on_session_fail(self,message,score): global scrolling, game_over, timeout scrolling = True game_over = True timeout = 200 print("fail message: '" + message + "'") show_message("red", message) def on_session_success(self,message,score): global scrolling, game_over, timeout scrolling = True game_over = True timeout = 200 show_message("blue", message) print("success message: '" + message + "'") #slots[0].on_session_success(message) def on_announcement(self,msg): global scrolling, game_over, timeout scrolling = True #game_over = True timeout = 200 print "msg: " + str(msg) show_message("red", msg["message"]) import sys if __name__ == '__main__' and len(sys.argv) == 1: scrolling = True scroll_offset = 0 show_message("blue", " FUTURE CREW: WAIT") im.paste(logo, (0,0)) send_image() fc = OctoscrollerClient(c) fc.available_games = games fc.message_slots = slots fc.start() try: while True: if timeout > 0: # Just keep showing stuff timeout = timeout - 1 if timeout == 0: # Blank the screen show_message("black", "") elif game_over: # Attract mode; flash stuff show_message(rainbow(rainbow_cycle), " Play Future Crew!") im.paste(logo, (0,0)) im.paste(logo, (170,0)) rainbow_cycle = (rainbow_cycle + 1) % 1024; send_image() time.sleep(0.05) except: print "except" fc.quit() else: # test mode # do nothing print "test" <file_sep><pre> /''''''/ /``/ /``/ /''''''''/ /``/ /``/ /`````\ /''''''/ / ____// / // / // /__ __// / // / // / __ || / ____// / /___ / / // / // / //`` / // / // / /_/ // / /___`` / ____// / // / 
// / // / // / // / _ // / ____// / //'''' | \\__/ // / // | \|__/ // / //| || / /___`` /__// \_______// /___// \_______// /__// |__||/______// ``` ``````` ```` ``````` ``` ``` ``````` .--. .--. .--. .-..-..-. ' ..': ..'' '_.': `; `; : `.__.':_; `.__.'`.__.__.' </pre> This is the repository for the "Future Crew" consoles. * docs/ All documentation and specifications that pertain to the system as a whole. * server/ All software that runs on the central Future Crew server. * consoles/ Each console should have its own directory with any design documents, references, or code. * consoles/shared Any documents or code that can be used by multiple consoles. <file_sep>#!/bin/bash for node in {94..99}; do echo "Bringing down Future-Crew on 192.168.1.$node" ssh -i ~/pi-key [email protected].$node "sudo shutdown -h now" done <file_sep>#!/bin/bash for node in {94..99}; do echo "Updating Future-Crew on 192.168.1.$node" ssh -i ~/pi-key [email protected].$node "sudo mkdir /var/log/fc; sudo chown pi /var/log/fc" done <file_sep><pre> ____________________ ( Future Crew Server ) -------------------- \ ^__^ \ (oo)\_______ (__)\ )\/\ ||----w | || || </pre> This is the future crew server. It should run on boot with an init script. Installation ============ Install the server init script: sudo cp FCServer.init.d /etc/init.d/FCServer sudo update-rc.d FCServer defaults 99 Reboot: sudo reboot <file_sep>#!/usr/bin/python # This code is based on a port of the Hello_Triangle example from the # OpenGL ES 2.0 Programming Guide. 
if __name__ == '__main__':
    # Running as a console script: make the shared future_client package
    # (one directory up) importable, and record our pid so the init.d
    # wrapper can stop us later.
    # NOTE(review): assumed the pidfile block belongs inside this guard
    # (the init script reads /tmp/invasion.pid) -- confirm against history.
    import sys
    sys.path.append('..')
    import os
    pid = str(os.getpid())
    # Use a context manager so the pidfile handle is always closed.
    with open('/tmp/invasion.pid', 'w') as pidfile:
        pidfile.write(pid)

from future_client import FutureClient, Game, MessageSlot
from array import array
from pogles.egl import *
from pogles.gles2 import *

import pygame.font as font
from pygame import Surface
import pygame.image
from euclid import Vector3, Matrix4
from math import pi

font.init()
#f = font.Font('./LCDWinTT/LCD-U___.TTF',48)
# Module-level font, measured by truncline()/wrapline() below.
f = font.Font('../Octoscroller/spincycle.ttf',46)

from itertools import chain
import time

# Invader geometry parameters:
# inv_z      - depth of base cube
# inv_inset  - inset of recess
# inv_recess - recess depth
# inv_spike  - spike length
inv_z = 0.4
inv_inset = 0.3
inv_recess = 0.2
inv_spike = 1.2

# Exponential fog density passed to the fragment shader.
fog_density = 0.027


def truncline(text, maxwidth):
    """Truncate `text` at the last break that fits within `maxwidth` pixels.

    Widths are measured with the module-level font `f`.  Returns a tuple
    (consumed, done, line): `consumed` is how many characters of `text`
    were used, `done` is truthy when the whole text fit, and `line` is
    the fitted substring.
    """
    real = len(text)
    stext = text
    l = f.size(text)[0]
    cut = 0
    a = 0
    done = 1
    while l > maxwidth:
        a = a + 1
        n = text.rsplit(None, a)[0]
        if stext == n:
            # No more word breaks left to drop: shave single characters.
            cut += 1
            stext = n[:-cut]
        else:
            stext = n
        l = f.size(stext)[0]
        real = len(stext)
        done = 0
    return real, done, stext


def wrapline(text, maxwidth):
    """Word-wrap `text` into a list of lines no wider than `maxwidth` px."""
    done = 0
    wrapped = []
    while not done:
        nl, done, stext = truncline(text, maxwidth)
        wrapped.append(stext.strip())
        text = text[nl:]
    return wrapped


# Create a shader object, load the shader source, and compile the shader.
def load_shader(shader_type, shader_source):
    # Create the shader object.
    shader = glCreateShader(shader_type)
    if shader == 0:
        return 0
    # Load the shader source.
    glShaderSource(shader, shader_source)
    # Compile the shader.
    glCompileShader(shader)
    # Check the compile status.
compiled, = glGetShaderiv(shader, GL_COMPILE_STATUS) if not compiled: glDeleteShader(shader); raise GLException( "Error compiling shader:\n%s" % glGetShaderInfoLog(shader)) return shader; text_vertex_shader_src=""" attribute vec4 vPosition; attribute vec2 TexCoordIn; varying vec2 TexCoordOut; void main() { gl_Position = vPosition; TexCoordOut = TexCoordIn; } """ text_fragment_shader_src=""" varying mediump vec2 TexCoordOut; uniform sampler2D Texture; void main(void) { gl_FragColor = texture2D(Texture, TexCoordOut); // vec4(0.2,TexCoordOut.x,TexCoordOut.y,1.0) * } """ text_bindings = [(0, 'vPosition'), (1, 'TexCoordIn')] tri_vertex_shader_src = """ uniform mat4 uTransform; uniform mat4 uPerspective; uniform vec3 uLightDir; attribute vec4 aPosition; attribute vec3 aNormal; attribute vec4 aColor; varying vec4 vColor; void main() { vec3 v1 = vec3(uTransform * aPosition); vec3 n1 = vec3(uTransform * vec4(aNormal, 0.0)); vColor = aColor * dot(n1,uLightDir); gl_Position = uPerspective * uTransform * aPosition; vColor[3] = 1.0; } """ tri_fragment_shader_src = """ precision mediump float; uniform float uFogDensity; uniform vec4 uFogColor; varying vec4 vColor; void main() { float z = gl_FragCoord.z/gl_FragCoord.w; float fog = exp2( -1.442695 * uFogDensity * z * z); fog = clamp(fog, 0.0, 1.0); gl_FragColor = vColor * fog + uFogColor *(1.0-fog); } """ tri_bindings = [(0,'aPosition'),(1,'aNormal'),(2,'aColor')] # Create the program and shaders. def create_program(vertex_src,fragment_src,bindings=[]): # Load the vertex/fragment shaders. vertex_shader = load_shader(GL_VERTEX_SHADER, vertex_src) fragment_shader = load_shader(GL_FRAGMENT_SHADER, fragment_src); # Create the program. program = glCreateProgram() if program == 0: return 0 glAttachShader(program, vertex_shader) glAttachShader(program, fragment_shader) for (index,name) in bindings: glBindAttribLocation(program, index, name) # Link the program. glLinkProgram(program) # Check the link status. 
linked, = glGetProgramiv(program, GL_LINK_STATUS) if not linked: glDeleteProgram(program) raise GLException( "Error linking program:\n%s" % glGetProgramInfoLog(program)) glClearColor(0.3,0.3,0.35,1.0) return program class TextSlot(MessageSlot): def __init__(self,program,index,texture): self.prog = program self.idx = index self.tex = texture self.x = 0 self.y = 0 self.queue_text = '' super(TextSlot, self).__init__() def on_message(self,text): if text == None: text = '' self.queue_text = text def update(self): if self.queue_text == None: return elif self.queue_text == '': self.set_text('') self.queue_text = None else: self.set_text(self.queue_text) self.queue_text = None def set_text(self,text): sz = (512,256) s = Surface(sz,pygame.SRCALPHA) s.fill((255,0,0,0)) lines = wrapline(text,512) y = 0 for line in lines: img = f.render(line, True, (255,255,255,100)) (_, h) = img.get_size() s.blit(img,(0,y)) y += h print "blitting",text,"on",self.idx glActiveTexture(GL_TEXTURE0+self.idx) glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, sz[0], sz[1], 0, GL_RGBA, GL_UNSIGNED_BYTE, pygame.image.tostring(s,"RGBA",1)) def draw_text_slot(self): z = 0 e = 0.8 top = bottom = 0 if self.idx == 0: top = e else: bottom = -e vVertices = array('f', [-e, bottom, z, e, bottom, z, -e, top, z, e, top, z,]) vTex = array('f', [ 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0]) vIndices = array('H', [ 3, 2, 0, 1, 3, 0 ]) # Load the vertex data. 
glVertexAttribPointer(0, 3, GL_FLOAT, False, 0, vVertices) glVertexAttribPointer(1, 2, GL_FLOAT, False, 0, vTex) glEnableVertexAttribArray(0) glEnableVertexAttribArray(1) glActiveTexture(GL_TEXTURE0+self.idx) glBindTexture(GL_TEXTURE_2D, self.tex) l = glGetUniformLocation(self.prog,"Texture") glUniform1i(l,self.idx); glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, vIndices) slots = [] def matToList(m): m = m.transposed() return [m.a,m.b,m.c,m.d, m.e,m.f,m.g,m.h, m.i,m.j,m.k,m.l, m.m,m.n,m.o,m.p] translation = Vector3() zstamp = time.time() invrots = [Matrix4.new_identity(), Matrix4.new_rotatex(pi/2), Matrix4.new_rotatex(-pi/2), Matrix4.new_rotatey(pi/2), Matrix4.new_rotatey(-pi/2), Matrix4.new_rotatez(pi/2), Matrix4.new_rotatex(pi)] def add_flat_tri(p, vertices, normals, indices): n = (p[1]-p[0]).cross(p[2]-p[0]).normalize() for i in range(3): vertices += array('f',[p[i].x, p[i].y, p[i].z]) normals += array('f',[n.x, n.y, n.z]) next = len(indices) indices += array('H',[next, next+1, next+2]) def create_invader_element(): vVertices = array('f') vNormals = array('f') vIndices = array('H') r_xy = inv_z - inv_inset r_z = inv_z - inv_recess c = [ Vector3(inv_z,inv_z,inv_z), Vector3(-inv_z,inv_z,inv_z), Vector3(inv_z,-inv_z,inv_z), Vector3(-inv_z,-inv_z,inv_z), Vector3(r_xy,r_xy,r_z), Vector3(-r_xy,r_xy,r_z), Vector3(r_xy,-r_xy,r_z), Vector3(-r_xy,-r_xy,r_z), Vector3(0,0,inv_spike)] add_flat_tri([c[0],c[1],c[4]],vVertices,vNormals,vIndices) add_flat_tri([c[1],c[5],c[4]],vVertices,vNormals,vIndices) add_flat_tri([c[1],c[3],c[5]],vVertices,vNormals,vIndices) add_flat_tri([c[3],c[7],c[5]],vVertices,vNormals,vIndices) add_flat_tri([c[3],c[2],c[7]],vVertices,vNormals,vIndices) add_flat_tri([c[2],c[6],c[7]],vVertices,vNormals,vIndices) add_flat_tri([c[2],c[0],c[6]],vVertices,vNormals,vIndices) add_flat_tri([c[0],c[4],c[6]],vVertices,vNormals,vIndices) add_flat_tri([c[5],c[8],c[4]],vVertices,vNormals,vIndices) add_flat_tri([c[7],c[8],c[5]],vVertices,vNormals,vIndices) 
add_flat_tri([c[6],c[8],c[7]],vVertices,vNormals,vIndices) add_flat_tri([c[4],c[8],c[6]],vVertices,vNormals,vIndices) return (vVertices,vNormals,vIndices) invader_element = create_invader_element() def create_invader(): vvf = array('f') vnf = array('f') vif = array('H') ie = invader_element for i in invrots: (vv,vn,vi) = (array('f',ie[0]),array('f',ie[1]),array('H',ie[2])) while vv: v = Vector3(vv[0],vv[1],vv[2]) vp = i.transform(v) vvf += array('f',[vp.x,vp.y,vp.z]) vv = vv[3:] while vn: n = Vector3(vn[0],vn[1],vn[2]) np = i.transform(n) vnf += array('f',[np.x,np.y,np.z]) vn = vn[3:] next = len(vif) for idx in vi: vif.append(idx+next) return (vvf,vnf,vif) invader = create_invader() def draw_invader(ix,iy,iz): #for i in invrots: # draw_invader_element(i,x,y,z) glEnableVertexAttribArray(0) glEnableVertexAttribArray(1) glVertexAttribPointer(0, 3, GL_FLOAT, False, 0, invader[0]) glVertexAttribPointer(1, 3, GL_FLOAT, False, 0, invader[1]) glDisableVertexAttribArray(2) glVertexAttrib4f(2, 0.7, 0.0, 1.0, 1.0) tloc = glGetUniformLocation(tri_program,"uTransform") m = Matrix4() zdist = ((time.time()-zstamp)/2.0)%4.0 m.translate(ix,iy,iz+zdist) m.rotatex((pi/2)*(zdist)) m.rotatey((pi/2)*(zdist/2)) #m = m*tmat glUniformMatrix4fv(tloc, False, matToList(m)) #print len(vVertices),len(vNormals),len(vIndices) glDrawElements(GL_TRIANGLES, len(invader[2]), GL_UNSIGNED_SHORT, invader[2]) def draw_invader_element(tmat,ix,iy,iz): # invader params # inv_z - depth of base cube # inv_inset - inset of recess # inv_recess - recess depth # inv_spike - spike length glEnableVertexAttribArray(0) glEnableVertexAttribArray(1) glVertexAttribPointer(0, 3, GL_FLOAT, False, 0, invader_element[0]) glVertexAttribPointer(1, 3, GL_FLOAT, False, 0, invader_element[1]) glDisableVertexAttribArray(2) glVertexAttrib4f(2, 0.7, 0.0, 1.0, 1.0) tloc = glGetUniformLocation(tri_program,"uTransform") m = Matrix4() zdist = ((time.time()-zstamp)/2.0)%4.0 m.translate(ix,iy,iz+zdist) m.rotatex((pi/2)*(zdist)) 
m.rotatey((pi/2)*(zdist/2)) m = m*tmat glUniformMatrix4fv(tloc, False, matToList(m)) #print len(vVertices),len(vNormals),len(vIndices) glDrawElements(GL_TRIANGLES, len(invader_element[2]), GL_UNSIGNED_SHORT, invader_element[2]) def draw(program,w,h): # Set the viewport. glViewport(0, 0, width, height) # Clear the color buffer. glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT) glUseProgram(tri_program) loc = glGetUniformLocation(tri_program,"uPerspective") p = Matrix4.new_perspective(pi/6,4.0/3.0,0.1,10.0) glUniformMatrix4fv(loc, False, matToList(p)) loc = glGetUniformLocation(tri_program,"uLightDir") glUniform3f(loc,-1.0,0.4,0.7) loc = glGetUniformLocation(tri_program,"uFogDensity") glUniform1f(loc,fog_density) loc = glGetUniformLocation(tri_program,"uFogColor") glUniform4f(loc,0.3,0.3,0.35,1.0) draw_invader(1.0,-0.7,-4.0) draw_invader(0.1,0.2,-8.0) draw_invader(0.35,0.7,-9.0) # Use the text program object. glUseProgram(text_program) for slot in slots: slot.update() slot.draw_text_slot() # Create an EGL rendering context and all associated elements. def create_egl_context(native_window, attribs): # Get the default display. display = eglGetDisplay() # Initialize EGL. eglInitialize(display) # Choose the config. config = eglChooseConfig(display, attribs)[0] # Create a surface from the native window. surface = eglCreateWindowSurface(display, config, native_window, []) # Create a GL context. context = eglCreateContext(display, config, None, [EGL_CONTEXT_CLIENT_VERSION, 2]) # Make the context current. 
eglMakeCurrent(display, surface, surface, context) return display, surface textures = [] if __name__ == '__main__': import select import sys from pogles.platform import ppCreateNativeWindow native_window = ppCreateNativeWindow() display, surface = create_egl_context(native_window, [EGL_RED_SIZE, 5, EGL_GREEN_SIZE, 6, EGL_BLUE_SIZE, 5, EGL_DEPTH_SIZE, 8 ]) width = eglQuerySurface(display, surface, EGL_WIDTH) height = eglQuerySurface(display, surface, EGL_HEIGHT) print width, height, native_window, display, surface print "flush" print "flush" print "flush" text_program = create_program(text_vertex_shader_src, text_fragment_shader_src, text_bindings) tri_program = create_program(tri_vertex_shader_src, tri_fragment_shader_src, tri_bindings) #f = font.SysFont('ParaAminobenzoic',120) textures = glGenTextures(2) glEnable(GL_BLEND); glEnable(GL_DEPTH_TEST); glEnable(GL_CULL_FACE); glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA); for i in range(len(textures)): glActiveTexture(GL_TEXTURE0 + i) glBindTexture(GL_TEXTURE_2D, textures[i]) glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR ); glTexParameteri ( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); slots.append(TextSlot(text_program,i,textures[i])) slots[0].set_text('hello hello') slots[1].set_text('robo robo') stamp = time.time() ready, _, _ = select.select([sys.stdin], [], [], 0) fc = FutureClient(name='TV client') fc.available_games = [] fc.message_slots = slots fc.start() try: while len(ready) == 0: draw(text_program, width, height) eglSwapBuffers(display, surface) ready, _, _ = select.select([sys.stdin], [], [], 0) finally: fc.quit() <file_sep> // mapping for keypresses for illuminated buttons const int ILLUM_COUNT = 25; int illum_keys[ILLUM_COUNT] = { 8, 4, 5, 11, 6, 9, 32, 10, 16, 15, 14, 17, 0, 13, 18, 12, 19, 26, 27, 24, 23, 25, 22, 21, 20 }; void setup() { Serial.begin(19200); for (int i =0; i < ILLUM_COUNT; i++) { pinMode(illum_keys[i],INPUT); } } void readKeys() { for (int i =0; i < 
ILLUM_COUNT; i++) { Serial.print((digitalRead(illum_keys[i]) == HIGH)?"1":"0"); } Serial.println(); } void exec(char *buf) { char cmd = buf[0]; switch(cmd) { case 'I': Serial.println("teensypp"); break; case 'r': readKeys(); break; } } char buf[20]; int bidx = 0; void loop() { int r; while ((r=Serial.read()) != -1) { if (r == '\n' || r == '\r') { buf[bidx] = 0; exec(buf); bidx = 0; } else { buf[bidx++] = r; if (bidx > 19) bidx = 19; } } } <file_sep>#!/usr/bin/python -u from tornado.ioloop import IOLoop, PeriodicCallback import tornado.web from tornado import websocket import json import time import random import ScoreTower as tower from os import getenv import threading import logging logging.basicConfig(filename='/var/log/fc/server.log',format='%(asctime)s %(levelname)s:%(message)s',level=logging.DEBUG) gpio_avail = True try: import RPi.GPIO as GPIO except: gpio_avail = False START_GPIO=23 ABORT_GPIO=24 portNum = getenv('SERVER_PORT',2600) class Session: '''A 'session' is a set of games played in sequence-- basically, what the players think of as a complete 'game'. There is a single score for the entire session.''' def __init__(self): self.reset_values() def reset_values(self): self.score = 0 self.state = None self.pos_score = 0 self.neg_score = 0 self.pos_threshold = 50; self.neg_threshold = -50; def start(self): self.reset_values() self.state = 'running' tower.queue_session_begin() for console in Console.consoles.copy(): console.send_session('starting','Future Crew is Go!', self.score) def abort(self): self.session_done(False) def session_done(self,won): logging.info("Game is won: {}".format(won)) self.state = None if won: cmd = 'won' msg = 'Game is won!!!' tower.queue_session_won() # blink won sequence, return to attract else: cmd = 'lost' msg = 'Game is lost!!!' 
tower.queue_session_lost() # blink lost sequence, return to attract for console in Console.consoles.copy(): console.send_session(cmd, msg, self.score) def game_done(self,won,score,player_console): self.score += score if score > 0: self.pos_score += score else: self.neg_score += score if self.pos_score > self.pos_threshold: self.session_done(True) elif self.neg_score < self.neg_threshold: self.session_done(False) else: n = player_console.name if won: msg = n + ":SUCCESS" else: msg = n + ":FAIL" for console in Console.consoles.copy(): console.send_announcement(n, score, self.score, msg) def heartbeat(self): if self.state == 'running': for c in list(Console.consoles): if c.wants_game(): c.make_new_game() session = Session() class Game: '''A 'game' is a message displayed on one console, and a set of actions performed on a (usually different) console to 'win' the game. Games can time out or be cancelled.''' # maintain map of running games games = {} def __init__(self,message_console,slot_id,play_console,msg): self.message_console = message_console self.play_console = play_console # the console playing the game self.msg = msg self.slot_id = slot_id self.id = (play_console,msg['gameid']) Game.games[self.id] = self def start(self): logging.info(": starting game, {0}:{1} --> {2}:{3}".format( self.play_console.name, self.id[1], self.message_console.name, self.slot_id)) self.play_console.send_control(self.msg,'start') def resolve(self,won,score,resultmsg): # self.play_console.name returns the name of the console. 
session.game_done(won,score,self.play_console) if won: tower.queue_game_hit(self.play_console.name, session.score) logging.info("+ Game {0} won, {1} points".format(self.id[1],score)) else: tower.queue_game_miss(self.play_console.name, session.score) logging.info("- Game {0} lost, {1} points".format(self.id[1],score)) def send_messages_run(): self.message_console.send_message(resultmsg,self.slot_id) time.sleep(1.5) self.message_console.send_message(None,self.slot_id) t = threading.Thread(target=send_messages_run) t.start() def handle_game_update(self,update): if update['running']: # all is well, just status if update.has_key('message'): self.message_console.send_message(update['message'],self.slot_id) return else: won = update['result'] score = update.get('score',0) resultmsg = update.get('message',None) if not resultmsg: if won: resultmsg = 'SUCCESS' else: resultmsg = 'FAILURE' self.resolve(won,score,resultmsg) del Game.games[(self.play_console,update['gameid'])] class Console: consoles = set() def __init__(self,name,socket): self.name = name self.socket = socket self.timestamp = time.time() Console.consoles.add(self) logging.info("+ Added {0} console".format(self.name)) self.avail_slots = [] self.avail_games = [] self.queued_message = None self.bored = False self.last_game_start = time.time() def send_message(self,message,slot): m_msg = { 'a' : 'message', 'text' : message, 'slotid' : slot } try: self.socket.write_message(json.dumps(m_msg)) except: logging.error("Can't send message; possible that client has dropped!") def send_announcement(self,name,game_score,score,message): m_msg = { 'a' : 'announcement', 'message' : message, 'name': name, 'score': score, 'game_score': game_score } try: self.socket.write_message(json.dumps(m_msg)) except: logging.error("Can't send announcement; possible that client has dropped!") def send_session(self,state,message,score): s_msg = { 'a':'session_update', 'state':state, 'message':message, 'score':score } try: 
self.socket.write_message(json.dumps(s_msg)) except: logging.error("Can't send message; possible that client has dropped!") def send_control(self,game,operation): p_msg = { 'a' : 'control', 'game' : game, 'operation' : operation } try: self.socket.write_message(json.dumps(p_msg)) except: logging.error("Can't send control; possible that client has dropped!") def remove(self): Console.consoles.remove(self) logging.error("- Removed {0} console".format(self.name)) def wants_game(self): return self.bored and len(self.avail_games) > 0 def has_slot(self): return self.queued_message == None and len(self.avail_slots) > 0 def handle_status(self,msg): self.timestamp = time.time() self.avail_slots = msg.get('avail_slots',[]) self.avail_games = msg.get('avail_games',[]) self.bored = msg.get('bored',False) def make_new_game(self): # minimum interval between games: 2 seconds if (time.time() - self.last_game_start) < 2.0: return False slotavail = [x for x in Console.consoles if x.has_slot()] if not slotavail: # print("... 
Not enough message slots for bored client") pass else: self.last_game_start = time.time() game = random.choice(self.avail_games) logging.debug("Chose game {} from {}".format(game['gameid'],[g['gameid'] for g in self.avail_games])) messenger = random.choice(slotavail) slot = random.choice(messenger.avail_slots) if game.get('short',False) and slot.get('slow',False): # no short games on slow consoles return False self.bored = False messenger.avail_slots = [x for x in messenger.avail_slots if x != slot] self.avail_games = [x for x in self.avail_games if x != game] slotid = slot['id'] g = Game(messenger,slotid,self,game) g.start() return True return False class SpaceteamSocket(websocket.WebSocketHandler): def open(self): self.console = None self.cmdmap = { 'register': self.on_register, 'status': self.on_status, 'update': self.on_update, 'session_start': self.on_start, 'session_abort': self.on_abort } def on_register(self, message): name = message['name'] self.console = Console(name, self) def on_message(self, message): command = json.loads(message) self.cmdmap[command['a']](command) def on_status(self, message): self.console.handle_status(message) def on_abort(self, message): logging.info("** Aborting game") session.abort() def on_start(self, message): logging.info("** Starting game") session.start() def on_update(self, msg): try: Game.games[(self.console,msg['gameid'])].handle_game_update(msg) except KeyError: logging.error("Update message sent for obsolete game {0}".format(msg['gameid'])) def on_close(self): if self.console: self.console.socket = None try: self.console.remove() except KeyError: # may already have been removed by timeout pass application = tornado.web.Application([ (r"/socket", SpaceteamSocket), ]) TIMEOUT = 10.0 def heartbeat(): # check for client timeouts timestamp = time.time() for console in Console.consoles.copy(): if (timestamp - console.timestamp) > TIMEOUT: logging.error("* Console {0} timed out; closing socket".format(console.name)) 
console.socket.close() console.remove() session.heartbeat() if gpio_avail: if GPIO.input(START_GPIO) == GPIO.LOW: if not session.state: logging.info("Start button pressed!") session.start() if GPIO.input(ABORT_GPIO) == GPIO.LOW: if session.state == 'running': logging.info("Abort button pressed!") session.abort() if __name__ == "__main__": try: # Try for first ACM serial port; assume that's the # tower (tower has no recognition protocol) import glob ports = glob.glob('/dev/ttyACM*') tower.init(ports[0]) tower.queue_attract() except: logging.error("Could not contact LED tower.") if gpio_avail: logging.info("Scanning GPIO start/abort buttons.") GPIO.setmode(GPIO.BCM) GPIO.setup(START_GPIO, GPIO.IN, pull_up_down=GPIO.PUD_UP) GPIO.setup(ABORT_GPIO, GPIO.IN, pull_up_down=GPIO.PUD_UP) else: logging.error("GPIO disabled.") application.listen(portNum, '0.0.0.0') logging.info("FC server starting; listening on port {0}.".format(portNum)) pc = PeriodicCallback(heartbeat,100,IOLoop.instance()) pc.start() IOLoop.instance().start() tower.shutdown() if gpio_avail: GPIO.cleanup() <file_sep># class for a Matrix Orbital LCD display on a serial port # # Manual for Matrix Orbital LCD2041: # http://www.matrixorbital.ca/manuals/LCDVFD_Series/LCD2041/LCD2041.pdf # # call update() frequently if you want to use time-based stuff like blink import serial import time class MatrixOrbitalLCD(object): # default tty '/dev/ttyAMA0' is the built-in 3.3V serial port on the Pi's GPIO header. # Connect Pi GND to LCD Gnd and Pi Tx to LCD Rx. Don't connect Pi Rx to LCD Tx. # It seems to work fine driving the 5V LCD serial input with no 3.3->5V level converter. # Since we don't care about receiving from LCD, no need to make a level converter for the other direction # default baud rate 19200 is the LCD's default baud rate. 
def __init__(self, tty='/dev/ttyAMA0', baud=19200): self.port = serial.Serial(tty, baud) self.t = time.time() self.dt = 0 self.blinkDuration = 0 # backlight blink duration in seconds self.blinkTime = 0 # next blink time self.blinkEndTime = 0 self.backlightState = True self.NUM_GPO = 3 self.gpoBlinkDuration = ['X', 0, 0, 0] # GPO outputs are numbered 1, 2, 3 self.gpoBlinkTime = ['X', 0, 0, 0] self.gpoBlinkEndTime = ['X', 0, 0, 0] self.gpoState = ['X', False, False, False] # various config stuff ---- # auto scroll True / False def autoScroll(self, flag): if (flag): self.writeBytes([254,81]) else: self.writeBytes([254,82]) # word wrap True / False def autoWrap(self, flag): if (flag): self.writeBytes([254,67]) else: self.writeBytes([254,68]) # move cursor to X/Y def cursorPos(self, x, y): self.writeBytes([254,71,x,y]) # simple print / println def lcdprint(self, text): self.port.write(text) def lcdprintln(self, text): self.lcdprint(text + chr(10)+chr(13)) # clear screen def cls(self): self.port.write(chr(12)) # backlight on / off (True / False) also disables blink def backlight(self, flag=True, resetBlink=True): if (resetBlink): self.blinkDuration = 0 self.blinkTime = 0 self.blinkEndTime = 0 self.backlightState = flag if (flag): self.writeBytes([254,66,0]) else: self.writeBytes([254,70]) # blink backlight with on and off time = dur (in seconds) - 0 for no blink (light on) # stop blinking after ... 
0 for don't stop def blink(self, dur, stopAfter=0): if (dur==0): self.backlight(True) self.blinkEndTime = 0 elif stopAfter > 0: self.blinkEndTime = self.t + stopAfter else: self.blinkEndTime = 0 if (dur != self.blinkDuration): self.blinkDuration = dur self.blinkTime = self.t # backlight brightness 0-255 def brightness(self, b): self.writeBytes([254,153,b]) # GPO outputs 1, 2, 3 ----- # set gpo startup state def gpoStartup(self, which, flag): self.gpoState[which] = flag if (flag): self.writeBytes([254, 195, which, 1]) else: self.writeBytes([254, 195, which, 0]) # set gpo current state def gpo(self, which, flag, resetBlink=True): if (resetBlink): self.gpoBlinkDuration[which] = 0 self.gpoBlinkTime[which] = 0 self.gpoBlinkEndTime[which] = 0 self.gpoState[which] = flag if (flag): self.writeBytes([254,87,which]) else: self.writeBytes([254,86,which]) # blink GPO with on and off time = dur (in seconds) - 0 for no blink (light off) # stop blinking after ... 0 for don't stop def gpoBlink(self, which, dur, stopAfter=0): if (dur==0): self.gpo(which, False) self.gpoBlinkEndTime[which] = 0 elif stopAfter > 0: self.gpoBlinkEndTime[which] = self.t + stopAfter else: self.gpoBlinkEndTime[which] = 0 if (dur != self.gpoBlinkDuration[which]): self.gpoBlinkDuration[which] = dur self.gpoBlinkTime[which] = self.t # write an array or list of bytes def writeBytes(self, bs): for b in bs: self.port.write(chr(b)) # call update() frequently if you want to use time-based stuff like blink def update(self): t = time.time() self.dt = t - self.t self.t = t #print "dur:" + str(self.blinkDuration) + " endAt:" + str(self.blinkEndTime) + " light:" + str(self.backlightState) if (self.blinkDuration > 0 and self.blinkEndTime > 0 and t > self.blinkEndTime): self.blink(0) if (self.blinkDuration > 0 and t > self.blinkTime): self.backlight(not self.backlightState, resetBlink=False) self.blinkTime = t + self.blinkDuration for i in range(1,self.NUM_GPO+1): if (self.gpoBlinkDuration[i] > 0 and 
self.gpoBlinkEndTime[i] > 0 and t > self.gpoBlinkEndTime[i]): self.gpoBlink(i, 0) if (self.gpoBlinkDuration[i] > 0 and t > self.gpoBlinkTime[i]): self.gpo(i, not self.gpoState[i], resetBlink=False) self.gpoBlinkTime[i] = t + self.gpoBlinkDuration[i] <file_sep>void setup() { for (int i =0; i < 11; i++) { pinMode(i, INPUT_PULLUP); } Serial.begin(19200); Serial.println("initialized serial"); analogReadRes(10); analogReadAveraging(5); analogReference(DEFAULT); } void readValues() { for (int i =0; i < 11; i++) { int j = analogRead(i); if (digitalRead(i) == LOW) { Serial.print("0/"); } else { Serial.print("1/"); } Serial.print(j); Serial.print(" "); } Serial.println(); } void exec(char *buf) { char cmd = buf[0]; switch(cmd) { case 'I': Serial.println("teensy3"); break; case 'r': readValues(); break; } } const int BUFSZ = 500; char buf[BUFSZ]; int bidx = 0; void loop() { int r; while ((r=Serial.read()) != -1) { if (r == '\n' || r == '\r') { buf[bidx] = 0; exec(buf); bidx = 0; } else { buf[bidx++] = r; if (bidx > BUFSZ-1) bidx = BUFSZ-1; } } } <file_sep>#!/usr/bin/python import subprocess from os.path import expanduser homepath = expanduser("~") def remote(ip, start, services): if start: c = "start" else: c = "stop" cmdstr = '; '.join(["sudo /etc/init.d/{} {}".format(x,c) for x in services]) l = ['ssh','-i',homepath+'/pi-key','pi@{}'.format(ip),cmdstr] process = subprocess.Popen(l) process.communicate() nodes={'videdit':('192.168.1.98',["invasion","VidEditConsole"]), 'patch':('192.168.1.97',["invasion","PatchConsole"]), 'piano':('192.168.1.96',["ToyPianoConsole"]), 'teletype':('192.168.1.95',["TeletypeConsole"]), 'rotary':('192.168.1.94',["RotaryConsole"]), 'server':('192.168.1.99',["FCServer"]) } import sys if __name__=='__main__': enable = True for arg in sys.argv: if arg == '-e': enable=True elif arg == '-d': enable=False elif arg == '--off': for (ip,services) in nodes.values(): remote(ip,False,services) elif arg == '--on': for (ip,services) in nodes.values(): 
remote(ip,True,services) else: if arg in nodes: (ip,services) = nodes[arg] remote(ip,enable,services) <file_sep>buttons = [ 'CRUMPET', 'GOGGLES', 'ROSEBUD', 'PARASOL', 'MAKER', 'JOKER', 'ALPHA', 'BRAVO', 'ROMEO', 'ECHO', 'FOXTROT', 'PERU', 'DELTA', 'NEXUS', 'TANGO', 'VICTOR', 'STARFISH', 'PRIME', 'PRISM', 'POLARIS', 'NEBULA', 'FALCON', 'PELICAN', 'HALO', 'PHANTOM', 'RAPTOR', 'CITADEL', 'TARDIS', 'ENIGMA', 'ZINGER', 'YODEL', 'TWINKIE', 'THOR', 'ATHENA', 'HELIOS', 'OMEGA', 'THETA', 'EPSILON', 'DAKOTA', 'SPARROW', 'PROTON', 'MIRROR', ]; old_buttons = { 9: 'RESET', 8: 'push key punch', 7: 'set to OPTIMA', 6: 'spend a pound', 5: 'LIMIT MINDER', 4: 'engage videoscan', 3: 'set to null', 2: 'quote it', 1: 'go to WARP SPEED', 0: 'TRANSMIT', }; """ Spacebar as "TRANSMIT" Command is "Transmit code word CRUMPET" codewords: CRUMPET GOGGLES ROSEBUD PARASOL MAKER JOKER ALPHA BRAVO ROMEO ECHO FOXTROT PERU DELTA NOVEMBER TANGO VICTOR STARFISH PRIME PRISM POLARIS NEBULA FALCON PELICAN HALO PHANTOM RAPTOR CITADEL TARDIS ENIGMA ZINGER YODEL TWINKIE THOR ATHENA HELIOS OMEGA THETA EPSILON DAKOTA SPARROW PROTON MIRROR """ <file_sep>import serial import time import struct import threading import random from patches import * port = serial.Serial("/dev/tty.usbmodem12341", timeout=3) oldsw = 0 while 1: keys = port.readline().strip() if not keys: next #print keys cons = keys.split(' ') sw = int(cons[0], 16) sw &= ~0x80; # switch 8 is flaky for i in range(0,8): val = (sw >> i) & 1 if ((oldsw >> i) & 1) ^ val: print "Switch: ", switches[i], 'ON' if val else 'OFF' oldsw = sw for con in cons[1:]: fromto = con.split(':') #print fromto[0], '=>', fromto[1] print "Patched: ", \ patches[fromto[0]], \ " to ", \ patches[fromto[1]] <file_sep>#!/bin/sh APP_NAME="invasion" APP_PATH="/home/pi/Future-Crew/consoles/VidEditConsole" APP_USER=pi case "$1" in start) echo -n "Starting $APP_NAME..." 
start-stop-daemon --start \ --background \ --pidfile /tmp/$APP_NAME.pid \ --chuid $APP_USER \ --chdir $APP_PATH \ --exec invasion.sh echo "$APP_NAME now running." ;; stop) echo -n "Stopping $APP_NAME..." start-stop-daemon -o --stop --pidfile /tmp/$APP_NAME.pid echo "stopped" ;; force-reload|restart) $0 stop $0 start ;; *) echo "Use: /etc/init.d/invasion {start|stop|restart|force-reload}" exit 1 ;; esac exit 0 <file_sep>#!/bin/bash for node in {94..99}; do echo "Updating Future-Crew on 192.168.1.$node" ssh -i ~/pi-key [email protected].$node "cd Future-Crew; git pull" done <file_sep><pre> _______________________ ( Video Editing Console ) ----------------------- \ ^__^ \ (oo)\_______ (__)\ )\/\ ||----w | || || </pre> drawing.png has a guide to all the various switches and levers on the console. panel_teensy is the teensy2.0 sketch for the electronics driving the console. Panel Protocol ============== Commands consist of one byte and then a variable-length payload. 0x00 - reserved 0x01 - set LED. Followed by a one-byte index, and then a one-byte mode. (See below for modes.) 0x02 - set illuminated switch. Followed by a one-byte index, and then a one-byte mode. (See below for modes.) 0x03 - read illuminated switches. Returns a four-byte bitmap in big-endian order with 1s representing currently pressed switches. 0x04 - read non-illuminated buttons. Returns a five-byte bitmap in big-endian order with 1s representing currently pressed switches. 0x05 - read pushswitches. NYD. 0x06 - read encoders. NYD. 
Installation ============ Install the invasion (display) script: sudo cp invasion.init.d /etc/init.d/invasion sudo update-rc.d invasion defaults 99 Install the VidEditConsole (game) script: sudo cp VidEditConsole.init.d /etc/init.d/VidEditConsole sudo update-rc.d VidEditConsole defaults 99 Reboot: sudo reboot <file_sep>#include <SPI.h> #include <stdint.h> #include <avr/interrupt.h> const int PANEL_R = 4; const int PANEL_G = 5; const int PANEL_B = 6; const int L_COUNT = 3*16; const int LATCH_PIN = 18; enum LEDState { L_OFF =0, L_ON =1, L_FLASH_A =2, L_FLASH_B =3, L_FLICKER =4, L_LAST }; const uint8_t ILLUMINATED_COUNT = 25; const uint8_t illuminated_mapping[ILLUMINATED_COUNT] = { 31, 30, 29, 28, 27, 26, 25, 24, 23, 21, 19, 17, 15, 22, 20, 18, 16, 14, 13, 12, 11, 10, 9, 8, 7 }; const uint8_t LED_COUNT = 12; const uint8_t led_mapping[LED_COUNT] = { 36, 38, 39, 40, 43, 45, 46, 47, 41, 42, 44, 37 }; class LEDMap { private: uint8_t l[L_COUNT]; public: void clear(); void setRaw(int idx, LEDState value); void setLED(int idx, LEDState value); void setIlluminated(int idx, LEDState value); void show(int cycle); }; void LEDMap::clear() { for (int i = 0; i < L_COUNT; i++) l[i] = L_OFF; } void LEDMap::setRaw(int idx, LEDState value) { l[idx] = value; } void LEDMap::setLED(int idx, LEDState value) { l[led_mapping[idx]] = value; } void LEDMap::setIlluminated(int idx, LEDState value) { l[illuminated_mapping[idx]] = value; } void LEDMap::show(int cycle) { digitalWrite(LATCH_PIN,LOW); for (int8_t i = 0; i < L_COUNT; ) { uint8_t b = 0; for (int8_t j = 0; j < 8; j++, i++) { b <<= 1; if (i < L_COUNT) { const uint8_t s = l[i]; if (s == L_ON) b |= 0x01; else if (s == L_FLASH_A && cycle < 128) b |= 0x01; else if (s == L_FLASH_B && cycle >= 128) b |= 0x01; else if (s == L_FLICKER && (cycle % 32) < 17) b |= 0x01; // handle other modes } } SPI.transfer(b); } digitalWrite(LATCH_PIN,HIGH); } LEDMap l; void setup() { l.clear(); pinMode(LATCH_PIN,OUTPUT); digitalWrite(LATCH_PIN,HIGH); // 
initialize SPI: SPI.begin(); SPI.setClockDivider(SPI_CLOCK_DIV32); Serial.begin(9600); Serial1.begin(19200); // Init timer3: Fast PWM mode, 10-bit (0111) TCCR3A = 0x03; TCCR3B = 0x08 | 0x03; // cs = 3; 1/64 prescaler TCCR3C = 0x00; TIMSK3 = 0x01; // enable overflow interrupt pinMode(PANEL_R,OUTPUT); digitalWrite(PANEL_R,LOW); pinMode(PANEL_G,OUTPUT); digitalWrite(PANEL_G,LOW); pinMode(PANEL_B,OUTPUT); digitalWrite(PANEL_B,LOW); } int parse(char*& buf) { int rv = 0; while (*buf >= '0' && *buf <= '9') { rv *= 10; rv += *buf - '0'; buf++; } return rv; } int hex(char h) { if (h >= '0' && h <= '9') { return h-'0'; } if (h >= 'a' && h <= 'f') { return (h-'a')+10; } if (h >= 'A' && h <= 'F') { return (h-'A')+10; } return 0; } void exec(char *buf) { char cmd = buf[0]; switch(cmd) { case 'I': Serial.println("teensy"); break; case 'p': { char which = buf[1]; char mode = buf[2]; int val; if (mode == '+') { val = HIGH; } else if (mode == '-') { val = LOW; } else { break; } int pin; if (which == 'r') { pin = PANEL_R; } else if (which == 'g') { pin = PANEL_G; } else if (which == 'b') { pin = PANEL_B; } else { break; } digitalWrite(pin,val); } break; case 'r': Serial.println("Read on other teensy."); break; case 'm': { buf++; boolean escaped = false; while (*buf != '\0') { if (escaped) { if (*buf == 'n') Serial1.write('\n'); else if (*buf == 'r') Serial1.write('\r'); else if (*buf == 'x') { buf++; if (*buf == 0) break; int x = hex(*(buf++)); if (*buf == 0) break; x *= 16; x += hex(*buf); Serial1.write(x); } else Serial1.write(*buf); escaped = false; } else if (*buf == '\\') { escaped = true; } else { Serial1.write(*buf); } buf++; } } break; case 'l': { buf++; int i = parse(buf); if (*buf != ':') break; // abort buf++; int j = parse(buf); l.setLED(i,(LEDState)j); } break; case 'i': { buf++; int i = parse(buf); if (*buf != ':') break; // abort buf++; int j = parse(buf); l.setIlluminated(i,(LEDState)j); } break; } } const int BUFSZ = 500; int i = 0; char buf[BUFSZ]; int bidx = 0; 
void loop() { int r; while ((r=Serial.read()) != -1) { if (r == '\n' || r == '\r') { buf[bidx] = 0; exec(buf); bidx = 0; } else { buf[bidx++] = r; if (bidx > BUFSZ-1) bidx = BUFSZ-1; } } } volatile uint8_t cycle = 0; ISR(TIMER3_OVF_vect) { l.show(cycle++); }
eaed9a0342ca00c61538a70b5934e16a573e1c40
[ "Markdown", "Python", "C++", "Shell" ]
39
Python
nycresistor/Future-Crew
44aba1984d1a9f7e030a6fd32b7e9a920d08eed4
41bf1a4bfe3f25dd203b1b5525b86d0318ae2b30
refs/heads/master
<repo_name>3492PARTs/PARTs_WebAPI<file_sep>/scouting/migrations/0004_schedule_and_event_team_info.py # Generated by Django 4.0.3 on 2023-03-19 19:10 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('scouting', '0003_multiple_notifications'), ] operations = [ migrations.CreateModel( name='EventTeamInfo', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('matches_played', models.IntegerField()), ('qual_average', models.IntegerField()), ('losses', models.IntegerField()), ('wins', models.IntegerField()), ('ties', models.IntegerField()), ('rank', models.IntegerField()), ('dq', models.IntegerField()), ('void_ind', models.CharField(default='n', max_length=1)), ('event_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.event')), ('team_no', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.team')), ], options={ 'unique_together': {('event_id', 'team_no')}, }, ), migrations.CreateModel( name='Schedule', fields=[ ('sch_id', models.AutoField(primary_key=True, serialize=False)), ('st_time', models.DateTimeField()), ('end_time', models.DateTimeField()), ('notified', models.CharField(default='n', max_length=1)), ('void_ind', models.CharField(default='n', max_length=1)), ('event', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.event')), ], ), migrations.CreateModel( name='ScheduleType', fields=[ ('sch_typ', models.CharField(max_length=10, primary_key=True, serialize=False)), ('sch_nm', models.CharField(max_length=255)), ], ), migrations.DeleteModel( name='ScoutPitSchedule', ), migrations.AddField( model_name='schedule', name='sch_typ', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.scheduletype'), ), migrations.AddField( 
model_name='schedule', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL), ), ] <file_sep>/tba/views.py from django.shortcuts import render from rest_framework.views import APIView from rest_framework_simplejwt.authentication import JWTAuthentication from rest_framework.permissions import IsAuthenticated from api.api.tba.serializers import EventUpdatedSerializer from api.auth.security import ret_message # Create your views here. class EventScheduleUpdated(APIView): """API endpoint to receive a TBA webhook for event updated""" authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) def save_scout_schedule(self, serializer): """ if serializer.validated_data['st_time'] <= timezone.now(): return ret_message('Start time can\'t be in the past.', True, 'api/scoutAdmin/PostSaveScoutFieldScheduleEntry', self.request.user.id) """ if serializer.validated_data['end_time'] <= serializer.validated_data['st_time']: return ret_message('End time can\'t come before start.', True, 'api/scoutAdmin/PostSaveScoutFieldScheduleEntry', self.request.user.id) if serializer.validated_data.get('scout_field_sch_id', None) is None: serializer.save() return ret_message('Saved schedule entry successfully') else: sfs = ScoutFieldSchedule.objects.get( scout_field_sch_id=serializer.validated_data['scout_field_sch_id']) sfs.red_one = serializer.validated_data.get('red_one', None) sfs.red_two = serializer.validated_data.get('red_two', None) sfs.red_three = serializer.validated_data.get('red_three', None) sfs.blue_one = serializer.validated_data.get('blue_one', None) sfs.blue_two = serializer.validated_data.get('blue_two', None) sfs.blue_three = serializer.validated_data.get('blue_three', None) sfs.st_time = serializer.validated_data['st_time'] sfs.end_time = serializer.validated_data['end_time'] sfs.notified = 'n' sfs.void_ind = serializer.validated_data['void_ind'] sfs.save() return ret_message('Updated schedule 
entry successfully') def post(self, request, format=None): serializer = EventUpdatedSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, 'api/tba/EventScheduleUpdated', request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: req = self.save_scout_schedule(serializer) return req except Exception as e: return ret_message('An error occurred while saving the schedule entry.', True, 'api/scoutAdmin/PostSaveScoutFieldScheduleEntry', request.user.id, e) else: return ret_message('You do not have access.', True, 'api/scoutAdmin/PostSaveScoutFieldScheduleEntry', request.user.id) <file_sep>/scouting/field/views.py from datetime import datetime from pytz import utc from rest_framework_simplejwt.authentication import JWTAuthentication from rest_framework.permissions import IsAuthenticated import form.util from form.models import Question, QuestionAnswer from scouting.models import Season, Event, Team, ScoutFieldSchedule, ScoutField, \ EventTeamInfo, Match from rest_framework.views import APIView from general.security import ret_message, has_access from .serializers import ScoutFieldSerializer, ScoutFieldResultsSerializer, SaveScoutFieldSerializer from django.db.models import Q from rest_framework.response import Response from django.utils import timezone from django.conf import settings auth_obj = 49 auth_view_obj = 52 app_url = 'scouting/field/' class Questions(APIView): """ API endpoint to get scout field inputs """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'get-questions/' def get_questions(self): try: current_season = Season.objects.get(current='y') except Exception as e: return ret_message('No season set, see an admin.', True, app_url + self.endpoint, self.request.user.id, e) scout_questions = form.util.get_questions('field') try: current_event = Event.objects.get( Q(season=current_season) & Q(current='y')) except Exception as e: return 
ret_message('No event set, see an admin.', True, app_url + self.endpoint, self.request.user.id, e) teams = [] teams = Team.objects.filter( event=current_event).order_by('team_no') sfss = ScoutFieldSchedule.objects.filter(Q(st_time__lte=timezone.now()) & Q( end_time__gte=timezone.now()) & Q(void_ind='n')) sfs = None for s in sfss: sfs = { 'scout_field_sch_id': s.scout_field_sch_id, 'event_id': s.event_id, 'st_time': s.st_time, 'end_time': s.end_time, 'red_one_id': s.red_one, 'red_two_id': s.red_two, 'red_three_id': s.red_three, 'blue_one_id': s.blue_one, 'blue_two_id': s.blue_two, 'blue_three_id': s.blue_three } matches = Match.objects.filter(Q(event=current_event) & Q(comp_level_id='qm') & Q(void_ind='n')) \ .order_by('match_number') parsed_matches = [] for m in matches: parsed_matches.append({ 'match_id': m.match_id, 'event_id': m.event.event_id, 'match_number': m.match_number, 'time': m.time, 'blue_one_id': m.blue_one.team_no if self.get_team_match_field_result(m, m.blue_one.team_no) is None else None, 'blue_two_id': m.blue_two.team_no if self.get_team_match_field_result(m, m.blue_two.team_no) is None else None, 'blue_three_id': m.blue_three.team_no if self.get_team_match_field_result(m, m.blue_three.team_no) is None else None, 'red_one_id': m.red_one.team_no if self.get_team_match_field_result(m, m.red_one.team_no) is None else None, 'red_two_id': m.red_two.team_no if self.get_team_match_field_result(m, m.red_two.team_no) is None else None, 'red_three_id': m.red_three.team_no if self.get_team_match_field_result(m, m.red_three.team_no) is None else None, }) return {'scoutQuestions': scout_questions, 'teams': teams, 'scoutFieldSchedule': sfs, 'matches': parsed_matches} def get_team_match_field_result(self, m, team): try: res = ScoutField.objects.filter(Q(match=m) & Q(team_no=team) & Q(void_ind='n')) if res.count() > 0: return res else: return None except: x = 9 return None def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = 
self.get_questions() if type(req) == Response: return req serializer = ScoutFieldSerializer(req) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while initializing.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) """ class SaveAnswers(APIView): "" API endpoint to save scout field answers "" authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'save-answers/' def save_answers(self, data): try: current_season = Season.objects.get(current='y') except Exception as e: return ret_message('No season set, see an admin.', True, app_url + self.endpoint, self.request.user.id, e) try: current_event = Event.objects.get( Q(season=current_season) & Q(current='y')) except Exception as e: return ret_message('No event set, see an admin', True, app_url + self.endpoint, self.request.user.id, e) sf = ScoutField( event=current_event, team_no_id=data['team'], match_id=data.get('match', None), user_id=self.request.user.id, void_ind='n') sf.save() for d in data['scoutQuestions']: form.util.save_question_answer(d.get('answer', ''), Question.objects.get(question_id=d['question_id']), scout_field=sf) return ret_message('Response saved successfully') def post(self, request, format=None): serializer = SaveScoutFieldSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: req = self.save_answers(serializer.data) return req except Exception as e: return ret_message('An error occurred while saving answers.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) """ class Results(APIView): """ API endpoint to get the results of field scouting """ 
authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'results/' def get_answers(self, team): return get_field_results(team, self.endpoint, self.request) def get(self, request, format=None): if has_access(request.user.id, auth_obj) or has_access(request.user.id, auth_view_obj): try: req = self.get_answers(request.query_params.get('team', None)) if type(req) == Response: return req serializer = ScoutFieldResultsSerializer(req) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while initializing.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) def get_field_results(team, endpoint, request): try: current_season = Season.objects.get(current='y') except Exception as e: return ret_message('No season set, see an admin.', True, app_url + endpoint, request.user.id, e) try: current_event = Event.objects.get( Q(season=current_season) & Q(current='y') & Q(void_ind='n')) except Exception as e: return ret_message('No event set, see an admin.', True, app_url + endpoint, request.user.id, e) scout_cols = [{ 'PropertyName': 'team', 'ColLabel': 'Team No', 'order': 0 }, { 'PropertyName': 'rank', 'ColLabel': 'Rank', 'order': 1 }, { 'PropertyName': 'match', 'ColLabel': 'Match', 'order': 1 }] scout_answers = [] sqsa = Question.objects.filter(Q(season=current_season) & Q(form_typ_id='field') & Q(form_sub_typ_id='auto') & Q(active='y') & Q(void_ind='n')).order_by('order') sqst = Question.objects.filter(Q(season=current_season) & Q(form_typ_id='field') & Q(form_sub_typ_id='teleop') & Q(active='y') & Q(void_ind='n')) \ .order_by('order') sqso = Question.objects.filter(Q(season=current_season) & Q(form_typ_id='field') & Q(form_sub_typ_id__isnull=True) & Q(active='y') & Q(void_ind='n')) \ .order_by('order') for sqs in [sqsa, sqst, sqso]: for sq in sqs: scout_cols.append({ 'PropertyName': 'ans' + 
str(sq.question_id), 'ColLabel': ('' if sq.form_sub_typ is None else sq.form_sub_typ.form_sub_typ[ 0:1].upper() + ': ') + sq.question, 'order': sq.order }) scout_cols.append({ 'PropertyName': 'user', 'ColLabel': 'Scout', 'order': 9999999999 }) scout_cols.append({ 'PropertyName': 'time', 'ColLabel': 'Time', 'order': 99999999999 }) if team is not None: # get result for individual team sfs = ScoutField.objects.filter(Q(event=current_event) & Q(team_no_id=team) & Q(void_ind='n')) \ .order_by('-time', '-scout_field_id') else: # get result for all teams if settings.DEBUG: sfs = ScoutField.objects.filter(Q(event=current_event) & Q( void_ind='n')).order_by('-time', '-scout_field_id')[: 10] else: sfs = ScoutField.objects.filter(Q(event=current_event) & Q( void_ind='n')).order_by('-time', '-scout_field_id') for sf in sfs: sfas = QuestionAnswer.objects.filter( Q(scout_field=sf) & Q(void_ind='n')) sa_obj = {} for sfa in sfas: sa_obj['ans' + str(sfa.question_id)] = sfa.answer sa_obj['match'] = sf.match.match_number if sf.match else None sa_obj['user'] = sf.user.first_name + ' ' + sf.user.last_name sa_obj['time'] = sf.time sa_obj['user_id'] = sf.user.id sa_obj['team'] = sf.team_no_id try: eti = EventTeamInfo.objects.get(Q(event=current_event) & Q(team_no=sf.team_no) & Q(void_ind='n')) sa_obj['rank'] = eti.rank except: x = 1 scout_answers.append(sa_obj) return {'scoutCols': scout_cols, 'scoutAnswers': scout_answers} <file_sep>/general/security.py from django.utils import timezone from admin.models import ErrorLog from user.serializers import RetMessageSerializer from rest_framework.response import Response from user.models import User def has_access(user_id, sec_permission): # how to use has_access(self.request.user.id, 36) prmsns = get_user_permissions(user_id) access = False for prmsn in prmsns: if prmsn.id == sec_permission: access = True break return access def get_user_permissions(user_id): user = User.objects.get(id=user_id) user_groups = user.groups.all() prmsns = [] for 
grp in user_groups: for prmsn in grp.permissions.all(): prmsns.append(prmsn) return prmsns def get_user_groups(user_id): user = User.objects.get(id=user_id) augs = user.groups.all() return augs def ret_message(message, error=False, path='', user_id=0, exception=None): # TODO Make all of these optional in the DB if error: user = User.objects.get(id=user_id) print('----------ERROR START----------') print('Error in: ' + path) print('Message: ' + message) print('Error by: ' + user.username + ' ' + user.first_name + ' ' + user.last_name) print('Exception: ') print(exception) print('----------ERROR END----------') try: ErrorLog(user=user, path=path, message=message, exception=exception, time=timezone.now(), void_ind='n').save() except Exception as e: message += "\nCritical Error: please email the team admin at <EMAIL>492<EMAIL>\nSend them this message:\n" message += e try: ErrorLog(user=0, path=path, message=message, exception=exception, time=timezone.now(), void_ind='n').save() except Exception as e: y = 9 return Response(RetMessageSerializer({'retMessage': message, 'error': error}).data) return Response(RetMessageSerializer({'retMessage': message, 'error': error}).data) <file_sep>/form/migrations/0002_rename_q_id_question_question_id_and_more.py # Generated by Django 4.2 on 2023-06-30 00:19 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('form', '0001_initial'), ] operations = [ migrations.RenameField( model_name='question', old_name='q_id', new_name='question_id', ), migrations.RenameField( model_name='questionanswer', old_name='q_id', new_name='question', ), migrations.RenameField( model_name='questionoption', old_name='q_id', new_name='question', ), migrations.RenameField( model_name='questionoption', old_name='q_opt_id', new_name='question_opt_id', ), ] <file_sep>/form/serializers.py from rest_framework import serializers class QuestionTypeSerializer(serializers.Serializer): question_typ = serializers.CharField() 
question_typ_nm = serializers.CharField() is_list = serializers.CharField() class FormTypeSerializer(serializers.Serializer): form_typ = serializers.CharField(read_only=True) form_nm = serializers.CharField() class FormSubTypeSerializer(serializers.Serializer): form_sub_typ = serializers.CharField() form_sub_nm = serializers.CharField() form_typ_id = serializers.CharField() class QuestionOptionsSerializer(serializers.Serializer): question_opt_id = serializers.IntegerField(required=False, allow_null=True) question_id = serializers.IntegerField(read_only=True) option = serializers.CharField() active = serializers.CharField() class QuestionSerializer(serializers.Serializer): question_id = serializers.IntegerField(required=False, allow_null=True) season_id = serializers.IntegerField(read_only=True) question = serializers.CharField() order = serializers.IntegerField() required = serializers.CharField(required=False, allow_blank=True, allow_null=True) active = serializers.CharField() question_typ = QuestionTypeSerializer() form_sub_typ = serializers.CharField(required=False, allow_blank=True, allow_null=True) form_sub_nm = serializers.CharField(required=False, allow_blank=True, allow_null=True) form_typ = serializers.CharField() display_value = serializers.CharField(read_only=True) questionoption_set = QuestionOptionsSerializer( required=False, allow_null=True, many=True) answer = serializers.CharField(required=False, allow_null=True, allow_blank=True) class SaveScoutSerializer(serializers.Serializer): question_answers = QuestionSerializer(many=True) team = serializers.CharField() match = serializers.CharField(required=False) form_typ = serializers.CharField() class TeamSerializer(serializers.Serializer): team_no = serializers.IntegerField() team_nm = serializers.CharField() checked = serializers.BooleanField(required=False) class SaveResponseSerializer(serializers.Serializer): question_answers = QuestionSerializer(many=True) form_typ = serializers.CharField() class 
QuestionInitializationSerializer(serializers.Serializer): questions = QuestionSerializer(many=True) question_types = QuestionTypeSerializer(many=True) form_sub_types = FormSubTypeSerializer(many=True, required=False)<file_sep>/admin/models.py from django.db import models from user.models import User class ErrorLog(models.Model): error_log_id = models.AutoField(primary_key=True) user = models.ForeignKey(User, models.PROTECT) path = models.CharField(max_length=255, blank=True, null=True) message = models.CharField(max_length=1000, blank=True, null=True) exception = models.CharField(max_length=4000, blank=True, null=True) time = models.DateTimeField() void_ind = models.CharField(max_length=1, default='n') def __str__(self): return str(self.error_log_id) + ' user: ' + self.user.first_name + ' ' + self.user.last_name + \ ' location: ' + self.path + ' msg: ' + \ self.message + ' exc: ' + self.exception + ' time: ' + self.time <file_sep>/sponsoring/migrations/0003_historicalitem_img_id_historicalitem_img_ver_and_more.py # Generated by Django 4.2 on 2023-07-24 00:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('sponsoring', '0002_historicalitem_active_historicalitem_reset_date_and_more'), ] operations = [ migrations.AddField( model_name='historicalitem', name='img_id', field=models.CharField(blank=True, max_length=500, null=True), ), migrations.AddField( model_name='historicalitem', name='img_ver', field=models.CharField(blank=True, max_length=500, null=True), ), migrations.AddField( model_name='item', name='img_id', field=models.CharField(blank=True, max_length=500, null=True), ), migrations.AddField( model_name='item', name='img_ver', field=models.CharField(blank=True, max_length=500, null=True), ), ] <file_sep>/form/util.py from django.db.models import Q from django.db.models.functions import Lower from form.models import Question, Response, QuestionAnswer, QuestionOption, FormSubType, QuestionType from 
form.serializers import QuestionSerializer from general.security import ret_message from scouting.models import Season, ScoutField, ScoutPit, Event def get_questions(form_typ: str): questions = [] qs = Question.objects.prefetch_related('questionoption_set').filter( Q(form_typ_id=form_typ) & Q(void_ind='n')).order_by('form_sub_typ__order', 'order') if form_typ == 'field' or form_typ == 'pit': current_season = Season.objects.get(current='y') qs.filter(Q(season=current_season)) for q in qs: questions.append({ 'question_id': q.question_id, 'season_id': q.season_id, 'question': q.question, 'order': q.order, 'required': q.required, 'active': q.active, 'question_typ': q.question_typ, 'form_sub_typ': q.form_sub_typ.form_sub_typ if q.form_sub_typ is not None else None, 'form_sub_nm': q.form_sub_typ.form_sub_nm if q.form_sub_typ is not None else None, 'form_typ': q.form_typ, 'questionoption_set': q.questionoption_set, 'display_value': ('' if q.active == 'y' else 'Deactivated: ') + 'Order ' + str(q.order) + ': ' + (q.form_sub_typ.form_sub_nm + ': ' if q.form_sub_typ is not None else '') + q.question }) return questions def get_question_types(): question_types = QuestionType.objects.filter(void_ind='n').order_by(Lower('question_typ_nm')) return question_types def get_form_sub_types(form_typ: str): sub_types = FormSubType.objects.filter(form_typ=form_typ).order_by('order', Lower('form_sub_nm')) return sub_types def save_question(question): required = question.get('required', 'n') required = required if required != '' else 'n' if question.get('question_id', None) is not None: q = Question.objects.get(question_id=question['question_id']) q.question = question['question'] q.question_typ_id = question['question_typ']['question_typ'] q.form_sub_typ_id = question.get('form_sub_typ', None) q.order = question['order'] q.required = required q.active = question['active'] else: q = Question(question_typ_id=question['question_typ']['question_typ'], form_typ_id=question['form_typ'], 
form_sub_typ_id=question.get('form_sub_typ', None), question=question['question'], order=question['order'], active=question['active'], required=required, void_ind='n') if question['form_typ'] in ['pit', 'field']: if q.season is None: try: current_season = Season.objects.get(current='y') q.season = current_season except Exception as e: raise Exception('No season set, see an admin.') q.save() # If adding a new question we need to make a null answer for it for all questions already answered match question['form_typ']: case 'pit': questions_answered = ScoutPit.objects.filter(Q(void_ind='n') & Q(event__in=Event.objects.filter(Q(void_ind='n') & Q(season=current_season) ))) for qa in questions_answered: QuestionAnswer(scout_pit=qa, question=q, answer='!EXIST', void_ind='n').save() case 'field': questions_answered = ScoutField.objects.filter(Q(void_ind='n') & Q(event__in=Event.objects.filter(Q(void_ind='n') & Q(season=current_season) ))) for qa in questions_answered: QuestionAnswer(scout_field=qa, question=q, answer='!EXIST', void_ind='n').save() case _: questions_answered = Response.objects.filter(Q(void_ind='n') & Q(form_typ_id=question['form_typ'])) for qa in questions_answered: QuestionAnswer(response=qa, question=q, answer='!EXIST', void_ind='n').save() if question['question_typ']['is_list'] == 'y' and len(question.get('questionoption_set', [])) <= 0: raise Exception('Select questions must have options.') for op in question.get('questionoption_set', []): if op.get('question_opt_id', None) is not None: qop = QuestionOption.objects.get(question_opt_id=op['question_opt_id']) qop.option = op['option'] qop.active = op['active'] qop.save() else: QuestionOption(option=op['option'], question=q, active=op['active'], void_ind='n').save() def save_question_answer(answer: str, question: Question, scout_field: ScoutField = None, scout_pit: ScoutPit = None, response: Response = None): qa = QuestionAnswer(question=question, answer=answer, scout_field=scout_field, 
scout_pit=scout_pit, response=response, void_ind='n') qa.save() return qa <file_sep>/scouting/migrations/0011_alter_scoutquestion_void_ind.py # Generated by Django 4.0.3 on 2023-04-29 16:52 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('scouting', '0010_scoutfield_match'), ] operations = [ migrations.AlterField( model_name='scoutquestion', name='void_ind', field=models.CharField(default='n', max_length=1), ), ] <file_sep>/user/migrations/0001_initial.py # Generated by Django 4.0.3 on 2022-04-06 06:40 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('auth', '0012_alter_user_first_name_max_length'), ] operations = [ migrations.CreateModel( name='PhoneType', fields=[ ('phone_type_id', models.AutoField(primary_key=True, serialize=False)), ('carrier', models.CharField(max_length=255)), ('phone_type', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='UserLinks', fields=[ ('user_links_id', models.AutoField(primary_key=True, serialize=False)), ('menu_name', models.CharField(max_length=255)), ('routerlink', models.CharField(max_length=255)), ('order', models.IntegerField()), ('permission', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='auth.permission')), ], ), migrations.CreateModel( name='User', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('password', models.CharField(max_length=128, verbose_name='password')), ('email', models.EmailField(max_length=60, unique=True)), ('username', models.CharField(max_length=30, unique=True)), ('date_joined', models.DateTimeField(auto_now_add=True)), ('last_login', models.DateTimeField(default=None)), ('is_active', models.BooleanField(default=False)), ('is_superuser', models.BooleanField(default=False)), ('first_name', models.CharField(max_length=50)), ('last_name', 
models.CharField(max_length=50)), ('phone', models.CharField(blank=True, max_length=10, null=True)), ('phone_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='user.phonetype')), ], options={ 'abstract': False, }, ), ] <file_sep>/tba/urls.py from django.urls import path from .views import * # Wire up our API using atomic URL routing. # Additionally, we include login URLs for the browsable API. urlpatterns = [ #path('GetInit/', GetInit.as_view()) ] <file_sep>/sponsoring/serializers.py from rest_framework import serializers class SaveItemSerializer(serializers.Serializer): item_id = serializers.IntegerField(required=False) item_nm = serializers.CharField() item_desc = serializers.CharField() quantity = serializers.IntegerField() reset_date = serializers.DateField() active = serializers.CharField() img = serializers.FileField(required=False) class ItemSerializer(serializers.Serializer): item_id = serializers.IntegerField(required=False) item_nm = serializers.CharField() item_desc = serializers.CharField() quantity = serializers.IntegerField() reset_date = serializers.DateField() sponsor_quantity = serializers.IntegerField(required=False) active = serializers.CharField() img_url = serializers.CharField() class SponsorSerializer(serializers.Serializer): sponsor_id = serializers.IntegerField(required=False) sponsor_nm = serializers.CharField() phone = serializers.CharField() email = serializers.CharField() class ItemSponsorSerializer(serializers.Serializer): item_sponsor_id = serializers.IntegerField(required=False) item_id = serializers.IntegerField sponsor_id = serializers.IntegerField quantity = serializers.IntegerField class SaveSponsorOrderSerializer(serializers.Serializer): items = ItemSerializer(many=True) sponsor = SponsorSerializer() <file_sep>/scouting/pit/apps.py from django.apps import AppConfig class ScoutpitConfig(AppConfig): name = 'scoutPit' <file_sep>/form/migrations/0007_rename_type_formtype.py # Generated 
by Django 4.2 on 2023-07-14 00:04 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('form', '0006_question_required'), ] operations = [ migrations.RenameModel( old_name='Type', new_name='FormType', ), ] <file_sep>/scouting/migrations/0001_initial.py # Generated by Django 4.0.3 on 2022-04-06 06:40 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='CompetitionLevel', fields=[ ('comp_lvl_typ', models.CharField(max_length=50, primary_key=True, serialize=False)), ('comp_lvl_typ_nm', models.CharField(max_length=255)), ('comp_lvl_order', models.IntegerField()), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='Event', fields=[ ('event_id', models.AutoField(primary_key=True, serialize=False)), ('event_nm', models.CharField(max_length=255)), ('date_st', models.DateTimeField()), ('event_cd', models.CharField(max_length=10, unique=True)), ('event_url', models.CharField(max_length=255, null=True)), ('address', models.CharField(max_length=255, null=True)), ('city', models.CharField(max_length=255, null=True)), ('state_prov', models.CharField(max_length=255, null=True)), ('postal_code', models.CharField(max_length=255, null=True)), ('location_name', models.CharField(max_length=255, null=True)), ('gmaps_url', models.CharField(max_length=255, null=True)), ('webcast_url', models.CharField(max_length=255, null=True)), ('date_end', models.DateTimeField()), ('timezone', models.CharField(max_length=255, null=True)), ('current', models.CharField(default='n', max_length=1)), ('competition_page_active', models.CharField(default='n', max_length=1)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='Match', fields=[ ('match_id', models.CharField(max_length=50, primary_key=True, serialize=False)), ('match_number', 
models.IntegerField()), ('red_score', models.IntegerField(null=True)), ('blue_score', models.IntegerField(null=True)), ('time', models.DateTimeField(null=True)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='QuestionOptions', fields=[ ('q_opt_id', models.AutoField(primary_key=True, serialize=False)), ('option', models.CharField(max_length=255)), ('active', models.CharField(blank=True, max_length=1, null=True)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='QuestionType', fields=[ ('question_typ', models.CharField(max_length=50, primary_key=True, serialize=False)), ('question_typ_nm', models.CharField(max_length=255)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='ScoutAuthGroups', fields=[ ('scout_group', models.AutoField(primary_key=True, serialize=False, unique=True)), ], ), migrations.CreateModel( name='ScoutField', fields=[ ('scout_field_id', models.AutoField(primary_key=True, serialize=False)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='ScoutFieldAnswer', fields=[ ('sfa_id', models.AutoField(primary_key=True, serialize=False)), ('answer', models.CharField(blank=True, max_length=1000, null=True)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='ScoutFieldSchedule', fields=[ ('scout_field_sch_id', models.AutoField(primary_key=True, serialize=False)), ('st_time', models.DateTimeField()), ('end_time', models.DateTimeField()), ('notified', models.CharField(default='n', max_length=1)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='ScoutPit', fields=[ ('scout_pit_id', models.AutoField(primary_key=True, serialize=False)), ('img_id', models.CharField(blank=True, max_length=500, null=True)), ('img_ver', models.CharField(blank=True, max_length=500, null=True)), ('void_ind', 
models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='ScoutPitAnswer', fields=[ ('spa_id', models.AutoField(primary_key=True, serialize=False)), ('answer', models.CharField(blank=True, max_length=1000, null=True)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='ScoutQuestionType', fields=[ ('sq_typ', models.CharField(max_length=10, primary_key=True, serialize=False)), ('sq_nm', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='Season', fields=[ ('season_id', models.AutoField(primary_key=True, serialize=False)), ('season', models.CharField(max_length=45)), ('current', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='Team', fields=[ ('team_no', models.IntegerField(primary_key=True, serialize=False)), ('team_nm', models.CharField(max_length=255)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='ScoutQuestionSubType', fields=[ ('sq_sub_typ', models.CharField(max_length=10, primary_key=True, serialize=False)), ('sq_sub_nm', models.CharField(max_length=255)), ('sq_typ', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.scoutquestiontype')), ], ), migrations.CreateModel( name='ScoutQuestion', fields=[ ('sq_id', models.AutoField(primary_key=True, serialize=False)), ('question', models.CharField(max_length=1000)), ('order', models.IntegerField()), ('active', models.CharField(max_length=1)), ('void_ind', models.CharField(max_length=1)), ('question_typ', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.questiontype')), ('season', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='scouting.season')), ('sq_sub_typ', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='scouting.scoutquestionsubtype')), ('sq_typ', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.scoutquestiontype')), 
], ), migrations.CreateModel( name='ScoutPitSchedule', fields=[ ('scout_pit_sch_id', models.AutoField(primary_key=True, serialize=False)), ('st_time', models.DateTimeField()), ('end_time', models.DateTimeField()), ('notified', models.CharField(default='n', max_length=1)), ('void_ind', models.CharField(default='n', max_length=1)), ('event', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.event')), ], ), ] <file_sep>/user/serializers.py from rest_framework import serializers from django.contrib.auth.password_validation import validate_password, get_default_password_validators from django.core.validators import ValidationError class PermissionSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) name = serializers.CharField() content_type_id = serializers.IntegerField(read_only=True) codename = serializers.CharField() class GroupSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) name = serializers.CharField() permissions = PermissionSerializer(many=True, required=False) class PhoneTypeSerializer(serializers.Serializer): phone_type_id = serializers.IntegerField(read_only=True) carrier = serializers.CharField() phone_type = serializers.CharField() class UserSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) username = serializers.CharField() email = serializers.CharField() first_name = serializers.CharField() last_name = serializers.CharField() is_active = serializers.BooleanField() phone = serializers.CharField() groups = GroupSerializer(many=True, required=False) phone_type = PhoneTypeSerializer(required=False, allow_null=True) phone_type_id = serializers.IntegerField(required=False, allow_null=True) image = serializers.CharField(required=False) class UserCreationSerializer(serializers.Serializer): """ User serializer, used only for validation of fields upon user registration. 
""" username = serializers.CharField(required=True) email = serializers.CharField(required=True) password1 = serializers.CharField(required=True) password2 = serializers.CharField(required=True) first_name = serializers.CharField(required=True) last_name = serializers.CharField(required=True) class Meta: fields = ['username', 'email', 'password1', 'password2', 'first_name', 'last_name'] def validate_password1(self, validated_data): try: validate_password( validated_data, password_validators=get_default_password_validators()) except ValidationError as e: raise serializers.ValidationError({'password': str(e)}) return validated_data class UserUpdateSerializer(serializers.Serializer): email = serializers.EmailField(required=False) password = serializers.CharField(required=False) first_name = serializers.CharField(required=False) last_name = serializers.CharField(required=False) image = serializers.ImageField(required=False) class UserLinksSerializer(serializers.Serializer): user_links_id = serializers.IntegerField(read_only=True) permission = PermissionSerializer() menu_name = serializers.CharField() routerlink = serializers.CharField() order = serializers.IntegerField() class RetMessageSerializer(serializers.Serializer): retMessage = serializers.CharField() error = serializers.BooleanField() class GetAlertsSerializer(serializers.Serializer): alert_id = serializers.IntegerField() alert_channel_send_id = serializers.IntegerField() alert_subject = serializers.CharField() alert_body = serializers.CharField() staged_time = serializers.DateTimeField() <file_sep>/refresh_script_uat.sql GRANT ALL ON ALL TABLES IN SCHEMA public to parts_uat; GRANT ALL ON ALL SEQUENCES IN SCHEMA public to parts_uat; GRANT ALL ON ALL FUNCTIONS IN SCHEMA public to parts_uat;<file_sep>/user/migrations/0004_user_discord_user_id.py # Generated by Django 4.0.3 on 2023-03-25 17:32 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('user', 
'0003_alter_user_last_login'), ] operations = [ migrations.AddField( model_name='user', name='discord_user_id', field=models.CharField(blank=True, max_length=1000, null=True), ), ] <file_sep>/user/urls.py import webpush.views from django.urls import path, include from .views import UserData, UserLinksView, UserGroups, UserProfile, UserEmailConfirmation, UserEmailResendConfirmation, \ UserRequestPasswordReset, UserPasswordReset, UserRequestUsername, \ SaveWebPushInfo, TokenObtainPairView, TokenRefreshView, Alerts from rest_framework_simplejwt import views as jwt_views # Wire up our API using atomic URL routing. # Additionally, we include login URLs for the browsable API. urlpatterns = [ path('token/', TokenObtainPairView.as_view(), name='token_obtain_pair'), path('token/refresh/', jwt_views.TokenRefreshView.as_view(), name='token_refresh'), #TokenRefreshView.as_view() path('user-data/', UserData.as_view()), path('user-links/', UserLinksView.as_view()), path('user-groups/', UserGroups.as_view()), path('profile/', UserProfile.as_view()), path('confirm/', UserEmailConfirmation.as_view()), path('confirm/resend/', UserEmailResendConfirmation.as_view()), path('request-reset-password/', UserRequestPasswordReset.as_view()), path('reset-password/', UserPasswordReset.as_view()), path('request-username/', UserRequestUsername.as_view()), path('alerts/', Alerts.as_view()), path('webpush-save/', SaveWebPushInfo.as_view()), ] <file_sep>/form/migrations/0010_questiontype_is_list.py # Generated by Django 4.2 on 2023-07-14 00:18 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('form', '0009_formsubtype_order'), ] operations = [ migrations.AddField( model_name='questiontype', name='is_list', field=models.CharField(default='n', max_length=1), ), ] <file_sep>/scouting/admin/apps.py from django.apps import AppConfig class ScoutadminConfig(AppConfig): name = 'scoutAdmin' <file_sep>/scouting/portal/views.py import datetime import pytz from 
django.db.models.functions import Lower from django.utils import timezone from rest_framework_simplejwt.authentication import JWTAuthentication from rest_framework.permissions import IsAuthenticated from general import send_message from user.models import User from .serializers import InitSerializer, ScheduleSaveSerializer from scouting.models import ScoutFieldSchedule, Event, Schedule, ScheduleType from rest_framework.views import APIView from general.security import has_access, ret_message from django.db.models import Q from rest_framework.response import Response auth_obj = 54 scheduling_auth_obj = 57 app_url = 'scouting/portal/' class Init(APIView): """ API endpoint to get the init values for the scout portal """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'init/' def get_init(self): user = self.request.user try: current_event = Event.objects.get(Q(current='y') & Q(void_ind='n')) except Exception as e: return ret_message('No event set, see an admin.', True, app_url + self.endpoint, self.request.user.id, e) users = None all_sfs_parsed = None all_sch_parsed = None schedule_types = None if has_access(self.request.user.id, scheduling_auth_obj): users = User.objects.filter(Q(is_active=True) & Q( date_joined__isnull=False)).order_by(Lower('first_name'), Lower('last_name')) all_sfs = ScoutFieldSchedule.objects.filter( Q(event=current_event) & Q(void_ind='n')) \ .order_by('notification3', 'st_time') all_sfs_parsed = [] for s in all_sfs: all_sfs_parsed.append(self.parse_sfs(s)) all_sch = Schedule.objects.filter(Q(event=current_event) & Q(void_ind='n')).order_by('sch_typ', 'notified', 'st_time') all_sch_parsed = [] for s in all_sch: all_sch_parsed.append(self.parse_sch(s)) schedule_types = ScheduleType.objects.all().order_by('sch_nm') sfs = ScoutFieldSchedule.objects.filter(Q(event=current_event) & Q(end_time__gte=(timezone.now() - datetime.timedelta(hours=1))) & Q(void_ind='n') & Q(Q(red_one=user) | Q(red_two=user) | 
Q(red_three=user) | Q(blue_one=user) | Q(blue_two=user) | Q(blue_three=user)) ).order_by('notification3', 'st_time') sfs_parsed = [] for s in sfs: sfs_parsed.append(self.parse_sfs(s)) sch = Schedule.objects.filter(Q(event=current_event) & Q(user=user) & Q(end_time__gte=(timezone.now() - datetime.timedelta(hours=1))) & Q(void_ind='n')) \ .order_by('notified', 'st_time') sch_parsed = [] for s in sch: sch_parsed.append(self.parse_sch(s)) return {'fieldSchedule': sfs_parsed, 'schedule': sch_parsed, 'allFieldSchedule': all_sfs_parsed, 'allSchedule': all_sch_parsed, 'users': users, 'scheduleTypes': schedule_types} def parse_sfs(self, s): return { 'scout_field_sch_id': s.scout_field_sch_id, 'event_id': s.event_id, 'st_time': s.st_time, 'end_time': s.end_time, 'notification1': s.notification1, 'notification2': s.notification2, 'notification3': s.notification3, 'red_one_id': s.red_one, 'red_two_id': s.red_two, 'red_three_id': s.red_three, 'blue_one_id': s.blue_one, 'blue_two_id': s.blue_two, 'blue_three_id': s.blue_three, 'scouts': 'R1: ' + ('' if s.red_one is None else s.red_one.first_name + ' ' + s.red_one.last_name[0:1]) + '\nR2: ' + ('' if s.red_two is None else s.red_two.first_name + ' ' + s.red_two.last_name[0:1]) + '\nR3: ' + ('' if s.red_three is None else s.red_three.first_name + ' ' + s.red_three.last_name[0:1]) + '\nB1: ' + ('' if s.blue_one is None else s.blue_one.first_name + ' ' + s.blue_one.last_name[0:1]) + '\nB2: ' + ('' if s.blue_two is None else s.blue_two.first_name + ' ' + s.blue_two.last_name[0:1]) + '\nB3: ' + ('' if s.blue_three is None else s.blue_three.first_name + ' ' + s.blue_three.last_name[0:1]) } def parse_sch(self, s): return { 'sch_id': s.sch_id, 'sch_typ': s.sch_typ.sch_typ, 'sch_nm': s.sch_typ.sch_nm, 'event_id': s.event_id, 'st_time': s.st_time, 'end_time': s.end_time, 'notified': s.notified, 'user': s.user, 'user_name': s.user.first_name + ' ' + s.user.last_name } def get(self, request, format=None): if has_access(request.user.id, 
auth_obj): try: req = self.get_init() if isinstance(req, Response): return req serializer = InitSerializer(req) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while initializing.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access,', True, app_url + self.endpoint, request.user.id) class SaveScheduleEntry(APIView): """API endpoint to save a schedule entry""" authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'save-schedule-entry/' def save_schedule(self, serializer): """ if serializer.validated_data['st_time'] <= timezone.now(): return ret_message('Start time can\'t be in the past.', True, app_url + self.endpoint, self.request.user.id) """ if serializer.validated_data['end_time'] <= serializer.validated_data['st_time']: return ret_message('End time can\'t come before start.', True, app_url + self.endpoint, self.request.user.id) if serializer.validated_data.get('sch_id', None) is None: serializer.save() return ret_message('Saved schedule entry successfully') else: s = Schedule.objects.get( sch_id=serializer.validated_data['sch_id']) s.user_id = serializer.validated_data.get('user', None) s.sch_typ_id = serializer.validated_data.get('sch_typ', None) s.st_time = serializer.validated_data['st_time'] s.end_time = serializer.validated_data['end_time'] s.void_ind = serializer.validated_data['void_ind'] s.save() return ret_message('Updated schedule entry successfully') def post(self, request, format=None): serializer = ScheduleSaveSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, scheduling_auth_obj): try: req = self.save_schedule(serializer) return req except Exception as e: return ret_message('An error occurred while saving the schedule entry.', True, app_url + self.endpoint, 
request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class NotifyUser(APIView): """API endpoint to notify users""" authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'notify-user/' def notify_user(self, id): message = '' event = Event.objects.get(Q(current='y') & Q(void_ind='n')) sch = Schedule.objects.get(sch_id=id) date_st_utc = sch.st_time.astimezone(pytz.utc) date_end_utc = sch.end_time.astimezone(pytz.utc) date_st_local = date_st_utc.astimezone(pytz.timezone(event.timezone)) date_end_local = date_end_utc.astimezone(pytz.timezone(event.timezone)) date_st_str = date_st_local.strftime("%m/%d/%Y, %I:%M%p") date_end_str = date_end_local.strftime("%m/%d/%Y, %I:%M%p") try: send_message.send_email( sch.user.phone + sch.user.phone_type.phone_type, 'Pit time!', 'notify_schedule', data) message += 'Phone Notified: ' + sch.user.first_name + ' : ' + sch.sch_typ.sch_nm + '\n' sch.notified = True sch.save() except Exception as e: message += 'Phone unable to notify: ' + \ (sch.user.first_name if sch.user is not None else "pit time user missing") + '\n' discord_message = f'Scheduled time in the pit, for {sch.sch_typ.sch_nm} from ' \ f'{date_st_str} to {date_end_str} : ' discord_message += (f'<@{sch.user.discord_user_id}>' if sch.user.discord_user_id is not None else sch.user.first_name) message += 'Discord Notified: ' + sch.user.first_name + ' : ' + sch.sch_typ.sch_nm + '\n' send_message.send_discord_notification(discord_message) sch.save() return ret_message(message) def get(self, request, format=None): if has_access(request.user.id, scheduling_auth_obj): try: req = self.notify_user(request.query_params.get( 'id', None)) return req except Exception as e: return ret_message('An error occurred while notifying the user.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, 
request.user.id)<file_sep>/scouting/migrations/0010_scoutfield_match.py # Generated by Django 4.0.3 on 2023-04-21 00:34 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('scouting', '0009_teamnotes'), ] operations = [ migrations.AddField( model_name='scoutfield', name='match', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='scouting.match'), ), ] <file_sep>/scouting/admin/views.py import datetime import pytz from django.contrib.auth.models import Group from django.db import IntegrityError from django.core.exceptions import ObjectDoesNotExist from rest_framework_simplejwt.authentication import JWTAuthentication from rest_framework.permissions import IsAuthenticated from rest_framework.utils import json from form.models import QuestionAnswer, Question, QuestionOption, QuestionType, FormSubType, FormType from general import send_message from user.models import User, PhoneType from .serializers import * from scouting.models import Season, Event, ScoutAuthGroups, ScoutFieldSchedule, Team, \ CompetitionLevel, Match, EventTeamInfo, ScoutField, ScoutPit from rest_framework.views import APIView from general.security import has_access, ret_message import requests from django.conf import settings from django.db.models.functions import Lower from django.db.models import Q from rest_framework.response import Response import form.util auth_obj = 50 app_url = 'scouting/admin/' class Init(APIView): """ API endpoint to get all the init values for the scout admin screen """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'init/' def init(self): seasons = Season.objects.all().order_by('season') events = Event.objects.filter(void_ind='n').order_by( 'season__season', Lower('event_nm')) try: current_season = Season.objects.get(current='y') except Exception as e: current_season = Season() try: current_event = 
Event.objects.get( Q(season=current_season) & Q(current='y') & Q(void_ind='n')) except Exception as e: current_event = Event() users = User.objects.filter(Q(is_active=True) & Q( date_joined__isnull=False)).order_by(Lower('first_name'), Lower('last_name')) user_groups = [] try: user_groups = Group.objects.filter(id__in=list( ScoutAuthGroups.objects.all().values_list('auth_group_id', flat=True))).order_by('name') except Exception as e: user_groups = [] phone_types = PhoneType.objects.all().order_by(Lower('carrier')) fieldSchedule = [] fsf = ScoutFieldSchedule.objects.select_related('red_one', 'red_two', 'red_three', 'blue_one', 'blue_two', 'blue_three').filter( event=current_event, void_ind='n').order_by('notification3', 'st_time') for fs in fsf: fieldSchedule.append({ 'scout_field_sch_id': fs.scout_field_sch_id, 'event_id': fs.event_id, 'st_time': fs.st_time, 'end_time': fs.end_time, 'notification1': fs.notification1, 'notification2': fs.notification2, 'notification3': fs.notification3, 'red_one_id': fs.red_one, 'red_two_id': fs.red_two, 'red_three_id': fs.red_three, 'blue_one_id': fs.blue_one, 'blue_two_id': fs.blue_two, 'blue_three_id': fs.blue_three, 'scouts': 'R1: ' + ('' if fs.red_one is None else fs.red_one.first_name + ' ' + fs.red_one.last_name[0:1]) + '\nR2: ' + ('' if fs.red_two is None else fs.red_two.first_name + ' ' + fs.red_two.last_name[0:1]) + '\nR3: ' + ('' if fs.red_three is None else fs.red_three.first_name + ' ' + fs.red_three.last_name[ 0:1]) + '\nB1: ' + ('' if fs.blue_one is None else fs.blue_one.first_name + ' ' + fs.blue_one.last_name[0:1]) + '\nB2: ' + ('' if fs.blue_two is None else fs.blue_two.first_name + ' ' + fs.blue_two.last_name[0:1]) + '\nB3: ' + ('' if fs.blue_three is None else fs.blue_three.first_name + ' ' + fs.blue_three.last_name[ 0:1]) }) teams = Team.objects.filter(void_ind='n').order_by('team_no') scoutQuestionType = FormType.objects.all() return {'seasons': seasons, 'events': events, 'currentSeason': current_season, 
'currentEvent': current_event, 'users': users, 'userGroups': user_groups, 'phoneTypes': phone_types, 'fieldSchedule': fieldSchedule, # 'pitSchedule': pitSchedule, 'scoutQuestionType': scoutQuestionType, 'teams': teams} def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.init() serializer = InitSerializer(req) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while initializing.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class SyncSeason(APIView): """ API endpoint to sync a season """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'sync-season/' def sync_season(self, season_id): season = Season.objects.get(season_id=season_id) insert = [] r = requests.get("https://www.thebluealliance.com/api/v3/team/frc3492/events/" + str(season.season), headers={"X-TBA-Auth-Key": settings.TBA_KEY}) r = json.loads(r.text) for e in r: time_zone = e.get('timezone') if e.get( 'timezone', None) is not None else 'America/New_York' event_ = { 'event_nm': e['name'], 'date_st': datetime.datetime.strptime(e['start_date'], '%Y-%m-%d').astimezone(pytz.timezone(time_zone)), 'date_end': datetime.datetime.strptime(e['end_date'], '%Y-%m-%d').astimezone(pytz.timezone(time_zone)), 'event_cd': e['key'], 'event_url': e.get('event_url', None), 'gmaps_url': e.get('gmaps_url', None), 'address': e.get('address', None), 'city': e.get('city', None), 'state_prov': e.get('state_prov', None), 'postal_code': e.get('postal_code', None), 'location_name': e.get('location_name', None), 'timezone': e.get('timezone', 'America/New_York'), 'webcast_url': e['webcasts'][0]['channel'] if len(e['webcasts']) > 0 else '', 'teams': [], 'teams_to_keep': [] } s = requests.get("https://www.thebluealliance.com/api/v3/event/" + e['key'] + "/teams", headers={ "X-TBA-Auth-Key": 
settings.TBA_KEY}) s = json.loads(s.text) for t in s: event_['teams'].append({ 'team_no': t['team_number'], 'team_nm': t['nickname'] }) event_['teams_to_keep'].append(t['team_number']) insert.append(event_) messages = '' for e in insert: try: Event(season=season, event_nm=e['event_nm'], date_st=e['date_st'], date_end=e['date_end'], event_cd=e['event_cd'], event_url=e['event_url'], address=e['address'], city=e['city'], state_prov=e['state_prov'], postal_code=e['postal_code'], location_name=e['location_name'], gmaps_url=e['gmaps_url'], webcast_url=e['webcast_url'], timezone=e['timezone'], current='n', competition_page_active='n', void_ind='n').save(force_insert=True) messages += "(ADD) Added event: " + e['event_cd'] + '\n' except IntegrityError: event = Event.objects.get( Q(event_cd=e['event_cd']) & Q(void_ind='n')) event.date_st = e['date_st'] event.event_url = e['event_url'] event.address = e['address'] event.city = e['city'] event.state_prov = e['state_prov'] event.postal_code = e['postal_code'] event.location_name = e['location_name'] event.gmaps_url = e['gmaps_url'] event.webcast_url = e['webcast_url'] event.date_end = e['date_end'] event.timezone = e['timezone'] event.save() messages += "(NO ADD) Already have event: " + \ e['event_cd'] + '\n' # remove teams that have been removed from an event event = Event.objects.get(event_cd=e['event_cd'], void_ind='n') teams = Team.objects.filter( ~Q(team_no__in=e['teams_to_keep']) & Q(event=event)) for team in teams: team.event_set.remove(event) for t in e['teams']: try: Team(team_no=t['team_no'], team_nm=t['team_nm'], void_ind='n').save( force_insert=True) messages += "(ADD) Added team: " + \ str(t['team_no']) + " " + t['team_nm'] + '\n' except IntegrityError: messages += "(NO ADD) Already have team: " + \ str(t['team_no']) + " " + t['team_nm'] + '\n' try: # TODO it doesn't throw an error, but re-linking many to many only keeps one entry in the table for the link team = Team.objects.get(team_no=t['team_no']) 
team.event_set.add( Event.objects.get(event_cd=e['event_cd'], void_ind='n')) messages += "(ADD) Added team: " + str(t['team_no']) + " " + t['team_nm'] + " to event: " + e[ 'event_cd'] + '\n' except IntegrityError: messages += "(NO ADD) Team: " + str(t['team_no']) + " " + t['team_nm'] + " already at event: " + \ e['event_cd'] + '\n' return messages def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.sync_season( request.query_params.get('season_id', None)) return ret_message(req) except Exception as e: return ret_message('An error occurred while syncing the season/event/teams.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class SyncMatches(APIView): """ API endpoint to sync a season """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'sync-matches/' def sync_matches(self): event = Event.objects.get(current='y') insert = [] messages = '' r = requests.get("https://www.thebluealliance.com/api/v3/event/" + event.event_cd + "/matches", headers={"X-TBA-Auth-Key": settings.TBA_KEY}) r = json.loads(r.text) match_number = "" try: for e in r: match_number = e.get('match_number', 0) red_one = Team.objects.get( Q(team_no=e['alliances']['red']['team_keys'][0].replace('frc', '')) & Q(void_ind='n')) red_two = Team.objects.get( Q(team_no=e['alliances']['red']['team_keys'][1].replace('frc', '')) & Q(void_ind='n')) red_three = Team.objects.get( Q(team_no=e['alliances']['red']['team_keys'][2].replace('frc', '')) & Q(void_ind='n')) blue_one = Team.objects.get( Q(team_no=e['alliances']['blue']['team_keys'][0].replace('frc', '')) & Q(void_ind='n')) blue_two = Team.objects.get( Q(team_no=e['alliances']['blue']['team_keys'][1].replace('frc', '')) & Q(void_ind='n')) blue_three = Team.objects.get( Q(team_no=e['alliances']['blue']['team_keys'][2].replace('frc', '')) & Q(void_ind='n')) red_score = 
e['alliances']['red'].get('score', None) blue_score = e['alliances']['blue'].get('score', None) comp_level = CompetitionLevel.objects.get(Q( comp_lvl_typ=e.get('comp_level', ' ')) & Q(void_ind='n')) time = datetime.datetime.fromtimestamp( e['time'], pytz.timezone('America/New_York')) if e['time'] else None match_key = e['key'] try: if (comp_level.comp_lvl_typ == 'qf'): print(e) match = Match.objects.get( Q(match_id=match_key) & Q(void_ind='n')) match.red_one = red_one match.red_two = red_two match.red_three = red_three match.blue_one = blue_one match.blue_two = blue_two match.blue_three = blue_three match.red_score = red_score match.blue_score = blue_score match.comp_level = comp_level match.time = time match.save() messages += '(UPDATE) ' + event.event_nm + \ ' ' + comp_level.comp_lvl_typ_nm + \ ' ' + str(match_number) + ' ' + match_key + '\n' except ObjectDoesNotExist as odne: match = Match(match_id=match_key, match_number=match_number, event=event, red_one=red_one, red_two=red_two, red_three=red_three, blue_one=blue_one, blue_two=blue_two, blue_three=blue_three, red_score=red_score, blue_score=blue_score, comp_level=comp_level, time=time, void_ind='n') match.save() messages += '(ADD) ' + event.event_nm + \ ' ' + comp_level.comp_lvl_typ_nm + \ ' ' + str(match_number) + ' ' + match_key + '\n' except: messages += '(EROR) ' + event.event_nm + \ ' ' + match_number + '\n' return messages def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.sync_matches() return ret_message(req) except Exception as e: return ret_message('An error occurred while syncing matches.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class SyncEventTeamInfo(APIView): """ API endpoint to sync the info for a teams at an event """ # authentication_classes = (JWTAuthentication,) # permission_classes = (IsAuthenticated,) endpoint = 'sync-event-team-info/' 
def sync_event_team_info(self): event = Event.objects.get(current='y') insert = [] messages = '' r = requests.get("https://www.thebluealliance.com/api/v3/event/" + event.event_cd + "/rankings", headers={"X-TBA-Auth-Key": settings.TBA_KEY}) r = json.loads(r.text) if r is None: return 'Nothing to sync' for e in r.get('rankings', []): matches_played = e.get('matches_played', 0) qual_average = e.get('qual_average', 0) losses = e.get('record', 0).get('losses', 0) wins = e.get('record', 0).get('wins', 0) ties = e.get('record', 0).get('ties', 0) rank = e.get('rank', 0) dq = e.get('dq', 0) team = Team.objects.get( Q(team_no=e['team_key'].replace('frc', '')) & Q(void_ind='n')) try: eti = EventTeamInfo.objects.get( Q(event=event) & Q(team_no=team) & Q(void_ind='n')) eti.matches_played = matches_played eti.qual_average = qual_average eti.losses = losses eti.wins = wins eti.ties = ties eti.rank = rank eti.dq = dq eti.save() messages += '(UPDATE) ' + event.event_nm + \ ' ' + str(team.team_no) + '\n' except ObjectDoesNotExist as odne: eti = EventTeamInfo(event=event, team_no=team, matches_played=matches_played, qual_average=qual_average, losses=losses, wins=wins, ties=ties, rank=rank, dq=dq) eti.save() messages += '(ADD) ' + event.event_nm + \ ' ' + str(team.team_no) + '\n' return messages def get(self, request, format=None): if True or has_access(request.user.id, auth_obj): try: req = self.sync_event_team_info() return ret_message(req) except Exception as e: return ret_message('An error occurred while syncing event team info.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class SetSeason(APIView): """ API endpoint to set the season """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'set-season/' def set(self, season_id, event_id): msg = "" Season.objects.filter(current='y').update(current='n') season = 
Season.objects.get(season_id=season_id) season.current = 'y' season.save() msg = "Successfully set the season to: " + season.season if event_id is not None: Event.objects.filter(current='y').update( current='n', competition_page_active='n') event = Event.objects.get(event_id=event_id) event.current = 'y' event.save() msg += "\nSuccessfully set the event to: " + event.event_nm return ret_message(msg) def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.set(request.query_params.get( 'season_id', None), request.query_params.get('event_id', None)) return req except Exception as e: return ret_message('An error occurred while setting the season.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class ToggleCompetitionPage(APIView): """ API endpoint to toggle a scout field question """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'toggle-competition-page/' def toggle(self, sq_id): try: event = Event.objects.get(Q(current='y') & Q(void_ind='n')) if event.competition_page_active == 'n': event.competition_page_active = 'y' else: event.competition_page_active = 'n' event.save() except ObjectDoesNotExist as odne: return ret_message('No active event, can\'t activate competition page', True, app_url + self.endpoint, self.request.user.id, odne) return ret_message('Successfully activated competition page.') def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.toggle(request.query_params.get('sq_id', None)) return req except Exception as e: return ret_message('An error occurred while toggling the competition page.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class AddSeason(APIView): """ API endpoint to add a season """ 
authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'add-season/' def add(self, season): try: Season.objects.get(season=season) return ret_message('Season not added. Season ' + season + ' already exists.', True, app_url + self.endpoint, self.request.user.id) except Exception as e: Season(season=season, current='n').save() return ret_message('Successfully added season: ' + season) def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.add(request.query_params.get('season', None)) return req except Exception as e: return ret_message('An error occurred while setting the season.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class AddEvent(APIView): """ API endpoint to add a event """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'add-event/' def post(self, request, format=None): serializer = EventSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: serializer.save() return ret_message('Successfully added the event.') except Exception as e: return ret_message('An error occurred while saving the event.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class DeleteEvent(APIView): """ API endpoint to delete an event """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'delete-event/' def delete(self, event_id): e = Event.objects.get(event_id=event_id) teams_at_event = Team.objects.filter(event=e) for t in teams_at_event: t.event_set.remove(e) scout_fields = ScoutField.objects.filter(event=e) for sf in scout_fields: 
scout_field_answers = QuestionAnswer.objects.filter( scout_field=sf) for sfa in scout_field_answers: sfa.delete() sf.delete() scout_pits = ScoutPit.objects.filter(event=e) for sp in scout_pits: scout_pit_answers = QuestionAnswer.objects.filter(scout_pit=sp) for spa in scout_pit_answers: spa.delete() sp.delete() matches = Match.objects.filter(event=e) for m in matches: m.delete() scout_field_schedules = ScoutFieldSchedule.objects.filter(event=e) for sfs in scout_field_schedules: sfs.delete() """ scout_pit_schedules = ScoutPitSchedule.objects.filter(event=e) for sps in scout_pit_schedules: sps.delete() """ e.delete() return ret_message('Successfully deleted event: ' + e.event_nm) def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.delete(request.query_params.get('event_id', None)) return req except Exception as e: return ret_message('An error occurred while deleting the event.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class AddTeam(APIView): """ API endpoint to add a event """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'add-team/' def post(self, request, format=None): serializer = TeamCreateSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: serializer.save() return ret_message('Successfully added the team.') except Exception as e: return ret_message('An error occurred while saving the team.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class AddTeamToEvent(APIView): """ API endpoint to add a team to an event """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) 
endpoint = 'add-team-to-event/' def link(self, data): messages = '' for t in data.get('teams', []): try: # TODO it doesn't throw an error, but re-linking many to many only keeps one entry in the table for the link if t.get('checked', False): team = Team.objects.get(team_no=t['team_no'], void_ind='n') e = Event.objects.get( event_id=data['event_id'], void_ind='n') team.event_set.add(e) messages += "(ADD) Added team: " + str( t['team_no']) + " " + t['team_nm'] + " to event: " + e.event_cd + '\n' except IntegrityError: messages += "(NO ADD) Team: " + str(t['team_no']) + " " + t['team_nm'] + " already at event: " + \ e.event_cd + '\n' return messages def post(self, request, format=None): serializer = EventToTeamsSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: req = self.link(serializer.validated_data) return ret_message(req) except Exception as e: return ret_message('An error occurred while saving the team.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class RemoveTeamToEvent(APIView): """ API endpoint to remove a team from an event """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'remove-team-to-event/' def link(self, data): messages = '' for t in data.get('team_no', []): try: # TODO it doesn't throw an error, but re-linking many to many only keeps one entry in the table for the link if not t.get('checked', True): team = Team.objects.get(team_no=t['team_no'], void_ind='n') e = Event.objects.get( event_id=data['event_id'], void_ind='n') team.event_set.remove(e) messages += "(REMOVE) Removed team: " + str( t['team_no']) + " " + t['team_nm'] + " from event: " + e.event_cd + '\n' except IntegrityError: messages += "(NO REMOVE) Team: " + str(t['team_no']) + " 
" + t['team_nm'] + " from event: " + \ e.event_cd + '\n' return messages def post(self, request, format=None): serializer = EventTeamSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: req = self.link(serializer.validated_data) return ret_message(req) except Exception as e: return ret_message('An error occurred while removing the team.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class DeleteSeason(APIView): """ API endpoint to delete a season """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'delete-season/' def delete(self, season_id): season = Season.objects.get(season_id=season_id) events = Event.objects.filter(season=season) for e in events: teams_at_event = Team.objects.filter(event=e) for t in teams_at_event: t.event_set.remove(e) scout_fields = ScoutField.objects.filter(event=e) for sf in scout_fields: scout_field_answers = QuestionAnswer.objects.filter( scout_field=sf) for sfa in scout_field_answers: sfa.delete() sf.delete() scout_pits = ScoutPit.objects.filter(event=e) for sp in scout_pits: scout_pit_answers = QuestionAnswer.objects.filter(scout_pit=sp) for spa in scout_pit_answers: spa.delete() sp.delete() scout_questions = Question.objects.filter(season=season) for sq in scout_questions: question_options = QuestionOption.objects.filter(question=sq) for qo in question_options: qo.delete() sq.delete() matches = Match.objects.filter(event=e) for m in matches: m.delete() scout_field_schedules = ScoutFieldSchedule.objects.filter(event=e) for sfs in scout_field_schedules: sfs.delete() """ scout_pit_schedules = ScoutPitSchedule.objects.filter(event=e) for sps in scout_pit_schedules: sps.delete() """ e.delete() season.delete() return 
ret_message('Successfully deleted season: ' + season.season) def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.delete(request.query_params.get('season_id', None)) return req except Exception as e: return ret_message('An error occurred while deleting the season.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class SaveScoutFieldScheduleEntry(APIView): """API endpoint to save scout schedule entry""" authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'save-scout-field-schedule-entry/' def save_scout_schedule(self, serializer): """ if serializer.validated_data['st_time'] <= timezone.now(): return ret_message('Start time can\'t be in the past.', True, app_url + self.endpoint, self.request.user.id) """ if serializer.validated_data['end_time'] <= serializer.validated_data['st_time']: return ret_message('End time can\'t come before start.', True, app_url + self.endpoint, self.request.user.id) if serializer.validated_data.get('scout_field_sch_id', None) is None: serializer.save() return ret_message('Saved schedule entry successfully') else: sfs = ScoutFieldSchedule.objects.get( scout_field_sch_id=serializer.validated_data['scout_field_sch_id']) sfs.red_one_id = serializer.validated_data.get('red_one_id', None) sfs.red_two_id = serializer.validated_data.get('red_two_id', None) sfs.red_three_id = serializer.validated_data.get('red_three_id', None) sfs.blue_one_id = serializer.validated_data.get('blue_one_id', None) sfs.blue_two_id = serializer.validated_data.get('blue_two_id', None) sfs.blue_three_id = serializer.validated_data.get('blue_three_id', None) sfs.st_time = serializer.validated_data['st_time'] sfs.end_time = serializer.validated_data['end_time'] sfs.void_ind = serializer.validated_data['void_ind'] sfs.save() return ret_message('Updated schedule entry successfully') def 
post(self, request, format=None): serializer = ScoutFieldScheduleSaveSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: req = self.save_scout_schedule(serializer) return req except Exception as e: return ret_message('An error occurred while saving the schedule entry.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class NotifyUsers(APIView): """API endpoint to notify users""" authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'notify-users/' def notify_users(self, id): event = Event.objects.get(Q(current='y') & Q(void_ind='n')) sfs = ScoutFieldSchedule.objects.get(scout_field_sch_id=id) date_st_utc = sfs.st_time.astimezone(pytz.utc) date_end_utc = sfs.end_time.astimezone(pytz.utc) date_st_local = date_st_utc.astimezone(pytz.timezone(event.timezone)) date_end_local = date_end_utc.astimezone(pytz.timezone(event.timezone)) date_st_str = date_st_local.strftime("%m/%d/%Y, %I:%M%p") date_end_str = date_end_local.strftime("%m/%d/%Y, %I:%M%p") data = { 'scout_location': 'Field', 'scout_time_st': date_st_str, 'scout_time_end': date_end_str, 'lead_scout': self.request.user.first_name + ' ' + self.request.user.last_name } message = '' try: send_message.send_email( sfs.red_one.phone + sfs.red_one.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Notified: ' + sfs.red_one.first_name + '\n' except Exception as e: message += 'Unable to notify: ' + \ (sfs.red_one.first_name if sfs.red_one is not None else "red one") + '\n' try: send_message.send_email( sfs.red_two.phone + sfs.red_two.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Notified: ' + sfs.red_two.first_name + '\n' except Exception as e: message += 'Unable to notify: 
' + \ (sfs.red_two.first_name if sfs.red_two is not None else "red two") + '\n' try: send_message.send_email( sfs.red_three.phone + sfs.red_three.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Notified: ' + sfs.red_three.first_name + '\n' except Exception as e: message += 'Unable to notify: ' + \ (sfs.red_three.first_name if sfs.red_three is not None else "red three") + '\n' try: send_message.send_email( sfs.blue_one.phone + sfs.blue_one.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Notified: ' + sfs.blue_one.first_name + '\n' except Exception as e: message += 'Unable to notify: ' + \ (sfs.blue_one.first_name if sfs.blue_one is not None else "blue one") + '\n' try: send_message.send_email( sfs.blue_two.phone + sfs.blue_two.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Notified: ' + sfs.blue_two.first_name + '\n' except Exception as e: message += 'Unable to notify: ' + \ (sfs.blue_two.first_name if sfs.blue_two is not None else "blue two") + '\n' try: send_message.send_email( sfs.blue_three.phone + sfs.blue_three.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Notified: ' + sfs.blue_three.first_name + '\n' except Exception as e: message += 'Unable to notify: ' + \ (sfs.blue_three.first_name if sfs.blue_three is not None else "blue three") + '\n' discord_message = f'Scheduled time for scouting from ' \ f'{date_st_str} to {date_end_str} : ' discord_message += ((f'<@{sfs.red_one.discord_user_id}>' if sfs.red_one.discord_user_id is not None else sfs.red_one.first_name) if sfs.red_one is not None else "red one") + ', ' discord_message += ((f'<@{sfs.red_two.discord_user_id}>' if sfs.red_two.discord_user_id is not None else sfs.red_two.first_name) if sfs.red_two is not None else "red two") + ', ' discord_message += ((f'<@{sfs.red_three.discord_user_id}>' if sfs.red_three.discord_user_id is not None else sfs.red_three.first_name) if sfs.red_three is not None else 
"red three") + ', ' discord_message += ((f'<@{sfs.blue_one.discord_user_id}>' if sfs.blue_one.discord_user_id is not None else sfs.blue_one.first_name) if sfs.blue_one is not None else "blue one") + ', ' discord_message += ((f'<@{sfs.blue_two.discord_user_id}>' if sfs.blue_two.discord_user_id is not None else sfs.blue_two.first_name) if sfs.blue_two is not None else "blue two") + ', ' discord_message += ((f'<@{sfs.blue_three.discord_user_id}>' if sfs.blue_three.discord_user_id is not None else sfs.blue_three.first_name) if sfs.blue_three is not None else "blue three") send_message.send_discord_notification(discord_message) sfs.save() return ret_message(message) def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.notify_users(request.query_params.get( 'id', None)) return req except Exception as e: return ret_message('An error occurred while notifying the users.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class SavePhoneType(APIView): """API endpoint to save phone types""" authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'save-phone-type/' def save_phone_type(self, data): if data.get('phone_type_id', None) is not None: pt = PhoneType.objects.get(phone_type_id=data['phone_type_id']) pt.phone_type = data['phone_type'] pt.carrier = data['carrier'] pt.save() else: PhoneType(phone_type=data['phone_type'], carrier=data['carrier']).save() return ret_message('Successfully saved phone type.') def post(self, request, format=None): serializer = PhoneTypeSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: req = self.save_phone_type(serializer.validated_data) return req except Exception as e: return ret_message('An error occurred 
while saving phone type.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) <file_sep>/user/models.py import django from django.db import models from django.contrib.auth.models import Permission, AbstractBaseUser, BaseUserManager, PermissionsMixin class UserLinks(models.Model): user_links_id = models.AutoField(primary_key=True) permission = models.ForeignKey(Permission, models.PROTECT) menu_name = models.CharField(max_length=255) routerlink = models.CharField(max_length=255) order = models.IntegerField() def __str__(self): return str(self.user_links_id) + ' ' + self.menu_name class PhoneType(models.Model): phone_type_id = models.AutoField(primary_key=True) carrier = models.CharField(max_length=255) phone_type = models.CharField(max_length=255) def __str__(self): return str(self.phone_type_id) + ' ' + self.carrier + ' ' + self.phone_type class ProfileManager(BaseUserManager): def create_user(self, email, username, password=<PASSWORD>, first_name="", last_name=""): if not email: raise ValueError("Email required") if not username: raise ValueError("Username required") user = self.model( username=username.lower(), email=self.normalize_email(email.lower()), first_name=first_name, last_name=last_name, ) user.set_password(<PASSWORD>) user.save(using=self._db) return user def create_superuser(self, email, username, password, first_name, last_name): user = self.create_user(email=email, username=username, first_name=first_name, last_name=last_name, password=<PASSWORD>) user.is_superuser = True user.is_active = True user.save(using=self._db) return user class User(AbstractBaseUser, PermissionsMixin): # user model required fields email = models.EmailField(max_length=60, unique=True) username = models.CharField(max_length=30, unique=True) date_joined = models.DateTimeField(auto_now_add=True) last_login = models.DateTimeField(default=None, null=True) is_active = 
models.BooleanField(default=False) is_superuser = models.BooleanField(default=False) # extra info first_name = models.CharField(max_length=50, null=False) last_name = models.CharField(max_length=50, null=False) phone = models.CharField(max_length=10, blank=True, null=True) phone_type = models.ForeignKey( PhoneType, models.PROTECT, blank=True, null=True) discord_user_id = models.CharField(max_length=1000, blank=True, null=True) img_id = models.CharField(max_length=500, blank=True, null=True) img_ver = models.CharField(max_length=500, blank=True, null=True) # sets what the user will log in with USERNAME_FIELD = 'username' # what is required by the model (do not put username and password here, this is used for the create superuser function which has those by default) REQUIRED_FIELDS = ['email', 'first_name', 'last_name'] # user manager class objects = ProfileManager() def __str__(self): return "User name: {}, email: {} ".format(self.username, self.email) def has_perm(self, perm, obj=None): # TODO revisit this return self.is_superuser def has_module_perms(self, app_Label): # TODO revisit this return True <file_sep>/sponsoring/views.py from django.db import transaction from django.db.models import Q from rest_framework.response import Response from rest_framework.permissions import IsAuthenticated from rest_framework.views import APIView from rest_framework_simplejwt.authentication import JWTAuthentication import sponsoring.util from general.security import ret_message, has_access from sponsoring.serializers import ItemSerializer, SponsorSerializer, ItemSponsorSerializer, SaveItemSerializer, \ SaveSponsorOrderSerializer auth_obj = 50 app_url = 'sponsoring/' class GetItems(APIView): """ API endpoint to items """ endpoint = 'get-items/' def get(self, request, format=None): try: items = sponsoring.util.get_items() serializer = ItemSerializer(items, many=True) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while getting 
items.', True, app_url + self.endpoint, request.user.id, e) class GetSponsors(APIView): """ API endpoint to get sponsors """ endpoint = 'get-sponsors/' def get(self, request, format=None): try: sponsors = sponsoring.util.get_sponsors() serializer = SponsorSerializer(sponsors, many=True) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while getting sponsors.', True, app_url + self.endpoint, request.user.id, e) class SaveSponsor(APIView): """API endpoint to save a sponsor""" endpoint = 'save-sponsor/' def post(self, request, format=None): serializer = SponsorSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) try: with transaction.atomic(): sponsoring.util.save_sponsor(serializer.validated_data) return ret_message('Saved sponsor data successfully.') except Exception as e: return ret_message('An error occurred while saving sponsor data.', True, app_url + self.endpoint, request.user.id, e) class SaveItem(APIView): """ API endpoint to save an item """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'save-item/' def post(self, request, format=None): serializer = SaveItemSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: with transaction.atomic(): sponsoring.util.save_item(serializer.validated_data) return ret_message('Saved item data successfully.') except Exception as e: return ret_message('An error occurred while saving item data.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class SaveSponsorOrder(APIView): """ API endpoint to save a sponsor's order """ endpoint = 'save-sponsor-order/' def post(self, 
request, format=None): serializer = SaveSponsorOrderSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) try: with transaction.atomic(): sponsoring.util.save_sponsor_order(serializer.validated_data) return ret_message('Saved data successfully.') except Exception as e: return ret_message('An error occurred while saving data.', True, app_url + self.endpoint, request.user.id, e) <file_sep>/scouting/portal/serializers.py from django.db.models import Q from rest_framework import serializers from scouting.models import Schedule, Event class UserSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) username = serializers.CharField() email = serializers.CharField() first_name = serializers.CharField() last_name = serializers.CharField() is_active = serializers.BooleanField() phone = serializers.CharField() phone_type_id = serializers.IntegerField(required=False, allow_null=True) class ScheduleSerializer(serializers.Serializer): sch_id = serializers.IntegerField() sch_typ = serializers.CharField() sch_nm = serializers.CharField() event_id = serializers.IntegerField(read_only=True) st_time = serializers.DateTimeField() end_time = serializers.DateTimeField() notified = serializers.BooleanField() user = UserSerializer(required=False, allow_null=True, read_only=True) user_name = serializers.CharField(read_only=True) class ScoutFieldScheduleSerializer(serializers.Serializer): scout_field_sch_id = serializers.IntegerField() event_id = serializers.IntegerField(read_only=True) st_time = serializers.DateTimeField() end_time = serializers.DateTimeField() notification1 = serializers.BooleanField(read_only=True) notification2 = serializers.BooleanField(read_only=True) notification3 = serializers.BooleanField(read_only=True) red_one_id = UserSerializer(required=False, allow_null=True, read_only=True) red_two_id = UserSerializer(required=False, 
allow_null=True, read_only=True) red_three_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_one_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_two_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_three_id = UserSerializer( required=False, allow_null=True, read_only=True) scouts = serializers.CharField(read_only=True) class ScheduleTypeSerializer(serializers.Serializer): sch_typ = serializers.CharField() sch_nm = serializers.CharField() class InitSerializer(serializers.Serializer): fieldSchedule = ScoutFieldScheduleSerializer(many=True, required=False) schedule = ScheduleSerializer(many=True, required=False) allFieldSchedule = ScoutFieldScheduleSerializer(many=True, required=False) allSchedule = ScheduleSerializer(many=True, required=False) users = UserSerializer(many=True, required=False) scheduleTypes = ScheduleTypeSerializer(many=True, required=False) class ScheduleSaveSerializer(serializers.Serializer): def create(self, validated_data): event = Event.objects.get(Q(current='y') & Q(void_ind='n')) s = Schedule(event=event, st_time=validated_data['st_time'], end_time=validated_data['end_time'], user_id=validated_data.get('user', None), sch_typ_id=validated_data.get('sch_typ', None), void_ind=validated_data['void_ind']) s.save() return s sch_id = serializers.IntegerField(required=False, allow_null=True) sch_typ = serializers.CharField() st_time = serializers.DateTimeField() end_time = serializers.DateTimeField() notified = serializers.BooleanField(default=False) user = serializers.IntegerField(allow_null=True) void_ind = serializers.CharField(default='n') <file_sep>/user/migrations/0006_userpushnotificationsubscriptionobjects_time.py # Generated by Django 4.0.3 on 2023-04-29 19:18 from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('user', '0005_userpushnotificationsubscriptionobjects'), ] operations = [ 
migrations.AddField( model_name='userpushnotificationsubscriptionobjects', name='time', field=models.DateTimeField(default=django.utils.timezone.now), ), ] <file_sep>/form/migrations/0009_formsubtype_order.py # Generated by Django 4.2 on 2023-07-14 00:05 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('form', '0008_rename_subtype_formsubtype'), ] operations = [ migrations.AddField( model_name='formsubtype', name='order', field=models.IntegerField(default=0), preserve_default=False, ), ] <file_sep>/user/migrations/0005_userpushnotificationsubscriptionobjects.py # Generated by Django 4.0.3 on 2023-04-29 16:52 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('user', '0004_user_discord_user_id'), ] operations = [ migrations.CreateModel( name='UserPushNotificationSubscriptionObjects', fields=[ ('sub_id', models.AutoField(primary_key=True, serialize=False)), ('endpoint', models.CharField(max_length=2000)), ('p256dh', models.CharField(max_length=255)), ('auth', models.CharField(max_length=255)), ('void_ind', models.CharField(default='n', max_length=1)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)), ], ), ] <file_sep>/scouting/matchplanning/apps.py from django.apps import AppConfig class MatchplanningConfig(AppConfig): default_auto_field = 'django.db.models.BigAutoField' name = 'matchplanning' <file_sep>/scouting/pit/views.py from rest_framework_simplejwt.authentication import JWTAuthentication from rest_framework.permissions import IsAuthenticated from form.models import QuestionAnswer, Question from scouting.models import Season, Team, Event, ScoutPit, EventTeamInfo from .serializers import InitSerializer, PitTeamDataSerializer, ScoutAnswerSerializer, ScoutPitResultsSerializer, \ TeamSerializer from rest_framework.views import APIView from 
general.security import ret_message, has_access import cloudinary import cloudinary.uploader import cloudinary.api from django.db.models import Q, Prefetch from rest_framework.response import Response import form.util auth_obj = 51 auth_view_obj = 53 app_url = 'scouting/pit/' class Questions(APIView): """ API endpoint to get scout pit inputs """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'questions/' def get_questions(self): try: current_season = Season.objects.get(current='y') except Exception as e: return ret_message('No season set, see an admin.', True, app_url + self.endpoint, self.request.user.id, e) scout_questions = form.util.get_questions('pit') try: current_event = Event.objects.get( Q(season=current_season) & Q(current='y')) except Exception as e: return ret_message('No event set, see an admin.', True, app_url + self.endpoint, self.request.user.id, e) teams = [] try: teams = Team.objects.filter(Q(event=current_event) & ~Q(team_no__in=( ScoutPit.objects.filter(Q(event=current_event) & Q(void_ind='n')).values_list('team_no', flat=True))) ).order_by('team_no') except Exception as e: teams.append(Team()) comp_teams = [] try: comp_teams = Team.objects.filter( Q(event=current_event) & Q(team_no__in=(ScoutPit.objects.filter( Q(event=current_event) & Q(void_ind='n')).values_list('team_no', flat=True))) ).order_by('team_no') except Exception as e: comp_teams.append(Team()) return {'scoutQuestions': scout_questions, 'teams': teams, 'comp_teams': comp_teams} def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.get_questions() if type(req) == Response: return req serializer = InitSerializer(req) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while initializing.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) """ class 
class SavePicture(APIView):
    """API endpoint to save a robot picture.

    Accepts a multipart upload ('file') plus a 'team_no' form value and stores
    the image in Cloudinary, recording the public id/version on the team's
    ScoutPit row for the current event.
    """
    authentication_classes = (JWTAuthentication,)
    permission_classes = (IsAuthenticated,)
    endpoint = 'save-picture/'

    def save_file(self, file, team_no):
        """Validate and upload one image; returns a ret_message Response."""
        # Resolve the active season, then the active event within it.
        try:
            current_season = Season.objects.get(current='y')
        except Exception as e:
            return ret_message('No season set, see an admin.', True,
                               app_url + self.endpoint, self.request.user.id, e)

        try:
            current_event = Event.objects.get(
                Q(season=current_season) & Q(current='y'))
        except Exception as e:
            return ret_message('No event set, see an admin.', True,
                               app_url + self.endpoint, self.request.user.id, e)

        # Reject anything that does not look like an image by extension.
        if not allowed_file(file.name):
            return ret_message('Invalid file type.', True,
                               app_url + self.endpoint, self.request.user.id)

        try:
            pit_row = ScoutPit.objects.get(
                Q(event=current_event) & Q(team_no_id=team_no) & Q(void_ind='n'))

            # Re-use the existing Cloudinary public id when present so the
            # previous image is overwritten instead of orphaned.
            if pit_row.img_id:
                upload_result = cloudinary.uploader.upload(
                    file, public_id=pit_row.img_id)
            else:
                upload_result = cloudinary.uploader.upload(file)

            pit_row.img_id = upload_result['public_id']
            pit_row.img_ver = str(upload_result['version'])
            pit_row.save()
        except Exception as e:
            return ret_message('An error occurred while saving the image.', True,
                               app_url + self.endpoint, self.request.user.id, e)

        return ret_message('Saved Image Successfully.')

    def post(self, request, format=None):
        if not has_access(request.user.id, auth_obj):
            return ret_message('You do not have access.', True,
                               app_url + self.endpoint, request.user.id)
        try:
            file_obj = request.FILES['file']
            return self.save_file(file_obj, request.data.get('team_no', ''))
        except Exception as e:
            return ret_message('An error occurred while saving robot picture.',
                               True, app_url + self.endpoint, request.user.id, e)
class Results(APIView):
    """API endpoint to get scout pit results for the selected teams."""
    authentication_classes = (JWTAuthentication,)
    permission_classes = (IsAuthenticated,)
    endpoint = 'results/'

    def get_results(self, teams):
        # Thin wrapper so the shared helper can also be reused elsewhere.
        return get_pit_results(teams, self.endpoint, self.request)

    def post(self, request, format=None):
        if has_access(request.user.id, auth_obj) or has_access(request.user.id, auth_view_obj):
            try:
                serializer = TeamSerializer(data=request.data, many=True)
                if not serializer.is_valid():
                    return ret_message('Invalid data', True,
                                       app_url + self.endpoint,
                                       request.user.id, serializer.errors)

                ret = self.get_results(serializer.data)
                # Helper returns a Response only on error; pass it through.
                if isinstance(ret, Response):
                    return ret

                serializer = ScoutPitResultsSerializer(ret, many=True)
                return Response(serializer.data)
            except Exception as e:
                return ret_message('An error occurred while getting pit results.',
                                   True, app_url + self.endpoint,
                                   request.user.id, e)
        else:
            return ret_message('You do not have access.', True,
                               app_url + self.endpoint, request.user.id)


def get_pit_results(teams, endpoint, request):
    """Assemble pit-scouting results for every checked team.

    :param teams: list of dicts with 'team_no' and an optional 'checked' flag
                  (unchecked teams are skipped; missing flag counts as checked).
    :param endpoint: caller's endpoint string, used for error reporting.
    :param request: caller's request, used for error reporting.
    :return: list of result dicts, or a ret_message Response on error.
    """
    try:
        current_season = Season.objects.get(current='y')
    except Exception as e:
        return ret_message('No season set, see an admin.', True,
                           app_url + endpoint, request.user.id, e)

    try:
        current_event = Event.objects.get(
            Q(season=current_season) & Q(current='y'))
    except Exception as e:
        return ret_message('No event set, see an admin', True,
                           app_url + endpoint, request.user.id, e)

    results = []
    for t in teams:
        if t.get('checked', True):
            team = Team.objects.get(team_no=t['team_no'])

            try:
                sp = ScoutPit.objects.get(Q(team_no_id=t['team_no']) &
                                          Q(event=current_event) &
                                          Q(void_ind='n'))
            except Exception as e:
                return ret_message('No pit data for team.', True,
                                   app_url + endpoint, request.user.id, e)

            spas = QuestionAnswer.objects.filter(
                Q(scout_pit=sp) & Q(void_ind='n') & Q(question__void_ind='n'))\
                .order_by('question__order')

            tmp = {
                'teamNo': team.team_no,
                'teamNm': team.team_nm,
                'pic': cloudinary.CloudinaryImage(
                    sp.img_id, version=sp.img_ver).build_url(),
            }

            tmp_questions = []
            # Rank is optional extra info; missing EventTeamInfo is expected
            # early in an event, so the lookup failure is deliberately ignored.
            try:
                eti = EventTeamInfo.objects.get(Q(event=current_event) &
                                                Q(team_no=team.team_no) &
                                                Q(void_ind='n'))
                tmp_questions.append({
                    'question': 'Rank',
                    'answer': eti.rank
                })
            except Exception:
                pass

            for spa in spas:
                tmp_questions.append({
                    'question': spa.question.question,
                    'answer': spa.answer
                })

            tmp['results'] = tmp_questions
            results.append(tmp)

    return results
def allowed_file(filename):
    """Return True when the filename's extension marks it as an image.

    :param str filename: A filename.
    :return: Whether the filename has a recognized image file extension.
    :rtype: bool
    """
    # A name with no dot has no extension at all.
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[-1].lower()
    return extension in {'png', 'jpg', 'jpeg', 'gif'}
class EventSerializer(serializers.Serializer):
    """Plain Serializer for Event rows; create() persists a new Event."""

    event_id = serializers.IntegerField(required=False)
    season_id = serializers.IntegerField()
    event_nm = serializers.CharField()
    date_st = serializers.DateTimeField()
    date_end = serializers.DateTimeField()
    event_cd = serializers.CharField()
    event_url = serializers.CharField(required=False)
    address = serializers.CharField()
    city = serializers.CharField()
    state_prov = serializers.CharField()
    postal_code = serializers.CharField()
    location_name = serializers.CharField()
    gmaps_url = serializers.CharField(required=False)
    webcast_url = serializers.CharField(required=False)
    timezone = serializers.CharField()
    current = serializers.CharField()
    competition_page_active = serializers.CharField()
    void_ind = serializers.CharField(default='n')

    def update(self, instance, validated_data):
        # Updating an existing Event is intentionally unsupported here.
        pass

    def create(self, validated_data):
        # Optional URL fields may be absent from validated_data, hence .get().
        event = Event(season_id=validated_data['season_id'],
                      event_nm=validated_data['event_nm'],
                      date_st=validated_data['date_st'],
                      event_cd=validated_data['event_cd'],
                      event_url=validated_data.get('event_url', None),
                      address=validated_data['address'],
                      city=validated_data['city'],
                      state_prov=validated_data['state_prov'],
                      postal_code=validated_data['postal_code'],
                      location_name=validated_data['location_name'],
                      gmaps_url=validated_data.get('gmaps_url', None),
                      webcast_url=validated_data.get('webcast_url', None),
                      date_end=validated_data['date_end'],
                      timezone=validated_data['timezone'],
                      current=validated_data['current'],
                      competition_page_active=validated_data['competition_page_active'],
                      void_ind=validated_data['void_ind'])
        event.save()
        return event
= serializers.IntegerField(read_only=True) event_nm = serializers.CharField() date_st = serializers.DateTimeField() date_end = serializers.DateTimeField() event_cd = serializers.CharField() event_url = serializers.CharField(allow_null=True) address = serializers.CharField() city = serializers.CharField() state_prov = serializers.CharField() postal_code = serializers.CharField() location_name = serializers.CharField() gmaps_url = serializers.CharField(allow_null=True) webcast_url = serializers.CharField(allow_null=True) timezone = serializers.CharField() current = serializers.CharField() competition_page_active = serializers.CharField() team_no = TeamCheckedSerializer(required=False, many=True) """ class CompetitionLevelSerializer(serializers.ModelSerializer): class Meta: model = CompetitionLevel fields = '__all__' class MatchSerializer(serializers.ModelSerializer): comp_level = CompetitionLevelSerializer(read_only=True) class Meta: model = Match fields = '__all__' """ class QuestionTypeSerializer(serializers.Serializer): question_typ = serializers.CharField() question_typ_nm = serializers.CharField() class PermissionSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) name = serializers.CharField() content_type_id = serializers.IntegerField(read_only=True) codename = serializers.CharField() class GroupSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) name = serializers.CharField() permissions = PermissionSerializer(many=True, required=False) class PhoneTypeSerializer(serializers.Serializer): phone_type_id = serializers.IntegerField(required=False, allow_null=True) carrier = serializers.CharField() phone_type = serializers.CharField() class UserSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) username = serializers.CharField() email = serializers.CharField() first_name = serializers.CharField() last_name = serializers.CharField() is_active = serializers.BooleanField() phone 
= serializers.CharField(required=False, allow_null=True) discord_user_id = serializers.CharField(required=False, allow_null=True) groups = GroupSerializer(many=True, required=False) phone_type = PhoneTypeSerializer(required=False, allow_null=True) phone_type_id = serializers.IntegerField(required=False, allow_null=True) class ScoutFieldScheduleSerializer(serializers.Serializer): scout_field_sch_id = serializers.IntegerField() event_id = serializers.IntegerField(read_only=True) st_time = serializers.DateTimeField() end_time = serializers.DateTimeField() notification1 = serializers.BooleanField(read_only=True) notification2 = serializers.BooleanField(read_only=True) notification3 = serializers.BooleanField(read_only=True) red_one_id = UserSerializer(required=False, allow_null=True, read_only=True) red_two_id = UserSerializer(required=False, allow_null=True, read_only=True) red_three_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_one_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_two_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_three_id = UserSerializer( required=False, allow_null=True, read_only=True) scouts = serializers.CharField(read_only=True) class ScoutFieldScheduleSaveSerializer(serializers.Serializer): def create(self, validated_data): sfs = ScoutFieldSchedule(event_id=validated_data['event_id'], st_time=validated_data['st_time'], end_time=validated_data['end_time'], red_one_id=validated_data.get('red_one_id', None), red_two_id=validated_data.get('red_two_id', None), red_three_id=validated_data.get('red_three_id', None), blue_one_id=validated_data.get('blue_one_id', None), blue_two_id=validated_data.get('blue_two_id', None), blue_three_id=validated_data.get('blue_three_id', None), void_ind=validated_data['void_ind']) sfs.save() return sfs scout_field_sch_id = serializers.IntegerField(required=False, allow_null=True) event_id = serializers.IntegerField() st_time = 
serializers.DateTimeField() end_time = serializers.DateTimeField() notified = serializers.CharField(default='n') red_one_id = serializers.IntegerField(allow_null=True) red_two_id = serializers.IntegerField(allow_null=True) red_three_id = serializers.IntegerField(allow_null=True) blue_one_id = serializers.IntegerField(allow_null=True) blue_two_id = serializers.IntegerField(allow_null=True) blue_three_id = serializers.IntegerField(allow_null=True) void_ind = serializers.CharField(default='n') class ScoutPitScheduleSerializer(serializers.Serializer): scout_pit_sch_id = serializers.IntegerField(read_only=True) user_id = serializers.IntegerField() event_id = serializers.IntegerField(read_only=True) st_time = serializers.DateTimeField() end_time = serializers.DateTimeField() notified = serializers.CharField() class FormTypeSerializer(serializers.Serializer): form_typ = serializers.CharField(read_only=True) form_nm = serializers.CharField() class FormSubTypeSerializer(serializers.Serializer): form_sub_typ = serializers.CharField() form_sub_nm = serializers.CharField() form_typ_id = serializers.CharField() class QuestionOptionsSerializer(serializers.Serializer): question_opt_id = serializers.IntegerField(required=False, allow_null=True) question_id = serializers.IntegerField(read_only=True) option = serializers.CharField() active = serializers.CharField() class QuestionSerializer(serializers.Serializer): question_id = serializers.IntegerField(required=False, allow_null=True) season_id = serializers.IntegerField(read_only=True) question = serializers.CharField() order = serializers.IntegerField() active = serializers.CharField() question_typ = serializers.CharField() question_typ_nm = serializers.CharField(required=False, allow_blank=True) form_sub_typ = serializers.CharField(required=False, allow_blank=True, allow_null=True) form_sub_nm = serializers.CharField(required=False, allow_blank=True, allow_null=True) form_typ = serializers.CharField() display_value = 
serializers.CharField(read_only=True) questionoptions_set = QuestionOptionsSerializer( required=False, allow_null=True, many=True) answer = serializers.CharField(required=False, allow_null=True, allow_blank=True) class ScoutAdminQuestionInitSerializer(serializers.Serializer): questionTypes = QuestionTypeSerializer(many=True) scoutQuestionSubTypes = FormSubTypeSerializer( many=True, required=False) scoutQuestions = QuestionSerializer(many=True) class EventToTeamsSerializer(serializers.Serializer): event_id = serializers.IntegerField() teams = TeamSerializer(many=True) class InitSerializer(serializers.Serializer): seasons = SeasonSerializer(many=True) events = EventTeamSerializer(many=True) currentSeason = SeasonSerializer(required=False) currentEvent = EventSerializer(required=False) users = UserSerializer(many=True) userGroups = GroupSerializer(many=True) phoneTypes = PhoneTypeSerializer(many=True) fieldSchedule = ScoutFieldScheduleSerializer(many=True) # pitSchedule = ScoutPitScheduleSerializer(many=True) # pastSchedule = ScoutScheduleSerializer(many=True) scoutQuestionType = FormTypeSerializer(many=True) teams = TeamSerializer(many=True) <file_sep>/admin/views.py from rest_framework_simplejwt.authentication import JWTAuthentication from rest_framework.permissions import IsAuthenticated from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage from user.models import PhoneType from .serializers import ErrorLogSerializer, InitSerializer, SaveUserSerializer from .models import ErrorLog from rest_framework.views import APIView from general.security import has_access, ret_message from django.contrib.auth.models import Group from django.db.models.functions import Lower from rest_framework.response import Response from django.db.models import Q from user.models import User auth_obj = 55 auth_obj_save_user = 56 app_url = 'admin/' class Init(APIView): """ API endpoint to get all the init values for the admin screen """ authentication_classes = 
class SaveUser(APIView):
    """API endpoint to save user data"""
    authentication_classes = (JWTAuthentication,)
    permission_classes = (IsAuthenticated,)
    endpoint = 'save-user/'

    def save_user(self, data):
        """Update a user's profile fields and reconcile group membership.

        :param data: validated SaveUserSerializer payload ('user' + 'groups').
        :return: ret_message Response describing success or failure.
        """
        try:
            groups = []
            user = User.objects.get(username=data['user']['username'])
            user.first_name = data['user']['first_name']
            user.last_name = data['user']['last_name']
            user.email = data['user']['email'].lower()
            # discord_user_id and phone are declared required=False on the
            # serializer, so they may be missing from validated_data entirely;
            # use .get() to avoid a KeyError for such payloads.
            user.discord_user_id = data['user'].get('discord_user_id', None)
            user.phone = data['user'].get('phone', None)
            user.phone_type_id = data['user'].get('phone_type_id', None)
            user.is_active = data['user']['is_active']
            user.save()

            # Add any requested groups the user is not already in.
            for d in data['groups']:
                groups.append(d['name'])
                already_member = user.groups.filter(name=d['name']).exists()
                if not already_member:
                    group = Group.objects.get(name=d['name'])
                    user.groups.add(group)

            # Remove the user from any group not present in the request.
            user_groups = user.groups.all()
            user_groups = user_groups.filter(~Q(name__in=groups))
            for user_group in user_groups:
                user_group.user_set.remove(user)

            return ret_message('Saved user successfully')
        except Exception as e:
            return ret_message('Can\'t save the user', True,
                               app_url + self.endpoint,
                               self.request.user.id, e)

    def post(self, request, format=None):
        serializer = SaveUserSerializer(data=request.data)
        if not serializer.is_valid():
            return ret_message('Invalid data', True, app_url + self.endpoint,
                               request.user.id, serializer.errors)

        if has_access(request.user.id, auth_obj_save_user):
            try:
                req = self.save_user(serializer.validated_data)
                return req
            except Exception as e:
                return ret_message('An error occurred while saving the user.',
                                   True, app_url + self.endpoint,
                                   request.user.id, e)
        else:
            return ret_message('You do not have access.', True,
                               app_url + self.endpoint, request.user.id)
errors = paginator.page(paginator.num_pages) previous_pg = None if not errors.has_previous() else errors.previous_page_number() next_pg = None if not errors.has_next() else errors.next_page_number() serializer = ErrorLogSerializer(errors, many=True) data = {'count': paginator.num_pages, 'previous': previous_pg, 'next': next_pg, 'errors': serializer.data} return data def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.get_errors(request.query_params.get('pg_num', 1)) return Response(req) except Exception as e: return ret_message('An error occurred while getting errors.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) <file_sep>/scouting/migrations/0005_schedule_and_event_team_info_event_field_Rename.py # Generated by Django 4.0.3 on 2023-03-19 19:28 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('scouting', '0004_schedule_and_event_team_info'), ] operations = [ migrations.RenameField( model_name='eventteaminfo', old_name='event_id', new_name='event', ), ] <file_sep>/sponsoring/util.py from datetime import datetime import cloudinary import cloudinary.uploader import cloudinary.api import pytz from django.db.models import Q, Sum from django.db.models.functions import Lower from sponsoring.models import Item, Sponsor, ItemSponsor def get_items(): items = Item.objects.filter(void_ind='n').order_by(Lower('item_nm')) ret = [] for i in items: purchased = i.itemsponsor_set.filter(Q(void_ind='n') & Q(time__gte=i.reset_date)).aggregate(Sum('quantity')) purchased = purchased.get('quantity__sum', 0) purchased = purchased if purchased is not None else 0 ret.append({ 'item_id': i.item_id, 'item_nm': i.item_nm, 'item_desc': i.item_desc, 'quantity': i.quantity, 'sponsor_quantity': purchased, 'reset_date': i.reset_date, 'active': i.active, 'img_url': cloudinary.CloudinaryImage(i.img_id, 
def get_sponsors():
    """Return all non-voided sponsors, ordered case-insensitively by name."""
    sponsors = Sponsor.objects.filter(void_ind='n').order_by(Lower('sponsor_nm'))
    return sponsors


def save_sponsor(sponsor):
    """Create or update a Sponsor from a dict and return the saved row."""
    if sponsor.get('sponsor_id', None) is not None:
        s = Sponsor.objects.get(sponsor_id=sponsor['sponsor_id'])
        s.sponsor_nm = sponsor['sponsor_nm']
        s.phone = sponsor['phone']
        s.email = sponsor['email']
    else:
        s = Sponsor(sponsor_nm=sponsor['sponsor_nm'],
                    phone=sponsor['phone'],
                    email=sponsor['email'],
                    void_ind='n')
    s.save()
    return s


def save_item(item):
    """Create or update an Item, uploading its image when one is provided."""
    if item.get('item_id', None) is not None:
        i = Item.objects.get(item_id=item['item_id'])
        i.item_nm = item['item_nm']
        i.item_desc = item['item_desc']
        i.quantity = item['quantity']
        i.reset_date = item['reset_date']
    else:
        i = Item(item_nm=item['item_nm'],
                 item_desc=item['item_desc'],
                 quantity=item['quantity'],
                 reset_date=item['reset_date'],
                 void_ind='n')
    i.save()

    if item.get('img', None) is not None:
        # Re-use the existing Cloudinary public id so the old image is
        # replaced rather than orphaned.
        if i.img_id:
            response = cloudinary.uploader.upload(item['img'], public_id=i.img_id)
        else:
            response = cloudinary.uploader.upload(item['img'])
        i.img_id = response['public_id']
        i.img_ver = str(response['version'])
        i.save()

    return i


def save_item_sponsor(item_sponsor):
    """Create or update an ItemSponsor link and return the saved row."""
    if item_sponsor.get('item_sponsor_id', None) is not None:
        i = ItemSponsor.objects.get(item_sponsor_id=item_sponsor['item_sponsor_id'])
        # BUGFIX: the previous code assigned i.item_id.item_id /
        # i.sponsor_id.sponsor_id, which mutates the *related* row's primary
        # key in memory instead of repointing this row's foreign keys.
        # Assign the FK attribute itself (<field>_id), matching the create
        # branch below.
        i.item_id_id = item_sponsor['item_id']
        i.sponsor_id_id = item_sponsor['sponsor_id']
        i.quantity = item_sponsor['quantity']
    else:
        i = ItemSponsor(item_id_id=item_sponsor['item_id'],
                        sponsor_id_id=item_sponsor['sponsor_id'],
                        quantity=item_sponsor['quantity'],
                        void_ind='n')
    i.save()
    return i


def save_sponsor_order(sponsor_order):
    """Persist a sponsor plus one ItemSponsor row per ordered item."""
    s = save_sponsor(sponsor_order['sponsor'])
    for i in sponsor_order['items']:
        item_sponsor = {
            'item_id': i['item_id'],
            'sponsor_id': s.sponsor_id,
            'quantity': i['sponsor_quantity']
        }
        save_item_sponsor(item_sponsor)
PermissionSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) name = serializers.CharField() content_type_id = serializers.IntegerField(read_only=True) codename = serializers.CharField() class GroupSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) name = serializers.CharField() permissions = PermissionSerializer(many=True, required=False) class PhoneTypeSerializer(serializers.Serializer): phone_type_id = serializers.IntegerField(read_only=True) carrier = serializers.CharField() phone_type = serializers.CharField() class UserSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) username = serializers.CharField() email = serializers.CharField() first_name = serializers.CharField() last_name = serializers.CharField() is_active = serializers.BooleanField() phone = serializers.CharField(required=False, allow_null=True) discord_user_id = serializers.CharField(required=False, allow_null=True) groups = GroupSerializer(many=True, required=False) phone_type = PhoneTypeSerializer(required=False, allow_null=True) phone_type_id = serializers.IntegerField(required=False, allow_null=True) class InitSerializer(serializers.Serializer): users = UserSerializer(many=True) userGroups = GroupSerializer(many=True) phoneTypes = PhoneTypeSerializer(many=True) class SaveUserSerializer(serializers.Serializer): user = UserSerializer() groups = GroupSerializer(many=True) class ErrorLogSerializer(serializers.Serializer): error_log_id = serializers.IntegerField(read_only=True) path = serializers.CharField() message = serializers.CharField() exception = serializers.CharField() time = serializers.DateTimeField() user = UserSerializer(required=False) <file_sep>/sponsoring/urls.py from django.urls import path from sponsoring.views import GetItems, GetSponsors, SaveSponsor, SaveItem, SaveSponsorOrder urlpatterns = [ path('get-items/', GetItems.as_view()), path('get-sponsors/', GetSponsors.as_view()), 
path('save-sponsor/', SaveSponsor.as_view()), path('save-item/', SaveItem.as_view()), path('save-sponsor-order/', SaveSponsorOrder.as_view()), ] <file_sep>/scouting/admin/urls.py from django.urls import include from django.urls import path from rest_framework import routers from .views import * # Wire up our API using atomic URL routing. # Additionally, we include login URLs for the browsable API. urlpatterns = [ path('init/', Init.as_view()), path('sync-season/', SyncSeason.as_view()), path('set-season/', SetSeason.as_view()), path('toggle-competition-page/', ToggleCompetitionPage.as_view()), path('sync-matches/', SyncMatches.as_view()), path('sync-event-team-info/', SyncEventTeamInfo.as_view()), path('add-season/', AddSeason.as_view()), path('delete-season/', DeleteSeason.as_view()), path('add-event/', AddEvent.as_view()), path('add-team/', AddTeam.as_view()), path('delete-event/', DeleteEvent.as_view()), path('add-team-to-event/', AddTeamToEvent.as_view()), path('remove-team-to-event/', RemoveTeamToEvent.as_view()), #path('scout-question-init/', QuestionInit.as_view()), #path('save-scout-question/', SaveScoutQuestion.as_view()), #path('update-scout-question/', UpdateScoutQuestion.as_view()), #path('toggle-scout-question/', ToggleScoutQuestion.as_view()), #path('toggle-option/', ToggleOption.as_view()), path('save-scout-field-schedule-entry/', SaveScoutFieldScheduleEntry.as_view()), path('notify-users/', NotifyUsers.as_view()), path('save-phone-type/', SavePhoneType.as_view()), ] <file_sep>/user/migrations/0003_alter_user_last_login.py # Generated by Django 4.0.3 on 2022-04-06 20:56 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('user', '0002_user_groups_user_user_permissions'), ] operations = [ migrations.AlterField( model_name='user', name='last_login', field=models.DateTimeField(default=None, null=True), ), ] <file_sep>/form/views.py from django.db import transaction from django.db.models import Q from 
rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from rest_framework_simplejwt.authentication import JWTAuthentication

import form.util
from form.models import Question, QuestionAnswer
from form.serializers import QuestionSerializer, SaveResponseSerializer, SaveScoutSerializer, \
    QuestionInitializationSerializer
from general.security import has_access, ret_message
from scouting.models import Event, Season, ScoutField, ScoutPit

# Security object id required by has_access() for form administration.
auth_obj = 50
app_url = 'form/'


class GetQuestions(APIView):
    """
    API endpoint to init form editor
    """
    endpoint = 'get-questions/'

    def get(self, request, format=None):
        # Unauthenticated: returns all questions for the requested form type.
        try:
            questions = form.util.get_questions(request.query_params['form_typ'])
            serializer = QuestionSerializer(questions, many=True)
            return Response(serializer.data)
        except Exception as e:
            return ret_message('An error occurred while getting questions.', True, app_url + self.endpoint,
                               request.user.id, e)


class GetFormInit(APIView):
    """
    API endpoint to init form editor
    """
    authentication_classes = (JWTAuthentication,)
    permission_classes = (IsAuthenticated,)
    endpoint = 'form-init/'

    def get(self, request, format=None):
        if has_access(request.user.id, auth_obj):
            try:
                # Everything the editor UI needs in one payload.
                questions = form.util.get_questions(request.query_params['form_typ'])
                question_types = form.util.get_question_types()
                form_sub_types = form.util.get_form_sub_types(request.query_params['form_typ'])
                serializer = QuestionInitializationSerializer({
                    "questions": questions,
                    "question_types": question_types,
                    "form_sub_types": form_sub_types,
                })
                return Response(serializer.data)
            except Exception as e:
                return ret_message('An error occurred while initializing.', True, app_url + self.endpoint,
                                   request.user.id, e)
        else:
            return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id)


class SaveQuestion(APIView):
    """API endpoint to save new questions"""
    authentication_classes = (JWTAuthentication,)
    permission_classes = (IsAuthenticated,)
    endpoint = 'save-question/'

    def post(self, request, format=None):
        serializer = QuestionSerializer(data=request.data)
        if not serializer.is_valid():
            return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors)

        if has_access(request.user.id, auth_obj):
            try:
                # Atomic so a partially-saved question never persists.
                with transaction.atomic():
                    form.util.save_question(serializer.validated_data)
                return ret_message('Saved question successfully.')
            except Exception as e:
                return ret_message('An error occurred while saving the question.', True, app_url + self.endpoint,
                                   request.user.id, e)
        else:
            return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id)


class SaveAnswers(APIView):
    """
    API endpoint to save scout field answers
    """
    authentication_classes = (JWTAuthentication,)
    permission_classes = (IsAuthenticated,)
    endpoint = 'save-answers/'

    def post(self, request, format=None):
        success_msg = 'Response saved successfully'
        if has_access(request.user.id, auth_obj):
            try:
                try:
                    current_event = Event.objects.get(
                        Q(season=Season.objects.get(current='y')) & Q(current='y'))
                except Exception as e:
                    raise Exception('No event set, see an admin')

                # First attempt to interpret the payload as a scouting answer
                # (field or pit); fall back to a generic form response below.
                serializer = SaveScoutSerializer(data=request.data)
                if serializer.is_valid():
                    with transaction.atomic():
                        if (serializer.data['form_typ'] == 'field'):
                            # Field scouting: always a fresh ScoutField record.
                            sf = ScoutField(
                                event=current_event, team_no_id=serializer.data['team'],
                                match_id=serializer.data.get('match', None),
                                user_id=self.request.user.id, void_ind='n')
                            sf.save()

                            for d in serializer.data.get('question_answers', []):
                                form.util.save_question_answer(d['answer'],
                                                               Question.objects.get(question_id=d['question_id']),
                                                               scout_field=sf)
                        else:
                            # Pit scouting: reuse the team's existing ScoutPit
                            # for this event, creating one if absent.
                            try:
                                sp = ScoutPit.objects.get(Q(team_no_id=serializer.data['team']) & Q(void_ind='n') &
                                                          Q(event=current_event))
                            except Exception as e:
                                sp = ScoutPit(event=current_event, team_no_id=serializer.data['team'],
                                              user_id=self.request.user.id, void_ind='n')
                                sp.save()

                            for d in serializer.data.get('question_answers', []):
                                try:
                                    # NOTE(review): spa.answer is assigned but
                                    # spa.save() is never called, so updates to
                                    # an existing pit answer are not persisted —
                                    # confirm and add spa.save() if unintended.
                                    spa = QuestionAnswer.objects.get(Q(scout_pit=sp) &
                                                                     Q(question_id=d['question_id']) &
                                                                     Q(void_ind='n'))
                                    spa.answer = d.get('answer', '')
                                except Exception as e:
                                    form.util.save_question_answer(d.get('answer', ''),
                                                                   Question.objects.get(
                                                                       question_id=d['question_id']),
                                                                   scout_pit=sp)

                    return ret_message(success_msg)

                # Generic (non-scouting) form response.
                serializer = SaveResponseSerializer(data=request.data)
                if serializer.is_valid():
                    with transaction.atomic():
                        r = form.models.Response(form_typ_id=serializer.data['form_typ'])
                        r.save()
                        for d in serializer.data.get('question_answers', []):
                            form.util.save_question_answer(d['answer'],
                                                           Question.objects.get(question_id=d['question_id']),
                                                           response=r)
                    return ret_message(success_msg)

                return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id,
                                   serializer.errors)
            except Exception as e:
                return ret_message('An error occurred while saving answers.', True, app_url + self.endpoint,
                                   request.user.id, e)
        else:
            return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id)
<file_sep>/alerts/migrations/0001_initial.py
# Generated by Django 4.2 on 2023-05-01 14:14

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone


class Migration(migrations.Migration):
    # Initial alerts schema: Alert -> AlertChannelSend per delivery channel.

    initial = True

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Alert',
            fields=[
                ('alert_id', models.AutoField(primary_key=True, serialize=False)),
                ('alert_subject', models.CharField(max_length=255)),
                ('alert_body', models.CharField(max_length=4000)),
                ('staged_time', models.DateTimeField(default=django.utils.timezone.now)),
                ('void_ind', models.CharField(default='n', max_length=1)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT,
                                           to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='AlertCommunicationChannelType',
            fields=[
                ('alert_comm_typ', models.CharField(max_length=50, primary_key=True, serialize=False)),
                ('alert_comm_nm', models.CharField(max_length=255)),
                ('void_ind', models.CharField(default='n', max_length=1)),
            ],
        ),
        migrations.CreateModel(
            name='AlertChannelSend',
            fields=[
                ('alert_channel_send_id', models.AutoField(primary_key=True, serialize=False)),
                ('sent_time', models.DateTimeField(null=True)),
                ('void_ind', models.CharField(default='n', max_length=1)),
                ('alert_comm_typ', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT,
                                                     to='alerts.alertcommunicationchanneltype')),
                ('alert_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='alerts.alert')),
            ],
        ),
    ]
<file_sep>/public/competition/views.py
from rest_framework.views import APIView
from django.db.models import Q
from rest_framework.response import Response

from .serializers import CompetitionInformationSerializer
from scouting.models import Event, Match, Team
from general.security import ret_message

# Create your views here.
app_url = 'public/competition/'


class Init(APIView):
    """API endpoint to tell the frontend if the competition page is active and its information"""
    endpoint = 'init/'

    def get_competition_information(self):
        # Returns the current, competition-page-active event plus every match
        # involving team 3492; both None when no such event exists.
        try:
            event = Event.objects.get(Q(current='y') & Q(
                competition_page_active='y') & Q(void_ind='n'))
            team3492 = Team.objects.get(team_no=3492)
            matches = Match.objects.filter(Q(event=event) & Q(void_ind='n') &
                                           Q(Q(red_one=team3492) | Q(red_two=team3492) | Q(
                                               red_three=team3492) | Q(blue_one=team3492) |
                                             Q(blue_two=team3492) | Q(blue_three=team3492))) \
                .order_by('comp_level__comp_lvl_order', 'match_number')
        except Exception as e:
            event = None
            matches = None

        return {'event': event, 'matches': matches}

    def get(self, request, format=None):
        try:
            req = self.get_competition_information()
            serializer = CompetitionInformationSerializer(req)
            return Response(serializer.data)
        except Exception as e:
            return ret_message('An error occurred while getting competition page information.', True,
                               app_url + self.endpoint, exception=e)
<file_sep>/alerts/urls.py
from
django.urls import include, path from alerts.views import RunAlerts, SendAlerts, DismissAlert urlpatterns = [ path('run/', RunAlerts.as_view()), path('send/', SendAlerts.as_view()), path('dismiss/', DismissAlert.as_view()), ] <file_sep>/scouting/portal/urls.py from django.urls import path from .views import Init, SaveScheduleEntry, NotifyUser # Wire up our API using atomic URL routing. # Additionally, we include login URLs for the browsable API. urlpatterns = [ path('init/', Init.as_view()), path('save-schedule-entry/', SaveScheduleEntry.as_view()), path('notify-user/', NotifyUser.as_view()) ] <file_sep>/alerts/util.py import datetime import django import pytz from django.db.models import Q, ExpressionWrapper, DurationField, F from alerts.models import Alert, AlertChannelSend, AlertCommunicationChannelType from general import send_message from general.security import ret_message from scouting.models import Event, ScoutFieldSchedule, Schedule def stage_all_field_schedule_alerts(): message = '' event = Event.objects.get(Q(current='y') & Q(void_ind='n')) curr_time = datetime.datetime.utcnow().astimezone(pytz.timezone(event.timezone)) sfss_15 = ScoutFieldSchedule.objects.annotate( diff=ExpressionWrapper(F('st_time') - curr_time, output_field=DurationField())) \ .filter(diff__lte=datetime.timedelta(minutes=17)) \ .filter(Q(event=event) & Q(notification1=False) & Q(void_ind='n')) sfss_5 = ScoutFieldSchedule.objects.annotate( diff=ExpressionWrapper(F('st_time') - curr_time, output_field=DurationField())) \ .filter(diff__lte=datetime.timedelta(minutes=7)) \ .filter(Q(event=event) & Q(notification2=False) & Q(void_ind='n')) sfss_now = ScoutFieldSchedule.objects.annotate( diff=ExpressionWrapper(F('st_time') - curr_time, output_field=DurationField())) \ .filter(diff__lt=datetime.timedelta(minutes=5)) \ .filter(Q(event=event) & Q(notification3=False) & Q(void_ind='n')) message += stage_field_schedule_alerts(1, sfss_15, event) message += stage_field_schedule_alerts(2, sfss_5, 
event) message += stage_field_schedule_alerts(3, sfss_now, event) if message == '': message = 'No notifications' return message def stage_field_schedule_alerts(notification, sfss, event): message = '' for sfs in sfss: date_st_utc = sfs.st_time.astimezone(pytz.utc) date_end_utc = sfs.end_time.astimezone(pytz.utc) date_st_local = date_st_utc.astimezone(pytz.timezone(event.timezone)) date_end_local = date_end_utc.astimezone(pytz.timezone(event.timezone)) date_st_str = date_st_local.strftime("%m/%d/%Y, %I:%M%p") date_end_str = date_end_local.strftime("%m/%d/%Y, %I:%M%p") warning_text = '' match notification: case 1: sfs.notification1 = True warning_text = '15 minute warning' case 2: sfs.notification2 = True warning_text = '5 minute warning' case 3: sfs.notification3 = True warning_text = 'time to scout!' subject = 'Scouting ' + warning_text body = f'You are scheduled to scout from: {date_st_str} to {date_end_str}.\n- PARTs' success_txt = 'Stage scouting alert: ' fail_txt = 'Phone Unable to stage scouting alert: ' staged_alerts = [] try: staged_alerts.append(stage_alert(sfs.red_one, subject, body)) message += success_txt + sfs.red_one.first_name + '\n' except Exception as e: message += fail_txt + (sfs.red_one.first_name if sfs.red_one is not None else "red one") + '\n' try: staged_alerts.append(stage_alert(sfs.red_two, subject, body)) message += success_txt + sfs.red_two.first_name + '\n' except Exception as e: message += fail_txt + (sfs.red_two.first_name if sfs.red_two is not None else "red two") + '\n' try: staged_alerts.append(stage_alert(sfs.red_three, subject, body)) message += success_txt + sfs.red_three.first_name + '\n' except Exception as e: message += fail_txt + (sfs.red_three.first_name if sfs.red_three is not None else "red three") + '\n' try: staged_alerts.append(stage_alert(sfs.blue_one, subject, body)) message += success_txt + sfs.blue_one.first_name + '\n' except Exception as e: message += fail_txt + (sfs.blue_one.first_name if sfs.blue_one is not None 
else "blue one") + '\n' try: staged_alerts.append(stage_alert(sfs.blue_two, subject, body)) message += success_txt + sfs.blue_two.first_name + '\n' except Exception as e: message += fail_txt + (sfs.blue_two.first_name if sfs.blue_two is not None else "blue two") + '\n' try: staged_alerts.append(stage_alert(sfs.blue_three, subject, body)) message += success_txt + sfs.blue_three.first_name + '\n' except Exception as e: message += fail_txt + (sfs.blue_three.first_name if sfs.blue_three is not None else "blue three") + '\n' for sa in staged_alerts: accts = AlertCommunicationChannelType.objects.filter( Q(void_ind='n') & ~Q(alert_comm_typ__in=['message', 'email'])) for acct in accts: stage_alert_channel_send(sa, acct.alert_comm_typ) sfs.save() return message def stage_schedule_alerts(): message = '' event = Event.objects.get(Q(current='y') & Q(void_ind='n')) curr_time = datetime.datetime.utcnow().astimezone(pytz.timezone(event.timezone)) schs = Schedule.objects.annotate( diff=ExpressionWrapper(F('st_time') - curr_time, output_field=DurationField())) \ .filter(diff__lte=datetime.timedelta(minutes=6)) \ .filter(Q(event=event) & Q(notified=False) & Q(void_ind='n')).order_by('sch_typ__sch_typ', 'st_time') staged_alerts = [] for sch in schs: date_st_utc = sch.st_time.astimezone(pytz.utc) date_end_utc = sch.end_time.astimezone(pytz.utc) date_st_local = date_st_utc.astimezone(pytz.timezone(event.timezone)) date_end_local = date_end_utc.astimezone(pytz.timezone(event.timezone)) date_st_str = date_st_local.strftime("%m/%d/%Y, %I:%M%p") date_end_str = date_end_local.strftime("%m/%d/%Y, %I:%M%p") body = f'You are scheduled in the pit from: {date_st_str} to {date_end_str} for {sch.sch_typ.sch_nm}.\n- PARTs' staged_alerts.append(stage_alert(sch.user, 'Pit time!', body)) message += 'Pit Notified: ' + sch.user.first_name + ' : ' + sch.sch_typ.sch_nm + '\n' for sa in staged_alerts: accts = AlertCommunicationChannelType.objects.filter( Q(void_ind='n') & ~Q(alert_comm_typ__in=['message', 
'email'])) for acct in accts: stage_alert_channel_send(sa, acct.alert_comm_typ) if message == '': message = 'No notifications' return message def stage_alert(user, alert_subject: str, alert_body: str): alert = Alert(user=user, alert_subject=alert_subject, alert_body=alert_body) alert.save() return alert def stage_alert_channel_send(alert, alert_comm_typ: str): acs = AlertChannelSend( alert_comm_typ=AlertCommunicationChannelType.objects.get( Q(alert_comm_typ=alert_comm_typ) & Q(void_ind='n')), alert=alert) acs.save() return acs def send_alerts(): message = '' acss = AlertChannelSend.objects.filter(Q(sent_time__isnull=True) & Q(void_ind='n')) for acs in acss: try: match acs.alert_comm_typ.alert_comm_typ: case 'email': send_message.send_email( acs.alert.user.email, acs.alert.alert_subject, 'generic_email', {'message': acs.alert.alert_body, 'user': acs.alert.user}) message += 'Email' case 'message': message += 'message not configured' case 'notification': send_message.send_webpush(acs.alert.user, acs.alert.alert_subject, acs.alert.alert_body, acs.alert.alert_id) message += 'Webpush' case 'txt': send_message.send_email( acs.alert.user.phone + acs.alert.user.phone_type.phone_type, acs.alert.alert_subject, 'generic_text', {'message': acs.alert.alert_body}) message += 'Phone' case 'discord': user = f'@<{acs.alert.user.discord_user_id}>' if acs.alert.user.discord_user_id else acs.alert.user.first_name + ' ' + acs.alert.user.last_name discord_message = acs.alert.alert_subject + ':\n' + user + '\n' + acs.alert.alert_body send_message.send_discord_notification(discord_message) message += 'Discord' acs.sent_time = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) acs.save() message += ' Notified: ' + acs.alert.user.first_name + ' acs id: ' + str(acs.alert_channel_send_id) + '\n' except Exception as e: alert = 'An error occurred while sending alert: ' + acs.alert.user.first_name + ' acs id: ' + str( acs.alert_channel_send_id) message += alert + '\n' return ret_message(alert, 
True, 'alerts.util.send_alerts', 0, e) if message is '': message = 'No notifications' return message def get_user_alerts(user_id: str, alert_comm_typ_id: str): acs = AlertChannelSend.objects.filter(Q(dismissed_time__isnull=True) & Q(alert_comm_typ_id=alert_comm_typ_id) & Q(void_ind='n') & Q(alert__user_id=user_id) & Q(alert__void_ind='n')).select_related('alert') notifs = [] for a in acs: notifs.append({ 'alert_id': a.alert.alert_id, 'alert_channel_send_id': a.alert_channel_send_id, 'alert_subject': a.alert.alert_subject, 'alert_body': a.alert.alert_body, 'staged_time': a.alert.staged_time }) return notifs def dismiss_alert(alert_channel_send_id: str, user_id: str): acs = AlertChannelSend.objects.get(Q(dismissed_time__isnull=True) & Q(void_ind='n') & Q(alert_channel_send_id=alert_channel_send_id) & Q(alert__user_id=user_id) & Q(alert__void_ind='n')) acs.dismissed_time = datetime.datetime.utcnow().replace(tzinfo=pytz.utc) acs.save() <file_sep>/sponsoring/migrations/0004_itemsponsor_void_ind.py # Generated by Django 4.2 on 2023-07-28 23:36 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('sponsoring', '0003_historicalitem_img_id_historicalitem_img_ver_and_more'), ] operations = [ migrations.AddField( model_name='itemsponsor', name='void_ind', field=models.CharField(default='n', max_length=1), ), ] <file_sep>/general/send_message.py import datetime import pytz import requests from django.core.mail import EmailMultiAlternatives from django.template.loader import get_template from django.conf import settings from webpush import send_user_notification def send_email(to_email: str, subject: str, template: str, cntx: dict): plaintext = get_template('./email_templates/' + template + '.txt') html = get_template('email_templates/' + template + '.html') text_content = plaintext.render(cntx) html_content = html.render(cntx) email = EmailMultiAlternatives( subject, text_content, '<EMAIL>', [to_email]) 
email.attach_alternative(html_content, "text/html") email.send() def send_discord_notification(message: str): url = settings.DISCORD_NOTIFICATION_WEBHOOK myobj = {'content': message} x = requests.post(url, json=myobj) if not x.ok: raise Exception('discord sending issue') def send_webpush(user, subject: str, body: str, alert_id: int): payload = {'notification': { 'title': subject, 'body': body, "icon": "assets/icons/icon-128x128.png", 'badge': 'badge', 'tag': alert_id, 'requireInteraction': True, 'silent': False, "vibrate": [200, 100, 200], 'timestamp': datetime.datetime.utcnow().replace(tzinfo=pytz.utc).isoformat(), "data": { # i believe this can be anything "dateOfArrival": datetime.datetime.utcnow().replace(tzinfo=pytz.utc).isoformat(), "primaryKey": 1 }, "actions": [{ "action": "explore", "title": "Go to the site" }, { "action": "field-scouting", "title": "Go to scouting", "icon": 'icon' } ] }} send_user_notification(user=user, payload=payload, ttl=1000) <file_sep>/scouting/urls.py from django.urls import include, path from . 
import views

# Top-level scouting routes; each sub-app owns its own urls module.
urlpatterns = [
    path('portal/', include('scouting.portal.urls')),
    path('pit/', include('scouting.pit.urls')),
    path('field/', include('scouting.field.urls')),
    path('admin/', include('scouting.admin.urls')),
    path('match-planning/', include('scouting.matchplanning.urls')),
]
<file_sep>/sponsoring/migrations/0002_historicalitem_active_historicalitem_reset_date_and_more.py
# Generated by Django 4.2 on 2023-07-22 00:57

from django.db import migrations, models
import django.utils.timezone


class Migration(migrations.Migration):
    # Adds an active flag and reset_date to Item and its historical twin.

    dependencies = [
        ('sponsoring', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='historicalitem',
            name='active',
            field=models.CharField(default='y', max_length=1),
        ),
        migrations.AddField(
            model_name='historicalitem',
            name='reset_date',
            field=models.DateField(default=django.utils.timezone.now),
        ),
        migrations.AddField(
            model_name='item',
            name='active',
            field=models.CharField(default='y', max_length=1),
        ),
        migrations.AddField(
            model_name='item',
            name='reset_date',
            field=models.DateField(default=django.utils.timezone.now),
        ),
    ]
<file_sep>/alerts/migrations/0002_rename_alert_id_alertchannelsend_alert.py
# Generated by Django 4.2 on 2023-05-01 19:06

from django.db import migrations


class Migration(migrations.Migration):
    # Renames the FK so the ORM attribute is `alert` instead of `alert_id`.

    dependencies = [
        ('alerts', '0001_initial'),
    ]

    operations = [
        migrations.RenameField(
            model_name='alertchannelsend',
            old_name='alert_id',
            new_name='alert',
        ),
    ]
<file_sep>/user/views.py
import ast
import datetime

import cloudinary
import webpush.views
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.tokens import default_token_generator
from django.db.models import Q
from django.shortcuts import redirect
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from pytz import timezone, utc
from
rest_framework_simplejwt.authentication import JWTAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework.views import APIView
from rest_framework_simplejwt.serializers import TokenObtainPairSerializer, TokenRefreshSerializer
from webpush import send_user_notification

import alerts.util
from api.settings import AUTH_PASSWORD_VALIDATORS
from .serializers import GroupSerializer, UserCreationSerializer, UserLinksSerializer, UserSerializer, \
    UserUpdateSerializer, GetAlertsSerializer
from .models import User, UserLinks
from general.security import get_user_groups, get_user_permissions, ret_message
from django.core.mail import send_mail
from django.template.loader import render_to_string
from django.core.exceptions import ObjectDoesNotExist
import secrets
from django.conf import settings
from django.contrib.auth.tokens import default_token_generator
from django.contrib.auth.password_validation import validate_password, get_default_password_validators
from django.core.exceptions import ValidationError
from rest_framework.response import Response
from rest_framework_simplejwt import views as jwt_views

app_url = 'user/'


class TokenObtainPairView(APIView):
    """
    API endpoint to get a user token
    """
    endpoint = 'token/'

    def post(self, request, format=None):
        try:
            serializer = TokenObtainPairSerializer(data=request.data)
            if not serializer.is_valid():
                return ret_message('Invalid data', True, app_url + self.endpoint, 0, serializer.errors)
            return Response(serializer.validated_data)
        except Exception as e:
            return ret_message('An error occurred while logging in.', True, app_url + self.endpoint, 0, e)


class TokenRefreshView(APIView):
    """
    API endpoint to get a user token
    """
    endpoint = 'token/refresh/'

    def post(self, request, format=None):
        try:
            serializer = TokenRefreshSerializer(data=request.data)
            if not serializer.is_valid():
                return ret_message('Invalid data', True, app_url + self.endpoint, 0, serializer.errors)
            return Response(serializer.validated_data)
        except Exception as e:
            return ret_message('An error occurred while refreshing token.', True, app_url + self.endpoint, 0, e)


class UserLogIn(ModelBackend):
    # Custom auth backend: accepts either email or username for active users.
    def authenticate(self, request, **kwargs):
        UserModel = get_user_model()
        try:
            username = kwargs.get('username', None)
            user = UserModel.objects.get((Q(email=username) | Q(username=username)) & Q(is_active=True))
            if user.check_password(kwargs.get('password', None)):
                return user
        except UserModel.DoesNotExist:
            return None
        return None


class UserProfile(APIView):
    """
    Handles registering new users and management of user profiles.
    """
    endpoint = 'profile/'

    def post(self, request):
        # Registration: creates an inactive user, emails a confirmation link.
        try:
            serialized = UserCreationSerializer(data=request.data)
            if serialized.is_valid():
                # user_confirm_hash = abs(hash(serialized.data.date_joined))
                # NOTE(review): mismatched defaults ('t' vs 'y') mean two
                # *missing* passwords also fail this check — likely intended.
                if serialized.data.get('password1', 't') != serialized.data.get('password2', 'y'):
                    return ret_message('Passwords don\'t match.', True, app_url + self.endpoint, 0)

                user_data = serialized.validated_data
                try:
                    user = User.objects.get(
                        email=user_data.get('email').lower())
                    return ret_message('User already exists with that email.', True, app_url + self.endpoint, 0,
                                       user_data.get('email').lower())
                except ObjectDoesNotExist as odne:
                    x = 0  # no existing user: fall through and create one

                user = User(username=user_data.get('username').lower(),
                            email=user_data.get('email').lower(),
                            first_name=user_data.get('first_name'),
                            last_name=user_data.get('last_name'),
                            date_joined=datetime.datetime.utcnow().replace(tzinfo=utc))
                # user = form.save(commit=False)
                user.is_active = False
                # NOTE(review): '<PASSWORD>' is a redaction artifact in this
                # copy of the source; the original key is presumably 'password1'.
                user.set_password(user_data.get('<PASSWORD>'))
                user.save()

                current_site = get_current_site(request)
                # Confirmation hash derived from the join timestamp; checked
                # again in UserEmailConfirmation.confirm_email.
                user_confirm_hash = abs(hash(user.date_joined))
                cntx = {
                    'user': user,
                    'url': request.scheme + '://' + current_site.domain +
                           '/user/confirm/?pk={}&confirm={}'.format(
                               user.username, user_confirm_hash)
                }
                send_mail(
                    subject="Activate your PARTs account.",
                    message=render_to_string(
                        "email_templates/acc_active_email.txt", cntx).strip(),
                    html_message=render_to_string(
                        "email_templates/acc_active_email.html", cntx).strip(),
                    from_email="<EMAIL>",
                    recipient_list=[user.email]
                )
                return ret_message('User created')
            else:
                # Surface the password validators' messages to the caller.
                ed = serialized._errors.get('password1').get('password')
                error_list = ast.literal_eval(ed.title())
                error_str = ''
                for e in error_list:
                    error_str += '\n' + e
                return ret_message('An error occurred while creating user.' + error_str, True,
                                   app_url + self.endpoint, 0, serialized._errors)
        except Exception as e:
            error_string = str(e)
            if error_string == 'UNIQUE constraint failed: auth_user.username':
                error_string = 'A user with that username already exists.'
            else:
                error_string = None
            return ret_message(
                'An error occurred while creating user.' +
                ('\n' + error_string if error_string is not None else ''),
                True, app_url + self.endpoint, exception=e)

    def put(self, request, pk=None):
        # Profile update: password, email, names, avatar; role flags admin-only.
        try:
            user = User.objects.get(id=self.request.user.id)
            if user.is_authenticated:
                if user is None:
                    return ret_message('An error occurred while updating user data.', True,
                                       app_url + self.endpoint, 0)
                serializer = UserUpdateSerializer(data=request.data)
                # flag used to email user the user's old email about the change in the event that both the email and
                # password are updated
                password_changed = False
                if serializer.is_valid():
                    if "password" in serializer.validated_data:
                        try:
                            validate_password(
                                serializer.validated_data["password"], user=request.user,
                                password_validators=get_default_password_validators())
                        except ValidationError as e:
                            return ret_message('An error occurred changing password.', True,
                                               app_url + self.endpoint, request.user.id, e)
                        password_changed = True
                        user.set_password(serializer.validated_data["password"])
                        cntx = {'user': user,
                                'message': 'Your password has been updated. '
                                           'If you did not do this, please secure your '
                                           'account by requesting a password reset as soon as possible.'}
                        send_mail(
                            subject="Password Change",
                            message=render_to_string(
                                'email_templates/generic_email.txt', cntx).strip(),
                            html_message=render_to_string(
                                'email_templates/generic_email.html', cntx).strip(),
                            from_email='<EMAIL>',
                            recipient_list=[user.email]
                        )
                    if "email" in serializer.validated_data and user.email != serializer.validated_data["email"]:
                        old_email = user.email
                        user.email = serializer.validated_data["email"]
                        user.save()  # checks for db violations, unique constraints and such
                        cntx = {'user': user,
                                'message': 'Your email has been updated to "{}", if you did not do this, '
                                           'please secure '
                                           'your account by changing your password as soon as possible.'.format(
                                               user.email)}
                        send_mail(
                            subject="Email Updated",
                            message=render_to_string(
                                'email_templates/generic_email.txt', cntx).strip(),
                            html_message=render_to_string(
                                'email_templates/generic_email.html', cntx).strip(),
                            from_email='<EMAIL>',
                            recipient_list=[user.email, old_email]
                        )
                        if password_changed:
                            # Also tell the *old* address about the password change.
                            cntx = {'user': user,
                                    'message': 'Your password has been updated. '
                                               'If you did not do this, please secure '
                                               'your account by requesting a password reset '
                                               'as soon as possible.'}
                            send_mail(
                                subject="Password Changed",
                                message=render_to_string(
                                    'email_templates/generic_email.txt', cntx).strip(),
                                html_message=render_to_string(
                                    'email_templates/generic_email.html', cntx).strip(),
                                from_email='<EMAIL>',
                                recipient_list=[user.email]
                            )
                    if "first_name" in serializer.validated_data:
                        user.first_name = serializer.validated_data["first_name"]
                    if "last_name" in serializer.validated_data:
                        user.last_name = serializer.validated_data["last_name"]
                    if "image" in serializer.validated_data:
                        # Reuse the existing cloudinary public id when present.
                        if user.img_id:
                            response = cloudinary.uploader.upload(serializer.validated_data["image"],
                                                                  public_id=user.img_id)
                        else:
                            response = cloudinary.uploader.upload(serializer.validated_data["image"])
                        user.img_id = response['public_id']
                        user.img_ver = str(response['version'])
                    if request.user.is_superuser:  # only allow role editing if admin
                        if "is_staff" in serializer.validated_data:
                            user.is_staff = serializer.validated_data["is_staff"]
                        if "is_active" in serializer.validated_data:
                            user.is_active = serializer.validated_data["is_active"]
                        if "is_superuser" in serializer.validated_data:
                            user.is_superuser = serializer.validated_data["is_superuser"]
                    user.save()
                    return ret_message('Successfully updated user info.')
                else:
                    return ret_message('An error occurred while updating user data.', True,
                                       app_url + self.endpoint, user.id, serializer.errors)
            else:
                return ret_message('Not authenticated.', True, app_url + self.endpoint)
        except Exception as e:
            error_string = str(e)
            if error_string == 'UNIQUE constraint failed: auth_user.username':
                error_string = 'A user with that username already exists.'
            else:
                error_string = None
            return ret_message(
                'An error occurred while updating user.' +
                ('\n' + error_string if error_string is not None else ''),
                True, app_url + self.endpoint, exception=e)

    """
    # there should be no path to this, but leave it here just in case
    def partial_update(self, request, pk=None):
        message = ResponseMessage("Not implemented", rep_status.success)
        return Response(data=message.jsonify(), status=status.HTTP_200_OK)

    def destroy(self, request, pk=None):
        self.check_object_permissions(request, self.queryset.get(id=pk))
        message = ResponseMessage("Not implemented", rep_status.success)
        # TODO work out later
        return Response(data=message.jsonify(), status=status.HTTP_200_OK)

    def retrieve(self, request, pk=None):
        pk = int(pk)
        self.check_object_permissions(request, self.queryset.get(id=pk))
        if request.user.is_superuser and (request.user.id != pk):
            # don't show admins the user's secrets.
            serializer = UserProfileSerializer
        else:
            serializer = CompleteUserProfileSerializer
        profile = self.queryset.get(id=pk)
        if profile is not None:
            serialized = serializer(instance=profile)
            return Response(data=serialized.data, status=status.HTTP_200_OK)
        else:
            return Response(ResponseMessage("User does not exist", rep_status.not_found).jsonify(),
                            status=status.HTTP_404_NOT_FOUND)
    """


class UserEmailConfirmation(APIView):
    endpoint = 'confirm/'

    def get(self, request):
        try:
            req = self.confirm_email(request)
            return req
        except Exception as e:
            return ret_message('Failed to activate user\'s account.', True, app_url + self.endpoint, exception=e)

    def confirm_email(self, request, pk=None):
        """
        Confirms the user's email by checking the user provided hash with the server calculated one.
        Allows user to login if successful.
        """
        try:
            user = User.objects.get(username=request.GET.get('pk'))
            user_confirm_hash = abs(hash(user.date_joined))
            if int(request.GET.get('confirm')) == user_confirm_hash:
                user.is_active = True
                user.save()
                return redirect(settings.FRONTEND_ADDRESS + "/login?page=activationConfirm")
            else:
                ret_message('An error occurred while confirming the user\'s account.', True,
                            app_url + self.endpoint, user.id)
                return redirect(settings.FRONTEND_ADDRESS + "/login?page=activationFail")
        except ObjectDoesNotExist as o:
            ret_message(
                'An error occurred while confirming the user\'s account.', True, app_url + self.endpoint, 0, o)
            return redirect(settings.FRONTEND_ADDRESS + "/login?page=activationFail")


class UserEmailResendConfirmation(APIView):
    endpoint = 'confirm/resend/'

    def post(self, request):
        try:
            req = self.resend_confirmation_email(request)
            return req
        except Exception as e:
            return ret_message('Failed to resend user confirmation email.', True, app_url + self.endpoint,
                               exception=e)

    def resend_confirmation_email(self, request):
        # Same hash/link construction as UserProfile.post registration.
        user = User.objects.get(email=request.data['email'].lower())
        current_site = get_current_site(request)
        user_confirm_hash = abs(hash(user.date_joined))
        cntx = {
            'user': user,
            'url': request.scheme + '://' + current_site.domain + '/user/confirm/?pk={}&confirm={}'.format(
                user.username, user_confirm_hash)
        }
        send_mail(
            subject="Activate your PARTs account.",
            message=render_to_string(
                "email_templates/acc_active_email.txt", cntx).strip(),
            html_message=render_to_string(
                "email_templates/acc_active_email.html", cntx).strip(),
            from_email="<EMAIL>",
            recipient_list=[user.email]
        )
        return ret_message('If a matching user was found you will receive an email shortly.')


class UserRequestPasswordReset(APIView):
    endpoint = 'request-reset-password/'

    def post(self, request):
        try:
            req = self.request_reset_password(request)
            return req
        except Exception as e:
            return ret_message('Failed to request password reset.', True, app_url + self.endpoint, exception=e)

    def
request_reset_password(self, request): email = request.data.get('email') try: user = User.objects.get(email=email.lower()) if user.is_active: # if the user has confirmed their email user.reset_token = secrets.token_urlsafe(24) user.reset_requested_at = datetime.datetime.utcnow().replace(tzinfo=utc) user.save() cntx = { 'user': user, 'url': settings.FRONTEND_ADDRESS + 'login/?page=resetConfirm&uuid={}&token={}'.format( urlsafe_base64_encode(force_bytes(user.pk)), default_token_generator.make_token(user)) } send_mail( subject="Password Reset Requested", message=render_to_string( "email_templates/password_reset_email.txt", cntx).strip(), html_message=render_to_string( "email_templates/password_reset_email.html", cntx).strip(), from_email="<EMAIL>", recipient_list=[user.email] ) except Exception as e: ret_message('Failed user reset attempt', True, app_url + self.endpoint, exception=e) # regardless if we find a user or not, send back the same info. Prevents probing for user emails. return ret_message('If a matching user was found you will receive an email shortly.') class UserPasswordReset(APIView): endpoint = 'reset-password/' def post(self, request): try: req = self.reset_password(request) return req except Exception as e: return ret_message('Failed to reset password.', True, app_url + self.endpoint, exception=e) def reset_password(self, request): try: # username = request.data['username'] uuid = request.data['uuid'] token = request.data['token'] password = request.data['password'] user_id = urlsafe_base64_decode(uuid) user = User.objects.get(id=user_id) if token == None or uuid == None: # prevents return ret_message('Reset token required.', True, app_url + self.endpoint, exception=request.data['email']) # TODO Add time component back and ((user.reset_requested_at + timedelta(hours=1)) > timezone.now()): if (default_token_generator.check_token(user, token)): try: validate_password( password, user) except ValidationError as e: return ret_message('Password invalid' + 
str(e), True, app_url + self.endpoint, user.id, e) user.set_password(<PASSWORD>) user.save() return ret_message('Password updated successfully.') else: return ret_message('Invalid token or request timed out.', True, app_url + self.endpoint, user.id) except KeyError as e: e = str(e) e = e.strip("'") return ret_message(e + " missing from request but is required", True, app_url + self.endpoint, exception=e) class UserRequestUsername(APIView): endpoint = 'request-username/' def post(self, request): try: req = self.forgot_username(request) return req except Exception as e: return ret_message('Failed to request username.', True, app_url + self.endpoint, exception=e) def forgot_username(self, request): email = request.data.get('email') try: user = User.objects.get(email=email.lower()) if user.is_active: # if the user has confirmed their email cntx = { 'user': user } send_mail( subject="Username Requested", message=render_to_string( "email_templates/forgot_username.txt", cntx).strip(), html_message=render_to_string( "email_templates/forgot_username.html", cntx).strip(), from_email="<EMAIL>", recipient_list=[user.email] ) except Exception as e: ret_message('Failed to request username.', True, app_url + self.endpoint, exception=e) # regardless if we find a user or not, send back the same info. Prevents probing for user emails. 
return ret_message('If a matching user was found you will receive an email shortly.') class UserData(APIView): """ API endpoint """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'user-data/' def get_user(self): user = User.objects.get(id=self.request.user.id) user = { 'id': user.id, 'username': user.username, 'email': user.email, 'first_name': user.first_name, 'last_name': user.last_name, 'is_active': user.is_active, 'phone': user.phone, 'groups': user.groups, 'phone_type': user.phone_type, 'phone_type_id': user.phone_type_id, 'image': cloudinary.CloudinaryImage(user.img_id, version=user.img_ver).build_url() } return user def get(self, request, format=None): try: req = self.get_user() serializer = UserSerializer(req) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while getting user data.', True, app_url + self.endpoint, request.user.id, e) class UserLinksView(APIView): """ API endpoint to get links a user has based on permissions """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'user-links/' def get_links(self): permissions = get_user_permissions(self.request.user.id) user_links = UserLinks.objects.filter( permission__in=[per.id for per in permissions]).order_by('order') return user_links def get(self, request, format=None): try: req = self.get_links() serializer = UserLinksSerializer(req, many=True) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while getting user links.', True, app_url + self.endpoint, request.user.id, e) class UserGroups(APIView): """ API endpoint to get groups a user has based on permissions """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'user-groups/' def get_groups(self, user_id): return get_user_groups(user_id) def get(self, request, format=None): try: req = 
self.get_groups(request.query_params.get('user_id', None)) serializer = GroupSerializer(req, many=True) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while getting user groups.', True, app_url + self.endpoint, request.user.id, e) class Alerts(APIView): """ API endpoint to get a user's notifications """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'get-notifications/' def get(self, request, format=None): try: req = alerts.util.get_user_alerts(request.user.id, request.query_params.get('alert_comm_typ_id', None)) serializer = GetAlertsSerializer(req, many=True) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while getting user alerts.', True, app_url + self.endpoint, request.user.id, e) class SaveWebPushInfo(APIView): """ API endpoint to save a user push notification subscription object """ authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'webpush-save/' def post(self, request, format=None): try: response = webpush.views.save_info(request) if response.status_code == 400: return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, response) return ret_message('Successfully subscribed to push notifications.') except Exception as e: return ret_message('An error occurred while subscribing to push notifications.', True, app_url + self.endpoint, request.user.id, e) <file_sep>/form/migrations/0003_rename_qa_id_questionanswer_question_answer_id.py # Generated by Django 4.2 on 2023-06-30 00:20 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('form', '0002_rename_q_id_question_question_id_and_more'), ] operations = [ migrations.RenameField( model_name='questionanswer', old_name='qa_id', new_name='question_answer_id', ), ] <file_sep>/form/migrations/0012_alter_question_active_alter_questionoption_active.py # Generated by 
Django 4.2 on 2023-07-22 00:57 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('form', '0011_alter_questionanswer_answer'), ] operations = [ migrations.AlterField( model_name='question', name='active', field=models.CharField(default='y', max_length=1), ), migrations.AlterField( model_name='questionoption', name='active', field=models.CharField(default='y', max_length=1), ), ] <file_sep>/admin/migrations/0001_initial.py # Generated by Django 4.0.3 on 2022-04-06 06:40 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='ErrorLog', fields=[ ('error_log_id', models.AutoField(primary_key=True, serialize=False)), ('path', models.CharField(blank=True, max_length=255, null=True)), ('message', models.CharField(blank=True, max_length=1000, null=True)), ('exception', models.CharField(blank=True, max_length=4000, null=True)), ('time', models.DateTimeField()), ('void_ind', models.CharField(default='n', max_length=1)), ], ), ] <file_sep>/scripts/notify-users.sh #!/bin/bash newline=$'\n' timestamp=$(date) output=$(curl https://parts.bduke.dev/public/notify-users/) echo "$timestamp" "$output" "$newline" >> /home/brandon/PARTs_WebAPI/logs/log-notify-users.txt<file_sep>/sponsoring/migrations/0001_initial.py # Generated by Django 4.2 on 2023-07-20 16:58 from django.conf import settings from django.db import migrations, models import django.db.models.deletion import simple_history.models class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Item', fields=[ ('item_id', models.AutoField(primary_key=True, serialize=False)), ('item_nm', models.CharField(max_length=255)), ('item_desc', models.TextField()), ('quantity', models.IntegerField()), ('void_ind', models.CharField(default='n', max_length=1)), ], ), 
migrations.CreateModel( name='Sponsor', fields=[ ('sponsor_id', models.AutoField(primary_key=True, serialize=False)), ('sponsor_nm', models.CharField(max_length=255)), ('phone', models.CharField(max_length=255)), ('email', models.CharField(max_length=255)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='ItemSponsor', fields=[ ('item_sponsor_id', models.AutoField(primary_key=True, serialize=False)), ('quantity', models.IntegerField()), ('item_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='sponsoring.item')), ('sponsor_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='sponsoring.sponsor')), ], ), migrations.CreateModel( name='HistoricalSponsor', fields=[ ('sponsor_id', models.IntegerField(blank=True, db_index=True)), ('sponsor_nm', models.CharField(max_length=255)), ('phone', models.CharField(max_length=255)), ('email', models.CharField(max_length=255)), ('void_ind', models.CharField(default='n', max_length=1)), ('history_id', models.AutoField(primary_key=True, serialize=False)), ('history_date', models.DateTimeField(db_index=True)), ('history_change_reason', models.CharField(max_length=100, null=True)), ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)), ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'verbose_name': 'historical sponsor', 'verbose_name_plural': 'historical sponsors', 'ordering': ('-history_date', '-history_id'), 'get_latest_by': ('history_date', 'history_id'), }, bases=(simple_history.models.HistoricalChanges, models.Model), ), migrations.CreateModel( name='HistoricalItem', fields=[ ('item_id', models.IntegerField(blank=True, db_index=True)), ('item_nm', models.CharField(max_length=255)), ('item_desc', models.TextField()), ('quantity', models.IntegerField()), ('void_ind', 
models.CharField(default='n', max_length=1)), ('history_id', models.AutoField(primary_key=True, serialize=False)), ('history_date', models.DateTimeField(db_index=True)), ('history_change_reason', models.CharField(max_length=100, null=True)), ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)), ('history_user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)), ], options={ 'verbose_name': 'historical item', 'verbose_name_plural': 'historical items', 'ordering': ('-history_date', '-history_id'), 'get_latest_by': ('history_date', 'history_id'), }, bases=(simple_history.models.HistoricalChanges, models.Model), ), ] <file_sep>/scouting/matchplanning/serializers.py from rest_framework import serializers class TeamSerializer(serializers.Serializer): team_no = serializers.IntegerField() team_nm = serializers.CharField() class EventSerializer(serializers.Serializer): event_id = serializers.IntegerField(read_only=True) season_id = serializers.IntegerField(read_only=True) event_nm = serializers.CharField() date_st = serializers.DateTimeField() date_end = serializers.DateTimeField() event_cd = serializers.CharField() event_url = serializers.CharField() address = serializers.CharField() city = serializers.CharField() state_prov = serializers.CharField() postal_code = serializers.CharField() location_name = serializers.CharField() gmaps_url = serializers.CharField() webcast_url = serializers.CharField() timezone = serializers.CharField() current = serializers.CharField() competition_page_active = serializers.CharField() class CompetitionLevelSerializer(serializers.Serializer): comp_lvl_typ = serializers.CharField() comp_lvl_typ_nm = serializers.CharField() comp_lvl_order = serializers.IntegerField() class MatchSerializer(serializers.Serializer): match_id = serializers.CharField(read_only=True) event_id = serializers.IntegerField(read_only=True) 
match_number = serializers.IntegerField() red_score = serializers.IntegerField() blue_score = serializers.IntegerField() time = serializers.DateTimeField() blue_one_id = serializers.IntegerField() blue_one_rank = serializers.IntegerField(allow_null=True) blue_two_id = serializers.IntegerField() blue_two_rank = serializers.IntegerField(allow_null=True) blue_three_id = serializers.IntegerField() blue_three_rank = serializers.IntegerField(allow_null=True) red_one_id = serializers.IntegerField() red_one_rank = serializers.IntegerField(allow_null=True) red_two_id = serializers.IntegerField() red_two_rank = serializers.IntegerField(allow_null=True) red_three_id = serializers.IntegerField() red_three_rank = serializers.IntegerField(allow_null=True) comp_level = CompetitionLevelSerializer(read_only=True) class InitSerializer(serializers.Serializer): event = EventSerializer() matches = MatchSerializer(many=True, required=False) teams = TeamSerializer(many=True, required=False) class ScoutPitResultAnswerSerializer(serializers.Serializer): question = serializers.CharField() answer = serializers.CharField(required=False, allow_null=True) class ScoutPitResultsSerializer(serializers.Serializer): teamNo = serializers.CharField() teamNm = serializers.CharField() pic = serializers.CharField() results = ScoutPitResultAnswerSerializer(many=True) class ScoutColSerializer(serializers.Serializer): PropertyName = serializers.CharField() ColLabel = serializers.CharField() order = serializers.CharField() class ScoutResultAnswerSerializer(serializers.BaseSerializer): def to_representation(self, instance): return instance class TeamNoteSerializer(serializers.Serializer): team_note_id = serializers.IntegerField(read_only=True) team_no = TeamSerializer() match = serializers.IntegerField(read_only=True) note = serializers.CharField() time = serializers.DateTimeField() class MatchPlanningSerializer(serializers.Serializer): team = TeamSerializer() pitData = ScoutPitResultsSerializer() fieldCols = 
ScoutColSerializer(many=True) fieldAnswers = ScoutResultAnswerSerializer(many=True) notes = TeamNoteSerializer(many=True) class SaveTeamNoteSerializer(serializers.Serializer): team_note_id = serializers.IntegerField(read_only=True) team_no = serializers.IntegerField() match = serializers.IntegerField(allow_null=True, required=False) note = serializers.CharField() <file_sep>/form/migrations/0011_alter_questionanswer_answer.py # Generated by Django 4.2 on 2023-07-15 22:07 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('form', '0010_questiontype_is_list'), ] operations = [ migrations.AlterField( model_name='questionanswer', name='answer', field=models.TextField(blank=True, null=True), ), ] <file_sep>/user/migrations/0007_user_img_id_user_img_ver_and_more.py # Generated by Django 4.0.3 on 2023-04-30 03:42 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('user', '0006_userpushnotificationsubscriptionobjects_time'), ] operations = [ migrations.AddField( model_name='user', name='img_id', field=models.CharField(blank=True, max_length=500, null=True), ), migrations.AddField( model_name='user', name='img_ver', field=models.CharField(blank=True, max_length=500, null=True), ), migrations.DeleteModel( name='UserPushNotificationSubscriptionObjects', ), ] <file_sep>/scouting/pit/serializers.py from rest_framework import serializers class TeamSerializer(serializers.Serializer): team_no = serializers.IntegerField() team_nm = serializers.CharField() checked = serializers.BooleanField(required=False) class QuestionOptionsSerializer(serializers.Serializer): question_opt_id = serializers.IntegerField(required=False, allow_null=True) question_id = serializers.IntegerField(read_only=True) option = serializers.CharField() active = serializers.CharField() class QuestionTypeSerializer(serializers.Serializer): question_typ = serializers.CharField() question_typ_nm = serializers.CharField() 
is_list = serializers.CharField() class QuestionSerializer(serializers.Serializer): question_id = serializers.IntegerField(required=False, allow_null=True) season_id = serializers.IntegerField(read_only=True) question = serializers.CharField() order = serializers.IntegerField() active = serializers.CharField() question_typ = QuestionTypeSerializer() form_sub_typ = serializers.CharField(required=False, allow_blank=True, allow_null=True) form_sub_nm = serializers.CharField(required=False, allow_blank=True, allow_null=True) form_typ = serializers.CharField() questionoptions_set = QuestionOptionsSerializer( required=False, allow_null=True, many=True) answer = serializers.CharField(required=False, allow_null=True, allow_blank=True) class InitSerializer(serializers.Serializer): scoutQuestions = QuestionSerializer(many=True) teams = TeamSerializer(many=True, required=False) comp_teams = TeamSerializer(many=True, required=False) class ScoutAnswerSerializer(serializers.Serializer): scoutQuestions = QuestionSerializer(many=True) teams = TeamSerializer(many=True, required=False) team = serializers.CharField(required=False) class ScoutPitResultAnswerSerializer(serializers.Serializer): question = serializers.CharField() answer = serializers.CharField(required=False, allow_null=True) class ScoutPitResultsSerializer(serializers.Serializer): teamNo = serializers.CharField() teamNm = serializers.CharField() pic = serializers.CharField() results = ScoutPitResultAnswerSerializer(many=True) class PitTeamDataSerializer(serializers.Serializer): questions = QuestionSerializer(required=False, many=True) pic = serializers.CharField() <file_sep>/public/views.py import datetime import pytz import requests from django.db.models import Q, ExpressionWrapper, F, DurationField from rest_framework.response import Response from rest_framework.views import APIView from general import send_message from general.security import ret_message from scouting.models import Event, ScoutFieldSchedule, Schedule 
app_url = 'public/' class APIStatus(APIView): """ API endpoint to get if the api is available """ def get(self, request, format=None): return Response(200) class NotifyUsers(APIView): """API endpoint to notify users""" endpoint = 'notify-users/' def notify_users(self): message = '' event = Event.objects.get(Q(current='y') & Q(void_ind='n')) curr_time = datetime.datetime.utcnow().astimezone(pytz.timezone(event.timezone)) sfss_15 = ScoutFieldSchedule.objects.annotate( diff=ExpressionWrapper(F('st_time') - curr_time, output_field=DurationField())) \ .filter(diff__lte=datetime.timedelta(minutes=17)) \ .filter(Q(event=event) & Q(notification1=False) & Q(void_ind='n')) sfss_5 = ScoutFieldSchedule.objects.annotate( diff=ExpressionWrapper(F('st_time') - curr_time, output_field=DurationField())) \ .filter(diff__lte=datetime.timedelta(minutes=7)) \ .filter(Q(event=event) & Q(notification2=False) & Q(void_ind='n')) sfss_now = ScoutFieldSchedule.objects.annotate( diff=ExpressionWrapper(F('st_time') - curr_time, output_field=DurationField())) \ .filter(diff__lt=datetime.timedelta(minutes=5)) \ .filter(Q(event=event) & Q(notification3=False) & Q(void_ind='n')) message += self.send_scout_notification(1, sfss_15, event) message += self.send_scout_notification(2, sfss_5, event) message += self.send_scout_notification(3, sfss_now, event) schs = Schedule.objects.annotate( diff=ExpressionWrapper(F('st_time') - curr_time, output_field=DurationField())) \ .filter(diff__lte=datetime.timedelta(minutes=6)) \ .filter(Q(event=event) & Q(notified=False) & Q(void_ind='n')).order_by('sch_typ__sch_typ', 'st_time') discord_message = '' sch_typ = '' st_time = None for i in range(len(schs) + 1): if len(schs) == 0: break sch = None try: sch = schs[i] except IndexError: sch = None finally: if sch is None or \ (sch_typ != '' and sch_typ != sch.sch_typ.sch_typ) or \ (st_time is not None and st_time != sch.st_time): sch_typ = '' st_time = None discord_message = discord_message[0:len(discord_message) - 
2] send_message.send_discord_notification(discord_message) discord_message = '' message += 'Discord Message Sent\n' if sch is None: break date_st_utc = sch.st_time.astimezone(pytz.utc) date_end_utc = sch.end_time.astimezone(pytz.utc) date_st_local = date_st_utc.astimezone(pytz.timezone(event.timezone)) date_end_local = date_end_utc.astimezone(pytz.timezone(event.timezone)) date_st_str = date_st_local.strftime("%m/%d/%Y, %I:%M%p") date_end_str = date_end_local.strftime("%m/%d/%Y, %I:%M%p") data = { 'location': sch.sch_typ.sch_nm, 'scout_time_st': date_st_str, 'scout_time_end': date_end_str, 'lead_scout': 'automated_message' } try: send_message.send_email( sch.user.phone + sch.user.phone_type.phone_type, 'Pit time!', 'notify_schedule', data) message += 'Phone Notified: ' + sch.user.first_name + ' : ' + sch.sch_typ.sch_nm + '\n' sch.notified = True sch.save() except Exception as e: message += 'Phone unable to notify: ' + \ (sch.user.first_name if sch.user is not None else "pit time user missing") + '\n' if sch_typ == '' or st_time is None: sch_typ = sch.sch_typ.sch_typ st_time = sch.st_time discord_message = f'Scheduled time in the pit, for {sch.sch_typ.sch_nm} from ' \ f'{date_st_str} to {date_end_str} : ' if sch_typ == sch.sch_typ.sch_typ and st_time == sch.st_time: discord_message += (f'<@{sch.user.discord_user_id}>' if sch.user.discord_user_id is not None else sch.user.first_name) + ', ' sch.notified = True sch.save() message += 'Discord Notified: ' + sch.user.first_name + ' : ' + sch.sch_typ.sch_nm + '\n' if message == '': message = 'No notifications' return ret_message(message) def send_scout_notification(self, notification, sfss, event): message = '' for sfs in sfss: date_st_utc = sfs.st_time.astimezone(pytz.utc) date_end_utc = sfs.end_time.astimezone(pytz.utc) date_st_local = date_st_utc.astimezone(pytz.timezone(event.timezone)) date_end_local = date_end_utc.astimezone(pytz.timezone(event.timezone)) date_st_str = date_st_local.strftime("%m/%d/%Y, %I:%M%p") 
date_end_str = date_end_local.strftime("%m/%d/%Y, %I:%M%p") data = { 'scout_location': 'Field', 'scout_time_st': date_st_str, 'scout_time_end': date_end_str, 'lead_scout': 'automated_message' } try: send_message.send_email( sfs.red_one.phone + sfs.red_one.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Phone Notified scouting: ' + sfs.red_one.first_name + '\n' except Exception as e: message += 'Phone Unable to notify scouting: ' + \ (sfs.red_one.first_name if sfs.red_one is not None else "red one") + '\n' try: send_message.send_email( sfs.red_two.phone + sfs.red_two.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Phone Notified scouting: ' + sfs.red_two.first_name + '\n' except Exception as e: message += 'Phone Unable to notify scouting: ' + \ (sfs.red_two.first_name if sfs.red_two is not None else "red two") + '\n' try: send_message.send_email( sfs.red_three.phone + sfs.red_three.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Phone Notified scouting: ' + sfs.red_three.first_name + '\n' except Exception as e: message += 'Phone Unable to notify scouting: ' + \ (sfs.red_three.first_name if sfs.red_three is not None else "red three") + '\n' try: send_message.send_email( sfs.blue_one.phone + sfs.blue_one.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Phone Notified scouting: ' + sfs.blue_one.first_name + '\n' except Exception as e: message += 'Phone Unable to notify scouting: ' + \ (sfs.blue_one.first_name if sfs.blue_one is not None else "blue one") + '\n' try: send_message.send_email( sfs.blue_two.phone + sfs.blue_two.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Phone Notified scouting: ' + sfs.blue_two.first_name + '\n' except Exception as e: message += 'Phone Unable to notify scouting: ' + \ (sfs.blue_two.first_name if sfs.blue_two is not None else "blue two") + '\n' try: send_message.send_email( sfs.blue_three.phone + 
sfs.blue_three.phone_type.phone_type, 'Time to Scout!', 'notify_scout', data) message += 'Phone Notified scouting: ' + sfs.blue_three.first_name + '\n' except Exception as e: message += 'Phone Unable to notify scouting: ' + \ (sfs.blue_three.first_name if sfs.blue_three is not None else "blue three") + '\n' warning_text = '' match notification: case 1: sfs.notification1 = True warning_text = ', 15 minute warning, ' case 2: sfs.notification2 = True warning_text = ', 5 minute warning, ' case 3: sfs.notification3 = True discord_message = f'Scheduled time for scouting{warning_text}from ' \ f'{date_st_str} to {date_end_str} : ' discord_message += ((f'<@{sfs.red_one.discord_user_id}>' if sfs.red_one.discord_user_id is not None else sfs.red_one.first_name) if sfs.red_one is not None else "red one") + ', ' discord_message += ((f'<@{sfs.red_two.discord_user_id}>' if sfs.red_two.discord_user_id is not None else sfs.red_two.first_name) if sfs.red_two is not None else "red two") + ', ' discord_message += ((f'<@{sfs.red_three.discord_user_id}>' if sfs.red_three.discord_user_id is not None else sfs.red_three.first_name) if sfs.red_three is not None else "red three") + ', ' discord_message += ((f'<@{sfs.blue_one.discord_user_id}>' if sfs.blue_one.discord_user_id is not None else sfs.blue_one.first_name) if sfs.blue_one is not None else "blue one") + ', ' discord_message += ((f'<@{sfs.blue_two.discord_user_id}>' if sfs.blue_two.discord_user_id is not None else sfs.blue_two.first_name) if sfs.blue_two is not None else "blue two") + ', ' discord_message += ((f'<@{sfs.blue_three.discord_user_id}>' if sfs.blue_three.discord_user_id is not None else sfs.blue_three.first_name) if sfs.blue_three is not None else "blue three") send_message.send_discord_notification(discord_message) sfs.save() return message def get(self, request, format=None): try: req = self.notify_users() return req except Exception as e: return ret_message('An error occurred while notifying the users.', True, app_url + 
self.endpoint, 0, e) <file_sep>/form/migrations/0001_initial.py # Generated by Django 4.2 on 2023-06-30 00:01 from django.db import migrations, models import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): initial = True dependencies = [ ('scouting', '0011_alter_scoutquestion_void_ind'), ] operations = [ migrations.CreateModel( name='FormResponse', fields=[ ('fr_id', models.AutoField(primary_key=True, serialize=False)), ('time', models.DateTimeField(default=django.utils.timezone.now)), ], ), migrations.CreateModel( name='Question', fields=[ ('q_id', models.AutoField(primary_key=True, serialize=False)), ('question', models.CharField(max_length=1000)), ('order', models.IntegerField()), ('active', models.CharField(max_length=1)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='QuestionType', fields=[ ('question_typ', models.CharField(max_length=50, primary_key=True, serialize=False)), ('question_typ_nm', models.CharField(max_length=255)), ('void_ind', models.CharField(default='n', max_length=1)), ], ), migrations.CreateModel( name='Type', fields=[ ('form_typ', models.CharField(max_length=10, primary_key=True, serialize=False)), ('form_nm', models.CharField(max_length=255)), ], ), migrations.CreateModel( name='SubType', fields=[ ('form_sub_typ', models.CharField(max_length=10, primary_key=True, serialize=False)), ('form_sub_nm', models.CharField(max_length=255)), ('form_typ', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='form.type')), ], ), migrations.CreateModel( name='QuestionOption', fields=[ ('q_opt_id', models.AutoField(primary_key=True, serialize=False)), ('option', models.CharField(max_length=255)), ('active', models.CharField(blank=True, max_length=1, null=True)), ('void_ind', models.CharField(default='n', max_length=1)), ('q_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='form.question')), ], ), migrations.CreateModel( 
name='QuestionAnswer', fields=[ ('qa_id', models.AutoField(primary_key=True, serialize=False)), ('answer', models.CharField(blank=True, max_length=1000, null=True)), ('void_ind', models.CharField(default='n', max_length=1)), ('form_response', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='form_response', to='form.formresponse')), ('q_id', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='form.question')), ('scout_field', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='scout_field', to='scouting.scoutfield')), ('scout_pit', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='scout_pit', to='scouting.scoutfield')), ], ), migrations.AddField( model_name='question', name='form_sub_typ', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='form.subtype'), ), migrations.AddField( model_name='question', name='form_typ', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='form.type'), ), migrations.AddField( model_name='question', name='question_typ', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='form.questiontype'), ), migrations.AddField( model_name='question', name='season', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, to='scouting.season'), ), ] <file_sep>/scouting/field/serializers.py from rest_framework import serializers class TeamSerializer(serializers.Serializer): team_no = serializers.IntegerField() team_nm = serializers.CharField() checked = serializers.BooleanField(required=False) class MatchSerializer(serializers.Serializer): match_id = serializers.CharField(read_only=True) event_id = serializers.IntegerField(read_only=True) match_number = serializers.IntegerField() time = serializers.DateTimeField() blue_one_id = serializers.IntegerField() blue_two_id = serializers.IntegerField() blue_three_id = 
serializers.IntegerField() red_one_id = serializers.IntegerField() red_two_id = serializers.IntegerField() red_three_id = serializers.IntegerField() class UserSerializer(serializers.Serializer): id = serializers.IntegerField(read_only=True) username = serializers.CharField() email = serializers.CharField() first_name = serializers.CharField() last_name = serializers.CharField() is_active = serializers.BooleanField() phone = serializers.CharField() phone_type_id = serializers.IntegerField(required=False, allow_null=True) class ScoutFieldScheduleSerializer(serializers.Serializer): scout_field_sch_id = serializers.IntegerField() event_id = serializers.IntegerField(read_only=True) st_time = serializers.DateTimeField() end_time = serializers.DateTimeField() red_one_id = UserSerializer(required=False, allow_null=True, read_only=True) red_two_id = UserSerializer(required=False, allow_null=True, read_only=True) red_three_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_one_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_two_id = UserSerializer(required=False, allow_null=True, read_only=True) blue_three_id = UserSerializer( required=False, allow_null=True, read_only=True) class QuestionOptionsSerializer(serializers.Serializer): question_opt_id = serializers.IntegerField(required=False, allow_null=True) question_id = serializers.IntegerField(read_only=True) option = serializers.CharField() active = serializers.CharField() class QuestionTypeSerializer(serializers.Serializer): question_typ = serializers.CharField() question_typ_nm = serializers.CharField() is_list = serializers.CharField() class QuestionSerializer(serializers.Serializer): question_id = serializers.IntegerField(required=False, allow_null=True) season_id = serializers.IntegerField(read_only=True) question = serializers.CharField() order = serializers.IntegerField() active = serializers.CharField() question_typ = QuestionTypeSerializer() form_sub_typ = 
serializers.CharField(required=False, allow_blank=True, allow_null=True) form_sub_nm = serializers.CharField(required=False, allow_blank=True, allow_null=True) form_typ = serializers.CharField() questionoptions_set = QuestionOptionsSerializer( required=False, allow_null=True, many=True) answer = serializers.CharField(required=False, allow_null=True, allow_blank=True) class ScoutFieldSerializer(serializers.Serializer): scoutQuestions = QuestionSerializer(many=True) teams = TeamSerializer(many=True, required=False) #team = serializers.CharField(required=False) scoutFieldSchedule = ScoutFieldScheduleSerializer() matches = MatchSerializer(many=True, required=False) class SaveScoutFieldSerializer(serializers.Serializer): scoutQuestions = QuestionSerializer(many=True) team = serializers.CharField() match = serializers.CharField(required=False) class ScoutColSerializer(serializers.Serializer): PropertyName = serializers.CharField() ColLabel = serializers.CharField() order = serializers.CharField() class ScoutResultAnswerSerializer(serializers.BaseSerializer): def to_representation(self, instance): return instance class ScoutFieldResultsSerializer(serializers.Serializer): scoutCols = ScoutColSerializer(many=True) scoutAnswers = ScoutResultAnswerSerializer(many=True) <file_sep>/admin/urls.py from django.urls import path from .views import Init, SaveUser, ErrorLogView urlpatterns = [ path('init/', Init.as_view()), path('save-user/', SaveUser.as_view()), path('error-log/', ErrorLogView.as_view()), ] <file_sep>/scouting/migrations/0002_initial.py # Generated by Django 4.0.3 on 2022-04-06 06:40 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('scouting', '0001_initial'), ('auth', '0012_alter_user_first_name_max_length'), ] operations = [ migrations.AddField( model_name='scoutpitschedule', 
name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutpitanswer', name='scout_pit', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.scoutpit'), ), migrations.AddField( model_name='scoutpitanswer', name='sq', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.scoutquestion'), ), migrations.AddField( model_name='scoutpit', name='event', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.event'), ), migrations.AddField( model_name='scoutpit', name='team_no', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.team'), ), migrations.AddField( model_name='scoutpit', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutfieldschedule', name='blue_one', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='blue_one_user', to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutfieldschedule', name='blue_three', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='blue_three_user', to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutfieldschedule', name='blue_two', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='blue_two_user', to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutfieldschedule', name='event', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.event'), ), migrations.AddField( model_name='scoutfieldschedule', name='red_one', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='red_one_user', to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutfieldschedule', name='red_three', 
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='red_three_user', to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutfieldschedule', name='red_two', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='red_two_user', to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutfieldanswer', name='scout_field', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.scoutfield'), ), migrations.AddField( model_name='scoutfieldanswer', name='sq', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.scoutquestion'), ), migrations.AddField( model_name='scoutfield', name='event', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.event'), ), migrations.AddField( model_name='scoutfield', name='team_no', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.team'), ), migrations.AddField( model_name='scoutfield', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL), ), migrations.AddField( model_name='scoutauthgroups', name='auth_group_id', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='auth.group'), ), migrations.AddField( model_name='questionoptions', name='sq', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.scoutquestion'), ), migrations.AddField( model_name='match', name='blue_one', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='blue_one_team', to='scouting.team'), ), migrations.AddField( model_name='match', name='blue_three', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='blue_three_team', to='scouting.team'), ), migrations.AddField( model_name='match', name='blue_two', field=models.ForeignKey(null=True, 
on_delete=django.db.models.deletion.PROTECT, related_name='blue_two_team', to='scouting.team'), ), migrations.AddField( model_name='match', name='comp_level', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.competitionlevel'), ), migrations.AddField( model_name='match', name='event', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.event'), ), migrations.AddField( model_name='match', name='red_one', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='red_one_team', to='scouting.team'), ), migrations.AddField( model_name='match', name='red_three', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='red_three_team', to='scouting.team'), ), migrations.AddField( model_name='match', name='red_two', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='red_two_team', to='scouting.team'), ), migrations.AddField( model_name='event', name='season', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='scouting.season'), ), migrations.AddField( model_name='event', name='team_no', field=models.ManyToManyField(to='scouting.team'), ), ] <file_sep>/requirements.txt cloudinary>=1.29.0 django>=4.0.3 django-cors-headers>=3.11.0 django-filter>=21.1 django-webpush djangorestframework>=3.13.1 djangorestframework-simplejwt>=5.1.0 pytz>=2022.1 requests>=2.27.1 gunicorn six wheel cryptography dj-database-url uwsgi Pillow psycopg2-binary PyJWT>=2.3.0 sqlparse>=0.4.2 toml>=0.10.2 urllib3>=1.26.9 django-spa django-simple-history<file_sep>/form/migrations/0006_question_required.py # Generated by Django 4.2 on 2023-07-13 01:12 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('form', '0005_response_form_typ_response_void_ind_and_more'), ] operations = [ migrations.AddField( model_name='question', name='required', 
field=models.CharField(default='', max_length=1), preserve_default=False, ), ] <file_sep>/tba/serializers.py from rest_framework import serializers class EventUpdatedMessageSerializer(serializers.Serializer): event_name = serializers.CharField() first_match_time = serializers.CharField() event_key = serializers.CharField() class EventUpdatedSerializer(serializers.Serializer): message_data = EventUpdatedMessageSerializer() message_type = serializers.CharField() <file_sep>/form/migrations/0004_rename_formresponse_response_and_more.py # Generated by Django 4.2 on 2023-06-30 00:23 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('form', '0003_rename_qa_id_questionanswer_question_answer_id'), ] operations = [ migrations.RenameModel( old_name='FormResponse', new_name='Response', ), migrations.RenameField( model_name='questionanswer', old_name='form_response', new_name='response', ), migrations.RenameField( model_name='response', old_name='fr_id', new_name='response_id', ), ] <file_sep>/form/migrations/0005_response_form_typ_response_void_ind_and_more.py # Generated by Django 4.2 on 2023-07-12 23:46 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('scouting', '0011_alter_scoutquestion_void_ind'), ('form', '0004_rename_formresponse_response_and_more'), ] operations = [ migrations.AddField( model_name='response', name='form_typ', field=models.ForeignKey(default='', on_delete=django.db.models.deletion.PROTECT, to='form.type'), preserve_default=False, ), migrations.AddField( model_name='response', name='void_ind', field=models.CharField(default='n', max_length=1), ), migrations.AlterField( model_name='questionanswer', name='scout_pit', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='scout_pit', to='scouting.scoutpit'), ), ] <file_sep>/api/settings.py """ Django settings for api project. 
Generated by 'django-admin startproject' using Django 4.0.3. For more information on this file, see https://docs.djangoproject.com/en/4.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/4.0/ref/settings/ """ from datetime import timedelta import os from pathlib import Path # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = os.getenv('SECRET_KEY') # SECURITY WARNING: don't run with debug turned on in production! DEBUG = os.getenv('DEBUG', 'True').lower() in ('true', '1', 't') DEBUG_PROPAGATE_EXCEPTIONS = os.getenv('DEBUG', 'True').lower() in ('true', '1', 't') ALLOWED_HOSTS = ['parts.bduke.dev', '192.168.1.41', 'partsuat.bduke.dev'] FRONTEND_ADDRESS = os.getenv('FRONTEND_ADDRESS') # Application definition INSTALLED_APPS = [ 'whitenoise.runserver_nostatic', 'django.contrib.staticfiles', 'admin.apps.AdminConfig', 'alerts.apps.AlertsConfig', 'public.apps.PublicConfig', 'scouting.apps.ScoutingConfig', 'user.apps.UserConfig', 'django.contrib.auth', 'django.contrib.contenttypes', 'corsheaders', 'rest_framework', 'webpush' ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'whitenoise.middleware.WhiteNoiseMiddleware', 'spa.middleware.SPAMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'corsheaders.middleware.CorsMiddleware' ] CORS_ORIGIN_WHITELIST = [ 'https://parts3492.org', 'https://www.parts3492.org', 'https://parts3492uat.bduke.dev', 'https://www.parts3492uat.bduke.dev', 'http://192.168.1.41:49156' ] 
ROOT_URLCONF = 'api.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'api.wsgi.application' # Database # https://docs.djangoproject.com/en/4.0/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': os.getenv('DB_NAME'), 'USER': os.getenv('DB_USER'), 'PASSWORD': os.getenv('DB_PASSWORD'), 'HOST': os.getenv('DB_HOST'), 'PORT': os.getenv('DB_PORT'), } } """ sequence reset for postgres python3 manage.py sqlsequencereset user """ # Password validation # https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] REST_FRAMEWORK = { 'DEFAULT_AUTHENTICATION_CLASSES': [ 'rest_framework_simplejwt.authentication.JWTAuthentication', ], } SIMPLE_JWT = { 'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), 'ROTATE_REFRESH_TOKENS': True, 'BLACKLIST_AFTER_ROTATION': True, 'UPDATE_LAST_LOGIN': True, 'ALGORITHM': 'RS512', 'SIGNING_KEY': open(os.path.join(BASE_DIR, 'keys/jwt-key')).read(), 'VERIFYING_KEY': open(os.path.join(BASE_DIR, 'keys/jwt-key.pub')).read(), 'AUDIENCE': None, 'ISSUER': None, 'AUTH_HEADER_TYPES': ('Bearer',), 'USER_ID_FIELD': 'id', 'USER_ID_CLAIM': 'user_id', 'AUTH_TOKEN_CLASSES': ('rest_framework_simplejwt.tokens.AccessToken',), 'TOKEN_TYPE_CLAIM': 
'token_type', 'JTI_CLAIM': 'jti', 'SLIDING_TOKEN_REFRESH_EXP_CLAIM': 'refresh_exp', 'SLIDING_TOKEN_LIFETIME': timedelta(minutes=5), 'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=1), } AUTH_USER_MODEL = 'user.User' AUTHENTICATION_BACKENDS = ['user.views.UserLogIn'] # Internationalization # https://docs.djangoproject.com/en/4.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/4.0/howto/static-files/ STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') STATIC_URL = '/static/' # Extra places for collectstatic to find static files. STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), ) # Simplified static file serving. # https://warehouse.python.org/project/whitenoise/ STATICFILES_STORAGE = 'spa.storage.SPAStaticFilesStorage' # Default primary key field type # https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' # Email and SMTP settings DEFAULT_FROM_EMAIL = os.getenv('EMAIL_FROM') EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_USE_TLS = True EMAIL_HOST = os.getenv('EMAIL_HOST') EMAIL_HOST_USER = os.getenv('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = os.getenv('EMAIL_HOST_PASSWORD') EMAIL_PORT = os.getenv('EMAIL_PORT') # Cloudinary os.environ["CLOUDINARY_URL"] = os.getenv('CLOUDINARY_URL', '') TBA_KEY = '<KEY>' DISCORD_NOTIFICATION_WEBHOOK = os.getenv('DISCORD_NOTIFICATION_WEBHOOK', '') WEBPUSH_SETTINGS = { "VAPID_PUBLIC_KEY": os.getenv('VAPID_PUBLIC_KEY'), "VAPID_PRIVATE_KEY": os.getenv('VAPID_PRIVATE_KEY'), "VAPID_ADMIN_EMAIL": "<EMAIL>" } <file_sep>/scouting/field/apps.py from django.apps import AppConfig class ScoutfieldConfig(AppConfig): name = 'scoutField' <file_sep>/scouting/portal/apps.py from django.apps import AppConfig class ScoutportalConfig(AppConfig): name = 'scoutPortal' <file_sep>/scouting/pit/urls.py from django.urls import path from .views import * 
urlpatterns = [ path('questions/', Questions.as_view()), #path('save-answers/', SaveAnswers.as_view()), path('save-picture/', SavePicture.as_view()), path('results-init/', ResultsInit.as_view()), path('results/', Results.as_view()), path('team-data/', TeamData.as_view()) ] <file_sep>/alerts/views.py from rest_framework.permissions import IsAuthenticated from rest_framework.views import APIView from rest_framework_simplejwt.authentication import JWTAuthentication from alerts.util import stage_all_field_schedule_alerts, stage_schedule_alerts, send_alerts, dismiss_alert from general.security import ret_message app_url = 'alerts/' class RunAlerts(APIView): """API endpoint to stage user alerts""" endpoint = 'run/' def get(self, request, format=None): try: ret = 'all field alerts\n' ret += stage_all_field_schedule_alerts() ret += 'schedule alerts\n' ret += stage_schedule_alerts() return ret_message(ret) except Exception as e: return ret_message('An error occurred while running alerts.', True, app_url + self.endpoint, 0, e) class SendAlerts(APIView): """API endpoint to notify users""" endpoint = 'send/' def get(self, request, format=None): try: ret = 'send alerts\n' ret += send_alerts() return ret_message(ret) except Exception as e: return ret_message('An error occurred while sending alerts.', True, app_url + self.endpoint, 0, e) class DismissAlert(APIView): """API endpoint to dismiss an alert""" authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) endpoint = 'dismiss/' def get(self, request, format=None): try: dismiss_alert(request.query_params.get('alert_channel_send_id', None), request.user.id) return ret_message('') except Exception as e: return ret_message('An error occurred while dismissing alert.', True, app_url + self.endpoint, 0, e) <file_sep>/scouting/matchplanning/views.py from cloudinary.templatetags import cloudinary from django.db.models import Q from rest_framework.permissions import IsAuthenticated from 
rest_framework.response import Response from rest_framework.views import APIView from rest_framework_simplejwt.authentication import JWTAuthentication from general.security import ret_message, has_access from scouting.field.views import get_field_results from scouting.matchplanning.serializers import InitSerializer, SaveTeamNoteSerializer, MatchPlanningSerializer, \ TeamSerializer, TeamNoteSerializer from scouting.models import Event, Team, Match, Season, TeamNotes from scouting.pit.views import get_pit_results auth_obj = 58 auth_view_obj_scout_field = 52 app_url = 'scouting/match-planning/' class Init(APIView): """API endpoint to tell the frontend if the competition page is active and its information""" endpoint = 'init/' authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) def get_competition_information(self): current_event = Event.objects.get(Q(current='y') & Q(void_ind='n')) team3492 = Team.objects.get(team_no=3492) matches = Match.objects.filter(Q(event=current_event) & Q(void_ind='n') & Q(Q(red_one=team3492) | Q(red_two=team3492) | Q(red_three=team3492) | Q(blue_one=team3492) | Q(blue_two=team3492) | Q(blue_three=team3492))) \ .order_by('comp_level__comp_lvl_order', 'match_number') parsed_matches = [] for m in matches: try: eti_blue_one = m.blue_one.eventteaminfo_set.get(Q(event=current_event) & Q(void_ind='n')) except: eti_blue_one = None try: eti_blue_two = m.blue_two.eventteaminfo_set.get(Q(event=current_event) & Q(void_ind='n')) except: eti_blue_two = None try: eti_blue_three = m.blue_three.eventteaminfo_set.get(Q(event=current_event) & Q(void_ind='n')) except: eti_blue_three = None try: eti_red_one = m.red_one.eventteaminfo_set.get(Q(event=current_event) & Q(void_ind='n')) except: eti_red_one = None try: eti_red_two = m.red_two.eventteaminfo_set.get(Q(event=current_event) & Q(void_ind='n')) except: eti_red_two = None try: eti_red_three = m.red_three.eventteaminfo_set.get(Q(event=current_event) & Q(void_ind='n')) except: 
eti_red_three = None parsed_matches.append({ 'match_id': m.match_id, 'event_id': m.event.event_id, 'match_number': m.match_number, 'red_score': m.red_score, 'blue_score': m.blue_score, 'time': m.time, 'blue_one_id': m.blue_one.team_no, 'blue_one_rank': None if eti_blue_one is None else eti_blue_one.rank, 'blue_two_id': m.blue_two.team_no, 'blue_two_rank': None if eti_blue_two is None else eti_blue_two.rank, 'blue_three_id': m.blue_three.team_no, 'blue_three_rank': None if eti_blue_three is None else eti_blue_three.rank, 'red_one_id': m.red_one.team_no, 'red_one_rank': None if eti_red_one is None else eti_red_one.rank, 'red_two_id': m.red_two.team_no, 'red_two_rank': None if eti_red_two is None else eti_red_two.rank, 'red_three_id': m.red_three.team_no, 'red_three_rank': None if eti_red_three is None else eti_red_three.rank, 'comp_level': m.comp_level }) teams = Team.objects.filter(event=current_event).order_by('team_no') return {'event': current_event, 'matches': parsed_matches, 'teams': teams} def get(self, request, format=None): if has_access(request.user.id, auth_obj): try: req = self.get_competition_information() serializer = InitSerializer(req) return Response(serializer.data) except Exception as e: return ret_message('An error occurred initializing match planning.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class SaveNote(APIView): """API endpoint to tell the frontend if the competition page is active and its information""" endpoint = 'save-note/' authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) def save_note(self, data): current_event = Event.objects.get(Q(current='y') & Q(void_ind='n')) note = TeamNotes(event=current_event, team_no_id=data['team_no'], match_id=data.get('match', None), user=self.request.user, note=data['note']) note.save() return ret_message('Note saved successfully') def post(self, request, 
format=None): serializer = SaveTeamNoteSerializer(data=request.data) if not serializer.is_valid(): return ret_message('Invalid data', True, app_url + self.endpoint, request.user.id, serializer.errors) if has_access(request.user.id, auth_obj): try: req = self.save_note(serializer.data) return req except Exception as e: return ret_message('An error occurred while saving note.', True, app_url + self.endpoint, request.user.id, e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class PlanMatch(APIView): """API endpoint to tell the frontend if the competition page is active and its information""" endpoint = 'plan-match/' authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) def get_match_information(self, match_id): try: current_season = Season.objects.get(current='y') except Exception as e: return ret_message('No season set, see an admin.', True, app_url + self.endpoint, self.request.user.id, e) try: current_event = Event.objects.get( Q(season=current_season) & Q(current='y')) except Exception as e: return ret_message('No event set, see an admin', True, app_url + self.endpoint, self.request.user.id, e) match = Match.objects.get(match_id=match_id) teams = [match.red_one, match.red_two, match.red_three, match.blue_one, match.blue_two, match.blue_three] results = [] for t in teams: # Pit Data st = TeamSerializer(t).data pit = get_pit_results([st], self.endpoint, self.request) if type(pit) is list: pit = pit[0] #if pit.data.get('error', False): else: pit = None # Field Data team_results = get_field_results(t, self.endpoint, self.request) field_cols = team_results['scoutCols'] field_answers = team_results['scoutAnswers'] # notes notes = TeamNotes.objects.filter(Q(void_ind='n') & Q(team_no=t)).order_by('-time') results.append({'team': t, 'pitData': pit, 'fieldCols': field_cols, 'fieldAnswers': field_answers, 'notes': notes}) return results def get(self, request, format=None): if 
has_access(request.user.id, auth_obj): try: req = self.get_match_information(request.query_params.get('match_id', None)) serializer = MatchPlanningSerializer(req, many=True) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while getting match information.', True, app_url + self.endpoint, exception=e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) class LoadTeamNotes(APIView): """API endpoint to get team notes""" endpoint = 'load-team-notes/' authentication_classes = (JWTAuthentication,) permission_classes = (IsAuthenticated,) def get_team_notes(self, team_no): try: current_season = Season.objects.get(current='y') except Exception as e: return ret_message('No season set, see an admin.', True, app_url + self.endpoint, self.request.user.id, e) try: current_event = Event.objects.get( Q(season=current_season) & Q(current='y')) except Exception as e: return ret_message('No event set, see an admin', True, app_url + self.endpoint, self.request.user.id, e) team = Team.objects.get(Q(void_ind='n') & Q(team_no=team_no)) notes = TeamNotes.objects.filter(Q(void_ind='n') & Q(team_no=team) & Q(event=current_event))\ .order_by('-time') return notes def get(self, request, format=None): if has_access(request.user.id, auth_obj) or has_access(request.user.id, auth_view_obj_scout_field): try: req = self.get_team_notes(request.query_params.get('team_no', None)) serializer = TeamNoteSerializer(req, many=True) return Response(serializer.data) except Exception as e: return ret_message('An error occurred while getting team notes.', True, app_url + self.endpoint, exception=e) else: return ret_message('You do not have access.', True, app_url + self.endpoint, request.user.id) <file_sep>/public/competition/serializers.py from rest_framework import serializers class EventSerializer(serializers.Serializer): event_id = serializers.IntegerField(read_only=True) season_id = 
serializers.IntegerField(read_only=True) event_nm = serializers.CharField() date_st = serializers.DateTimeField() date_end = serializers.DateTimeField() event_cd = serializers.CharField() event_url = serializers.CharField() address = serializers.CharField() city = serializers.CharField() state_prov = serializers.CharField() postal_code = serializers.CharField() location_name = serializers.CharField() gmaps_url = serializers.CharField() webcast_url = serializers.CharField() timezone = serializers.CharField() current = serializers.CharField() competition_page_active = serializers.CharField() class CompetitionLevelSerializer(serializers.Serializer): comp_lvl_typ = serializers.CharField() comp_lvl_typ_nm = serializers.CharField() comp_lvl_order = serializers.IntegerField() class MatchSerializer(serializers.Serializer): match_id = serializers.CharField(read_only=True) event_id = serializers.IntegerField(read_only=True) match_number = serializers.IntegerField() red_score = serializers.IntegerField() blue_score = serializers.IntegerField() time = serializers.DateTimeField() blue_one_id = serializers.IntegerField() blue_two_id = serializers.IntegerField() blue_three_id = serializers.IntegerField() red_one_id = serializers.IntegerField() red_two_id = serializers.IntegerField() red_three_id = serializers.IntegerField() comp_level = CompetitionLevelSerializer(read_only=True) class CompetitionInformationSerializer(serializers.Serializer): event = EventSerializer() matches = MatchSerializer(many=True, required=False) <file_sep>/scripts/refresh-event-team-info.sh #!/bin/bash newline=$'\n' timestamp=$(date) output=$(curl https://parts.bduke.dev/scouting/admin/sync-event-team-info/) echo "$timestamp" "$output" "$newline" >> /home/brandon/PARTs_WebAPI/logs/log-sync-event-team-info.txt <file_sep>/scouting/migrations/0006_alter_eventteaminfo_dq_alter_eventteaminfo_losses_and_more.py # Generated by Django 4.0.3 on 2023-03-19 19:50 from django.db import migrations, models class 
Migration(migrations.Migration): dependencies = [ ('scouting', '0005_schedule_and_event_team_info_event_field_Rename'), ] operations = [ migrations.AlterField( model_name='eventteaminfo', name='dq', field=models.IntegerField(null=True), ), migrations.AlterField( model_name='eventteaminfo', name='losses', field=models.IntegerField(null=True), ), migrations.AlterField( model_name='eventteaminfo', name='matches_played', field=models.IntegerField(null=True), ), migrations.AlterField( model_name='eventteaminfo', name='qual_average', field=models.IntegerField(null=True), ), migrations.AlterField( model_name='eventteaminfo', name='rank', field=models.IntegerField(null=True), ), migrations.AlterField( model_name='eventteaminfo', name='ties', field=models.IntegerField(null=True), ), migrations.AlterField( model_name='eventteaminfo', name='wins', field=models.IntegerField(null=True), ), ] <file_sep>/scouting/migrations/0008_alter_schedule_notified.py # Generated by Django 4.0.3 on 2023-03-25 14:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('scouting', '0007_scoutfield_time'), ] operations = [ migrations.AlterField( model_name='schedule', name='notified', field=models.BooleanField(default=False), ), ] <file_sep>/scouting/field/urls.py from django.urls import path from .views import * urlpatterns = [ path('questions/', Questions.as_view()), #path('save-answers/', SaveAnswers.as_view()), path('results/', Results.as_view()), ] <file_sep>/public/urls.py from django.urls import include, path from .views import APIStatus, NotifyUsers urlpatterns = [ path('api-status/', APIStatus.as_view()), path('competition/', include('public.competition.urls')), # path('notify-users/', NotifyUsers.as_view()), ] <file_sep>/scouting/migrations/0003_multiple_notifications.py # Generated by Django 4.0.3 on 2023-03-11 22:02 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('scouting', 
'0002_initial'), ] operations = [ migrations.RemoveField( model_name='scoutfieldschedule', name='notified', ), migrations.AddField( model_name='scoutfieldschedule', name='notification1', field=models.BooleanField(default=False), ), migrations.AddField( model_name='scoutfieldschedule', name='notification2', field=models.BooleanField(default=False), ), migrations.AddField( model_name='scoutfieldschedule', name='notification3', field=models.BooleanField(default=False), ), ] <file_sep>/form/urls.py from django.urls import path from .views import SaveAnswers, GetFormInit, SaveQuestion, GetQuestions urlpatterns = [ path('get-questions/', GetQuestions.as_view()), path('save-answers/', SaveAnswers.as_view()), path('form-init/', GetFormInit.as_view()), path('save-question/', SaveQuestion.as_view()), ]
136cea89514b6f77b3563ebd4f237265cba11a7d
[ "SQL", "Python", "Text", "Shell" ]
87
Python
3492PARTs/PARTs_WebAPI
152ae3aae3e5412deaaab84429419914864b4656
7c7107f4ee5d505bb7446294e59eb414a2deea01
refs/heads/master
<repo_name>ishotjr/OneWayWillie<file_sep>/pinout.md #### OLED <-> MKR `GND` to `GND` `VCC` to `VCC` `SCL` to `SCK` (`D9`) `SDA` to `MOSI` (`D8`) `RES` to `D7` `DC` to `D6` `CS` to `D4` #### Piezo <-> MKR `GND` to `GND` `VCC` to `D5` #### GP2Y0A60SZLF <-> MKR `GND` to `GND` `VCC` to `5V` `OUT` to `A0` <file_sep>/OneWayWillie.ino #include <Adafruit_GFX.h> #include <Adafruit_SSD1331.h> #include <SPI.h> // *courtesy Adafruit SSD1331 library example* // OLED pins on MKR WiFi 1010 #define sclk 9 #define mosi 8 #define cs 4 #define rst 7 #define dc 6 // Color definitions #define BLACK 0x0000 #define BLUE 0x001F #define RED 0xF800 #define GREEN 0x07E0 #define CYAN 0x07FF #define MAGENTA 0xF81F #define YELLOW 0xFFE0 #define WHITE 0xFFFF // using hardware SPI pins Adafruit_SSD1331 display = Adafruit_SSD1331(&SPI, cs, dc, rst); enum Distance: byte { FAR, NEAR, NEARER, NEAREST, BEHIND }; const int distanceColors[] = {YELLOW, YELLOW, MAGENTA, RED, RED}; const int piezo = 5; int proximity = FAR; int priorProximity = BEHIND; int priorScreen = CYAN; void setup() { Serial.begin(9600); pinMode(piezo, OUTPUT); Serial.println("Initializing..."); display.begin(); display.fillScreen(priorScreen); } void loop() { // Sharp GP2Y0A60SZLF Analog Distance Sensor 10-150cm, 5V int sensorValue = analogRead(A0); Serial.print("sensorValue: "); Serial.println(sensorValue); if (sensorValue == 1023) { proximity = BEHIND; } else if (sensorValue > 800) { proximity = NEAREST; } else if (sensorValue > 400) { proximity = NEARER; } else if (sensorValue > 220) { proximity = NEAR; } else { proximity = FAR; } Serial.print("proximity: "); Serial.println(proximity); // ignore movement starting behind sensor if (proximity == BEHIND) { priorProximity = BEHIND; } else if (proximity < priorProximity) { priorProximity = proximity; } Serial.print("priorProximity: "); Serial.println(priorProximity); // buzz when too close, escalating pitch and speed with proximity // but only movement toward sensor if (priorProximity 
> 0) { noTone(piezo); drawFace(BEHIND); } else { if (proximity > 0) { tone(piezo, proximity * 220, 30 * proximity * proximity); } drawFace(proximity); } // for sensor, not tone (which uses own timer) delay(250); } void drawFace(byte distance) { const uint16_t eyeRadius = 4; const uint16_t mouthRadius = 6; if (distance == BEHIND) { if (priorScreen != BLACK) { display.fillScreen(BLACK); priorScreen = BLACK; } display.setCursor(2,2); display.setTextColor(distanceColors[distance]); display.setTextSize(2); display.println("DO NOT ENTER"); } else { if (priorScreen != WHITE) { display.fillScreen(WHITE); priorScreen = WHITE; } // head display.fillCircle(display.width() / 2, display.height() / 2, display.height() / 2, distanceColors[distance]); // eyes display.fillCircle(display.width() / 3, 2 * display.height() / 5, eyeRadius, BLACK); display.fillCircle(2 * display.width() / 3, 2 * display.height() / 5, eyeRadius, BLACK); if (distance == NEAREST) { // mask eyes to look angry display.fillTriangle(display.width() / 3, 2 * display.height() / 5 - eyeRadius, 2 * display.width() / 3, 2 * display.height() / 5 - eyeRadius, display.width() / 2, display.height() / 2, distanceColors[distance]); } // mouth display.fillCircle(display.width() / 2, 4 * display.height() / 5, mouthRadius, BLACK); if ((distance == NEAREST) || (distance == NEARER)) { // mask mouth circle to look worried display.fillRect(display.width() / 2 - mouthRadius, 4 * display.height() / 5, mouthRadius * 2 + 1, mouthRadius + 1, distanceColors[distance]); } else if (distance == FAR) { // mask mouth circle to create smile display.fillRect(display.width() / 2 - mouthRadius, 4 * display.height() / 5 - mouthRadius, mouthRadius * 2 + 1, mouthRadius, distanceColors[distance]); } } } <file_sep>/OneWayWillie.jscad // OneWayWillie v0.0.3 let h = 2; let walls = 21 * h; let size = 20; function main () { return willie(); } function willie() { head = cylinder({r: size, h: walls, center: false}); brain = cylinder({r: size - h, h: 
walls - h, center: false}).translate([0,0,h]); // 0.95" 96x64 color OLED oled = cube({size: [22, 15, walls]}).translate([-22/2,-15/2,0]); pins = cube({size: [22, 5, walls]}).translate([-22/2,-15/2-5,h/2]); head = difference(head, brain, oled, pins); tummy = cylinder({r: 1.5 * size, h: walls, center: false}).translate([0,2*size,0]); stomach = cylinder({r: 1.5 * size - h, h: walls - h, center: false}).translate([0,2*size,h]); // GP2Y0A60SZLF IR sensor ir = cube({size: [24, 10, walls]}).translate([-24/2,2*size-10-10,0]); piezo = cylinder({r: 22.5/2, h: walls, center: false}).translate([0,2.5*size,0]); tummy = difference(tummy, stomach, ir, piezo); stand = cube({size: [24, 2*h, walls]}).translate([-24/2,3.5*size-2*h,0]); body = union(head, tummy, stand); // pcb diff'd with body union and +2 pcb height to delete intersection pcb = cube({size: [28, 31+2, walls]}).translate([-28/2,-15/2-5,h]); usb = cube({size: [10, 2*h, walls/2]}).translate([-10/2,3.5*size-2*h,walls/2]); body = difference(body,pcb, usb); return body; }
de49a8c22ad7ec3238fb9e802d0dbe04e4dc92ab
[ "Markdown", "JavaScript", "C++" ]
3
Markdown
ishotjr/OneWayWillie
1675f82aac05a7602b3a9c6059b48606e87dd082
3eb18c4d27fec6a05144c845ab35683717b3c0ec
refs/heads/master
<file_sep>package com.nbw.common.util; import java.util.Hashtable; import org.jdom.Element; /** * xml串处理接口,由业务系统实现 * * @author songyj * */ public interface CallBack { /** * 注册执行xml属性 * * 每个功能转换的方法可能不一样所以使用接口 * 这样保证如果需要执行xml则不需要重复写代码 * @param xmlFieldName */ public String xmlHandleElement(Hashtable<String,Object> ht); /** * 注册执行xml属性 * * 每个功能转换的方法可能不一样所以使用回调 * 这样保证如果需要执行xml则不需要重复写代码 * @param xmlFieldName */ public String xmlHandleElement(Element ro); } <file_sep>XmlUtils ======== 使处理xml的代码和业务代码分离,减少代码重复率。 使用示例: ByteArrayInputStream xmlStream = null; try { xmlStream = new ByteArrayInputStream(users.getBytes("GBK")); } catch (UnsupportedEncodingException e) { logger.error( "findUsersRepeat-ByteArrayInputStream解析编码出现问题", e); } boolean yes = XmlUtils.handleXml(xmlStream, new CallBack() { public String xmlHandleElement(Hashtable<String, Object> ht) { lm.findRepeat(ht, filter); return null; } public String xmlHandleElement(Element ro) { return null; } });
bb30c72dea6c147f00672f5c63b5ff29e70b6b35
[ "Markdown", "Java" ]
2
Java
picnic106/XmlUtils
2c4241bd8513c821a4f2f4fd18be84a792be6176
fd487e48042ab0c75bfb7cf1ced91d9db9ff7e31
refs/heads/master
<repo_name>JSGrondin/revisited-baselines<file_sep>/pipeline.py # Importing relevant packages from data_load import load_imdb from data_load import report from data_load import lower_text from sklearn.pipeline import Pipeline from sklearn.model_selection import RandomizedSearchCV from textprocessing import remove_urls from textprocessing import remove_emails from sklearn.model_selection import train_test_split from textprocessing import lemmatization_stem_and_stopwords import string from sklearn.feature_extraction.text import CountVectorizer from nbsvm import NBSVM # Extracting raw features (X) and targets(y) X_train, y_train = load_imdb('train') X_test, y_test= load_imdb('test') ########################################################### # Text Pre-processing # ########################################################### # Removing URLs X_train = remove_urls(X_train) X_text = remove_urls(X_test) # Removing email addresses # X_train = remove_emails(X_train) # X_test = remove_emails(X_text) # # Removing special characters # X_train = char_preprocessing(X_train) # X_test = char_preprocessing(X_test) # with open('your_file.txt', 'w', encoding="utf-8") as f: # for item in X_train: # f.write("%s\n" % item) # Processing text features on Xtrain, X_val and X_test X_train = lemmatization_stem_and_stopwords(X_train,True,False,False) X_test = lemmatization_stem_and_stopwords(X_test,True,False,False) # Removing capital letters, i.e lowering all strings X_train = lower_text(X_train) X_test = lower_text(X_test) # # seed=123 # # Splitting the training set into a training and validation sets # X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, # train_size=0.90, \ # test_size=0.1, # random_state=seed) # ########################################################### # NBSVM # ########################################################### #token_pattern = r'[a-zA-Z]+' #token_pattern = r'\w+' # token_pattern = r'[a-zA-Z]+|[\d+\/\d+]+' #token_pattern = r'\w+|[%s]' % 
string.punctuation # token_pattern = r'[a-zA-Z]+|[\d+\/\d+]+|[%s]' % string.punctuation token_pattern = r'[\d+\/\d+]+|\w+|[%s]' % string.punctuation pclf_NBSVM = Pipeline([ ('vect', CountVectorizer(ngram_range=(1,3), token_pattern=token_pattern, binary=True)), ('clf', NBSVM(beta=0.3, C=1, alpha=1.0, fit_intercept=False)) ]) pclf_NBSVM.fit(X_train, y_train) print('Test Accuracy: %s' % pclf_NBSVM.score(X_test, y_test)) params = {"vect__ngram_range": [(1,2)], "vect__binary": [True], "clf__alpha": [1], "clf__C": [1], "clf__beta": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0], "clf__fit_intercept": [False] } # Perform randomized search CV to find best hyperparameters random_search = RandomizedSearchCV(pclf_NBSVM, param_distributions = params, cv=3, verbose = 30, random_state = 123, n_iter = 10) random_search.fit(X_train, y_train) # Report results report(random_search.cv_results_) <file_sep>/README.md # revisited-baselines Improved baselines for sentence and document representations This mini-project was undertaken as part of COMP-551 at McGill University. The goal of this project was to revisit statements made in the work of Le & al with regard to the performance of Paragraph vectors in natural language processing applications. The authors claimed that Paragraph vectors achieved state-of-the-art results on text classification and sentiment analysis tasks. To verify this statement, the best baselines referenced in this report were reproduced. All comparisons were made on the IMDB sentiment dataset. A NB-SVM baseline was used and improved. The latter achieved an accuracy of 92.096% on the test set. This is 0.876% above the baseline reported in the original article. The following scripts were used: data_load.py : to load review comments textprocessing.py : to remove special characters, stop words, lemmatize or stem words, etc pipeline.py : main file used to generate predictions See the writeup.pdf for details on the methodology and results.
c3a865928dd534181ea633b6b4821489df2aab71
[ "Markdown", "Python" ]
2
Python
JSGrondin/revisited-baselines
de6304378a0aa9473a3074f4115cb62fd9e0cf37
500c61060a962367739b8169071907e2f19a5540
refs/heads/master
<repo_name>GeosoftInc/gxpy<file_sep>/geosoft/gxapi/GXARCPY.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXARCPY(gxapi_cy.WrapARCPY): """ GXARCPY class. This library allows legacy GX code to call back into arcpy methods in the Geosoft AddIn for ArcGIS Pro. """ def __init__(self, handle=0): super(GXARCPY, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXARCPY <geosoft.gxapi.GXARCPY>` :returns: A null `GXARCPY <geosoft.gxapi.GXARCPY>` :rtype: GXARCPY """ return GXARCPY() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def add_message(cls, message): """ Add informational message to output of current script :param message: Message :type message: str .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapARCPY._add_message(GXContext._get_tls_geo(), message.encode()) @classmethod def add_warning(cls, message): """ Add warning message to output of current script :param message: Message :type message: str .. 
versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapARCPY._add_warning(GXContext._get_tls_geo(), message.encode()) @classmethod def add_error(cls, message): """ Add error message to output of current script :param message: Message :type message: str .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapARCPY._add_error(GXContext._get_tls_geo(), message.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXIPGUI.rst .. _GXIPGUI: GXIPGUI class ================================== .. autoclass:: geosoft.gxapi.GXIPGUI :members: <file_sep>/geosoft/gxapi/GXVECTOR3D.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVECTOR3D(gxapi_cy.WrapVECTOR3D): """ GXVECTOR3D class. `GXVECTOR3D <geosoft.gxapi.GXVECTOR3D>` Display object. 
""" def __init__(self, handle=0): super(GXVECTOR3D, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVECTOR3D <geosoft.gxapi.GXVECTOR3D>` :returns: A null `GXVECTOR3D <geosoft.gxapi.GXVECTOR3D>` :rtype: GXVECTOR3D """ return GXVECTOR3D() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def get_itr(self, itr): """ Get the `GXITR <geosoft.gxapi.GXITR>` of the `GXVECTOR3D <geosoft.gxapi.GXVECTOR3D>` :param itr: `GXITR <geosoft.gxapi.GXITR>` object :type itr: GXITR .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_itr(itr) def set_itr(self, itr): """ Set the `GXITR <geosoft.gxapi.GXITR>` of the `GXVECTOR3D <geosoft.gxapi.GXVECTOR3D>` :param itr: `GXITR <geosoft.gxapi.GXITR>` object :type itr: GXITR .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_itr(itr) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXDGW.rst .. _GXDGW: GXDGW class ================================== .. autoclass:: geosoft.gxapi.GXDGW :members: .. _DGW_OBJECT: DGW_OBJECT constants ----------------------------------------------------------------------- Dialog object defines INFO TYPE EDIT FEDIT LEDIT CEDIT EBUT ========= ===== ===== ===== ===== ===== LABEL RW RW RW RW RW R - can use GetInfo_DGW TEXT RW RW RW RW . W - can use `set_info <geosoft.gxapi.GXDGW.set_info>` PATH . RW . . . FILEPATH . RW . . . LISTVAL . . R . . LISTALIAS . . RW . 
. .. autodata:: geosoft.gxapi.DGW_LABEL :annotation: .. autoattribute:: geosoft.gxapi.DGW_LABEL .. autodata:: geosoft.gxapi.DGW_TEXT :annotation: .. autoattribute:: geosoft.gxapi.DGW_TEXT .. autodata:: geosoft.gxapi.DGW_PATH :annotation: .. autoattribute:: geosoft.gxapi.DGW_PATH .. autodata:: geosoft.gxapi.DGW_FILEPATH :annotation: .. autoattribute:: geosoft.gxapi.DGW_FILEPATH .. autodata:: geosoft.gxapi.DGW_LISTVAL :annotation: .. autoattribute:: geosoft.gxapi.DGW_LISTVAL .. autodata:: geosoft.gxapi.DGW_LISTALIAS :annotation: .. autoattribute:: geosoft.gxapi.DGW_LISTALIAS .. autodata:: geosoft.gxapi.DGW_EXT :annotation: .. autoattribute:: geosoft.gxapi.DGW_EXT .. autodata:: geosoft.gxapi.DGW_HIDE :annotation: .. autoattribute:: geosoft.gxapi.DGW_HIDE <file_sep>/docs/GXEXT.rst .. _GXEXT: GXEXT class ================================== .. autoclass:: geosoft.gxapi.GXEXT :members: <file_sep>/geosoft/gxpy/vox.py """ Geosoft voxel (voxset) handling. :Classes: ============ ========================================================================== :class:`Vox` Geosoft voxel (voxset), subclass of `geosoft.gxpy.spatialdata.SpatialData` ============ ========================================================================== :Constants: :Z_ELEVATION: 0, z values are elevation :Z_DEPTH: 1, z values are depth :MODE_READ: `geosoft.gxpy.spatialdata.MODE_READ` :MODE_READWRITE: `geosoft.gxpy.spatialdata.MODE_READWRITE` :MODE_NEW: `geosoft.gxpy.spatialdata.MODE_NEW` :INTERP_NEAREST: `geosoft.gxapi.VOXE_EVAL_NEAR` :INTERP_LINEAR: `geosoft.gxapi.VOXE_EVAL_INTERP` :INTERP_SMOOTH: `geosoft.gxapi.VOXE_EVAL_BEST` .. seealso:: `geosoft.gxpy.spatialdata`, `geosoft.gxpy.vox_display`, `geosoft.gxapi.GXVOX` .. note:: Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_vox.py>`_ .. versionadded:: 9.3.1 """ import os import numpy as np from collections.abc import Sequence import geosoft import geosoft.gxapi as gxapi from . 
import gx as gx from . import coordinate_system as gxcs from . import vv as gxvv from . import utility as gxu from . import spatialdata as gxspd from . import geometry as gxgm from . import gdb as gxgdb __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class VoxException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.vox`. """ pass def _vox_file_name(name, vectorvoxel=False): ext = os.path.splitext(name)[1].lower() if (ext == '.geosoft_voxel') or (ext == '.geosoft_vectorvoxel'): return name if vectorvoxel: return name + '.geosoft_vectorvoxel' return name + '.geosoft_voxel' def _vox_name(name): basename = os.path.basename(name) return os.path.splitext(basename)[0] def delete_files(vox_name): """ Delete all files associated with this vox name. :param vox_name: name of the vox file .. versionadded:: 9.3.1 """ gxspd.delete_files(vox_name) def locations_from_cells(cells, ref=0.0): """ Return the cell center locations from an array of cell sizes. :param cells: array of cell sizes :param ref: reference (origin) added to values :return: location array .. versionadded:: 9.3.1 """ if isinstance(cells, gxvv.GXvv): cells = list(cells.np) locations = list(cells) locations[0] = ref for i in range(1, len(cells)): locations[i] = locations[i - 1] + (cells[i - 1] + cells[i]) * 0.5 return locations def elevation_from_depth(depth_origin, depth_cells): """ Return elevation origin and elevation cells sizes from a depth origin and depth cell-sizes :param depth_origin: top vox z origin as depth below 0 :param depth_cells: cell sizes with depth :return: elevation origin (bottom cell), cell sizes up from origin .. 
versionadded:: 9.3.1 """ vv = False if isinstance(depth_cells, gxvv.GXvv): depth_cells = list(depth_cells.np) vv = True # elevation origin is the deepest cell elevation_origin = -locations_from_cells(depth_cells, depth_origin)[len(depth_cells) - 1] elevation_cells = list(reversed(depth_cells)) if vv: return elevation_origin, gxvv.GXvv(elevation_cells) return elevation_origin, list(reversed(depth_cells)) # constants INTERP_NEAREST = gxapi.VOXE_EVAL_NEAR INTERP_LINEAR = gxapi.VOXE_EVAL_INTERP INTERP_SMOOTH = gxapi.VOXE_EVAL_BEST MODE_READ = gxspd.MODE_READ MODE_READWRITE = gxspd.MODE_READWRITE MODE_NEW = gxspd.MODE_NEW Z_ELEVATION = 0 Z_DEPTH = 1 class Vox(gxspd.SpatialData, Sequence): """ Vox (voxset) class. :Constructors: ======================= ============================================ :meth:`open` open an existing vox dataset :meth:`new` create a new vox dataset ======================= ============================================ A vox instance supports iteration that yields (x, y, z, vox_value) by cell-centered points along horizontal rows, then columns, then depth slices starting at minimum z. For example, the following prints the x, y, z, vox_value of every non-dummy cell in a vox: .. code:: import geosoft.gxpy.vox as gxvox with gxvox.Vox.open('some.geosoft_voxel') as g: for x, y, z, v in g: if v is not None: print(x, y, z, v) Specific vox cell values can be indexed (null vox values are None): .. code:: import geosoft.gxpy.vox as gxvox with gxvox.Vox.open('some_voxel') as vox: for iz in range(vox.nz): for iy in range(vox.ny): for ix in range(vox.nx): x, y, z, v = vox[ix, iy, iz] if v is not None: print(x, y, z, v) .. 
versionadded:: 9.3.1 """ def _close(self, pop=True): if hasattr(self, '_open'): if self._open: self._gxvoxe = None self._gxvox = None self._pg = None self._origin = None self._locations = None self._cells = None self._uniform_cell_size = None self._buffer_np = None super(Vox, self)._close() def __init__(self, name=None, gxvox=None, dtype=None, mode=None, overwrite=False): self._file_name = _vox_file_name(name) self._name = _vox_name(self._file_name) super().__init__(name=self._name, file_name=self._file_name, mode=mode, overwrite=overwrite, gxobj=gxvox) self._gxvox = gxvox self._gxvoxe = None self._next = self._next_x = self._next_y = self._next_z = 0 self._locations = None self._cells = None self._pg = None self._buffered_plane = self._buffered_row = None self._is_depth = False ityp = gxapi.int_ref() iarr = gxapi.int_ref() nx = gxapi.int_ref() ny = gxapi.int_ref() nz = gxapi.int_ref() self._gxvox.get_info(ityp, iarr, nx, ny, nz) if dtype is None: self._dtype = gxu.dtype_gx(ityp.value) else: self._dtype = dtype self._return_int = gxu.is_int(gxu.gx_dtype(self._dtype)) self._dim = (nx.value, ny.value, nz.value) self._max_iter = nx.value * ny.value * nz.value # location self._setup_locations() def __len__(self): return self._max_iter def __iter__(self): return self def __next__(self): if self._next >= self._max_iter: self._next = 0 raise StopIteration else: v = self.__getitem__(self._next) self._next += 1 return v def __getitem__(self, item): if isinstance(item, int): iz = item // (self.nx * self.ny) item -= iz * self.nx * self.ny ix = item % self.nx iy = item // self.nx else: ix, iy, iz = item x, y, z = self.xyz(ix, iy, iz) if self.is_depth: iz = self.nz - iz - 1 if (self._buffered_plane != iz) or (self._buffered_row != iy): self._buffered_plane = iz self._buffered_row = iy if self.is_vectorvox: vv = gxvv.GXvv(dtype=self._dtype, dim=3) else: vv = gxvv.GXvv(dtype=self._dtype) self.gxpg.read_row_3d(iz, iy, 0, self._dim[0], vv.gxvv) self._buffer_np = vv.np v = 
self._buffer_np[ix] if self._return_int: v = int(v) if v == gxapi.iDUMMY: v = None else: if self.is_vectorvox: vx = None if np.isnan(v[0]) else v[0] vy = None if np.isnan(v[1]) else v[1] vz = None if np.isnan(v[2]) else v[2] v = (vx, vy, vz) elif np.isnan(v): v = None return x, y, z, v @classmethod def open(cls, name, gxapi_vox=None, dtype=None, mode=MODE_READ, depth=False): """ Open an existing vox. :param name: name of the vox. If a name only the vox is resolved from the project. If a file name or complete path, the vox is resolved from the file system outside of the current project. :param gxapi_vox: `gxapi.GXVOX` instance to create from GXVOX instance. :param dtype: working dtype for retrieving data. :param depth: True to work with z as depth (positive down), origin at the top of the vox. The default is False, z is elevation (positive up), origin at the bottom of the vox. :param mode: open mode: ================= ========================================================== MODE_READ only read the vox, properties cannot be changed MODE_READWRITE vox stays the same, but properties and metadata may change ================= ========================================================== .. versionadded:: 9.3.1 """ if gxapi_vox is None: gxapi_vox = gxapi.GXVOX.create(_vox_file_name(name)) vox = cls(name, gxapi_vox, dtype=dtype, mode=mode) vox.is_depth = depth return vox @classmethod def new(cls, name, data, temp=False, overwrite=False, dtype=None, origin=(0., 0., 0.), cell_size=None, coordinate_system=None, depth=False): """ Create a new vox dataset :param name: dataset name, or a path to a persistent file. A file with extension `.geosoft_voxel` or `geosoft_vectorvoxel` will be created for vox instances that will persist (`temp=True`). 
:param data: data to place in the vox, must have 3 dimensions (nz, ny, nx) for simple scalar data, or (nx, ny, nz, 3) for vector data.f :param temp: True to create a temporary vox which will be removed after use :param overwrite: True to overwrite existing persistent vox :param dtype: data type, default is the same as data, or np.float64 of no data. :param origin: (x0, y0, z0) location of the **center** of the origin voxel cell. :param cell_size: uniform cell size, or (dx, dy, dz) cell sizes in the x, y and z directions. The default is (1., 1., 1.). For variable cell size on a dimension, provide an array of the cell sizes along that dimension. The array length must match the data dimension along that axis. For example: `cell_size=((1, 2.5, 1.5), (1, 1, 1, 1), (5, 4, 3, 2, 1))` will create a vox with (x, y, z) dimension (3, 4, 5) and sizes as specified in each dimension. :param coordinate_system: coordinate system as required to create from `geosoft.gxpy.Coordinate_system` :param depth: True to work with z as depth (positive down). The default is False, z is elevation (positive up) .. 
versionadded:: 9.3.1 """ if not isinstance(data, np.ndarray): data = np.array(data, dtype=np.float32) vec_dim = 1 if data.ndim == 4: if data.shape[3] != 3: raise VoxException(_t('Data appears to be vector data, but last dimension is not 3.')) if data.dtype != np.float32: data = np.array(data, dtype=np.float32) vec_dim = 3 elif data.ndim != 3: raise VoxException(_t('Data must have 3 or 4 dimensions, this data has {} dimensions').format(data.ndim)) if not temp: file_name = _vox_file_name(name, vectorvoxel=(vec_dim == 3)) if not overwrite: if os.path.isfile(file_name): raise VoxException(_t('Cannot overwrite existing vox {}'.format(file_name))) else: if vec_dim == 1: file_name = gx.gx().temp_file('.geosoft_voxel') else: file_name = gx.gx().temp_file('.geosoft_vectorvoxel') dimension = (data.shape[2], data.shape[1], data.shape[0]) if dtype is None: dtype = data.dtype if cell_size is None: cell_size = (1., 1., 1.) elif isinstance(cell_size, int): cell_size = (cell_size, cell_size, cell_size) dvv = list(cell_size) for i in range(3): if hasattr(dvv[i], '__iter__'): dvv[i] = gxvv.GXvv(dvv[i], dtype=np.float64) else: dvv[i] = gxvv.GXvv(np.zeros((dimension[i],)) + dvv[i], dtype=np.float64) x0, y0, z0 = origin cx, cy, cz = dvv # dimensions must match if dimension != (cx.length, cy.length, cz.length): raise VoxException(_t('Vox dimension {} and variable_cell_size dimensions {} do not match' ).format(dimension, (cx.length, cy.length, cz.length))) if depth: z0, cz = elevation_from_depth(z0, cz) if dtype is None: dtype = np.float64 pg = gxapi.GXPG.create_3d(cz.length, cy.length, cx.length, gxu.gx_dtype_dimension(dtype, vec_dim)) vv = gxvv.GXvv(dtype=dtype, dim=vec_dim) vv.length = cx.length for s in range(cz.length): for iy in range(cy.length): vv.set_data(data[s, iy, :]) if depth: sz = cz.length - s - 1 else: sz = s pg.write_row_3d(sz, iy, 0, vv.length, vv.gxvv) gxvox = gxapi.GXVOX.generate_pgvv(file_name, pg, x0, y0, z0, cx.gxvv, cy.gxvv, cz.gxvv, 
gxcs.Coordinate_system(coordinate_system).gxipj, gxapi.GXMETA.create()) vox = cls(name, gxvox, mode=MODE_NEW, overwrite=overwrite) vox._file_name = file_name vox.is_depth = depth return vox @classmethod def copy_vox(cls, name, source_vox, data=None, temp=False, overwrite=False, dtype=None): """ Create a new vox dataset to match a source vox, with optional new data. :param name: dataset name, or a path to a persistent file. A file with extension `.geosoft_voxel` will be created for vox instances that will persist (`temp=True`). :param source_vox: `Vox` instance of the source vox :param data: data to place in the vox, must have 3 dimensions (nz, ny, nx). If not specified a copy of source+vox data is used. Data arrays are indexed (z, y, x). :param temp: True to create a temporary vox :param overwrite: True to overwrite existing persistent vox :param dtype: data type, default is the same as data, or np.float64 of no data. .. versionadded:: 9.3.1 """ if data is None: data = source_vox.np() vox = Vox.new(name, data, overwrite=overwrite, temp=temp, dtype=dtype, origin=(source_vox.origin_x, source_vox.origin_y, source_vox.origin_z,), cell_size=(source_vox.cells_x, source_vox.cells_y, source_vox.cells_z), coordinate_system=source_vox.coordinate_system, depth=source_vox.is_depth) return vox @property def gxvox(self): """`gxapi.GXVOX` instance handle""" return self._gxvox @property def is_vectorvox(self): """True if this is a vector voxel.""" return bool(self.gxvox.is_vector_voxel()) @property def dtype(self): """Working dtype for the data.""" return self._dtype @property def nx(self): """ number of cells in vox X direction""" return self._dim[0] @property def ny(self): """ number of cells in vox Y direction""" return self._dim[1] @property def nz(self): """ number of cells in vox Z direction""" return self._dim[2] @property def dx(self): """constant X cell size, None if not constant, in which case use `cells_x`""" if self._uniform_cell_size[0] == gxapi.rDUMMY: return None 
return self._uniform_cell_size[0] @property def dy(self): """constant Y cell size, None if not constant, in which case use `cells_y`""" if self._uniform_cell_size[1] == gxapi.rDUMMY: return None return self._uniform_cell_size[1] @property def dz(self): """constant Z cell size, None if not constant, in which case use `cells_z`""" if self._uniform_cell_size[2] == gxapi.rDUMMY: return None return self._uniform_cell_size[2] @property def origin_x(self): """X location of the center of the vox origin cell.""" return self._origin[0] @property def origin_y(self): """Y location of the center of the vox origin cell.""" return self._origin[1] @property def origin_z(self): """Z location of the center of the vox origin cell, top for depth=True, bottom for depth=False""" return self.locations_z[0] @property def uniform_dx(self): """True if X cell sizes are constant""" return self.dx is not None @property def uniform_dy(self): """True if Y cell sizes are constant""" return self.dy is not None @property def uniform_dz(self): """True if Z cell sizes are constant""" return self.dz is not None @property def extent(self): """ extent to the outer-cell edges of the vox as a `geosoft.gxpy.geometry.Point2`.""" rx0 = gxapi.float_ref() ry0 = gxapi.float_ref() rz0 = gxapi.float_ref() rx1 = gxapi.float_ref() ry1 = gxapi.float_ref() rz1 = gxapi.float_ref() self.gxvox.get_area(rx0, ry0, rz0, rx1, ry1, rz1) if self.is_depth: return gxgm.Point2(((rx0.value, ry0.value, -rz1.value), (rx1.value, ry1.value, -rz0.value))) return gxgm.Point2(((rx0.value, ry0.value, rz0.value), (rx1.value, ry1.value, rz1.value)), self.coordinate_system) def _setup_locations(self): xvv = gxvv.GXvv() yvv = gxvv.GXvv() zvv = gxvv.GXvv() self.gxvox.get_location_points(xvv.gxvv, yvv.gxvv, zvv.gxvv) self._locations = (list(xvv.np), list(yvv.np), list(zvv.np)) x0 = gxapi.float_ref() y0 = gxapi.float_ref() z0 = gxapi.float_ref() self.gxvox.get_location(x0, y0, z0, xvv.gxvv, yvv.gxvv, zvv.gxvv) self._origin = (x0.value, 
y0.value, z0.value) self._cells = (list(xvv.np), list(yvv.np), list(zvv.np)) dx = gxapi.float_ref() dy = gxapi.float_ref() dz = gxapi.float_ref() self._gxvox.get_simple_location(x0, y0, z0, dx, dy, dz) self._uniform_cell_size = (dx.value, dy.value, dz.value) @property def locations_x(self): """Return array of X cell-center locations""" return self._locations[0] @property def locations_y(self): """Return array of Y cell-center locations""" return self._locations[1] @property def locations_z(self): """Return array of Z cell-center locations""" if self.is_depth: return [-z for z in reversed(self._locations[2])] return self._locations[2] @property def cells_x(self): """Return array of X cell sizes""" return self._cells[0] @property def cells_y(self): """Return array of Y cell sizes""" return self._cells[1] @property def cells_z(self): """Return array of Z cell sizes""" if self.is_depth: return list(reversed(self._cells[2])) return self._cells[2] @property def gxpg(self): """ `geosoft.gxapi.GXPG` instance (3D) for this vox. The GXPG will always index z from minimum elevation (bottom of the vox). .. versionadded:: 9.3.1 """ if self._pg is None: self._pg = self.gxvox.create_pg() return self._pg @property def gxvoxe(self): """Return a `gxapi.GXVOXE` instance""" if self._gxvoxe is None: self._gxvoxe = gxapi.GXVOXE.create(self.gxvox) return self._gxvoxe @property def is_depth(self): """True if z is depth. Can be set.""" return self._is_depth @is_depth.setter def is_depth(self, b): self._is_depth = bool(b) @property def is_elevation(self): """True if z is elevation. 
Can be set.""" return not self._is_depth @is_elevation.setter def is_elevation(self, b): self._is_depth = not(bool(b)) def _checkindex(self, ix, iy, iz): if (ix < 0) or (ix >= self.nx) or (iy < 0) or (iy >= self.ny) or (iz < 0) or (iz >= self.nz): raise IndexError( _t("Voxel index ({}, {}, {}) out of range ({}, {}, {}).").format( ix, iy, iz, self.nx, self.ny, self.nz)) def xyz(self, ix, iy, iz): """ Return the spatial location of a the center of a cell in the vox. Raises error if our of range of the data :param ix: x index :param iy: y index :param iz: z index, from bottom for elevation, from top for depth :return: (x, y, elevation) or (x, y, depth) .. versionadded:: 9.3 """ self._checkindex(ix, iy, iz) return self.locations_x[ix], self.locations_y[iy], self.locations_z[iz] def value_at_location(self, xyz, interpolate=INTERP_LINEAR): """ Vox at a location. :param xyz: tuple (x, y, z) location in the vox coordinate system :param interpolate: method by which to interpolate between cell centers: ============== ============================================================= INTERP_NEAREST same as value inside a cell. INTERP_LINEAR linear interpolation between neighboring cell centers. INTERP_SMOOTH smooth interpolation (slower than INTERP_LINEAR). ============== ============================================================= :returns: vox value at that location .. versionadded:: 9.3.1 """ x, y, z = xyz if self.is_depth: z = -z v = self.gxvoxe.value(x, y, z, interpolate) if v == gxapi.rDUMMY: return None return v def np(self, subset=None, dtype=None): """ Return vox subset in a 3D numpy array. :param subset: define a subset ((start_x, start_y, start_z),(nx, ny, nz)). If not specified a numpy array of the entire vox is returned. Missing items are calculated from the vox, and negative indexes in start indicate a value from the last cell. 
start=(None, None) equivalent: start=((0, 0, 0), (nx, ny, nz)) start=((4, 6, 11), None) equivalent: start=((4, 6, 11), (nx - 4, ny - 6, nz - 11)) start=((4, 6, 11), (None, None, 1) equivalent: start=((4, 6, 11), (nx - 4, ny - 6, 1)) start=((0, 0, -1), None equivalent: start=((0, 0, nx - 1), (nx, ny, 1)) :param dtype: desired np.dtype, default is same as vox dtype. :return: numpy array of shape (nz, ny, nx). The order of z depends on is_depth property setting. .. versionadded:: 9.3.1 """ def set_0(n, nn): if n is None: return 0 if n < 0: nr = nn + n else: nr = n if nr < 0 or nr >= nn: raise VoxException(_t("Invalid start ({}) for axis dimension ({})").format(n, nn)) return nr def set_d(o, n, nn): if n is None: return nn - o return n if subset: start, dimension = subset else: start = (0, 0, 0) dimension = None # start if start is None: x0 = y0 = z0 = 0 else: x0, y0, z0 = start x0 = set_0(x0, self.nx) y0 = set_0(y0, self.ny) z0 = set_0(z0, self.nz) # dimensions if dimension is None: nx = self.nx - x0 ny = self.ny - y0 nz = self.nz - z0 else: nx, ny, nz = dimension nx = set_d(x0, nx, self.nx) ny = set_d(y0, ny, self.ny) nz = set_d(z0, nz, self.nz) gxpg = self.gxpg if dtype is None: dtype = self._dtype if self.is_vectorvox: shape = (nz, ny, nx, 3) dim = 3 else: shape = (nz, ny, nx) dim = 1 npv = np.empty(shape, dtype=dtype) vv = gxvv.GXvv(dtype=dtype, dim=dim) vv.length = nx if self.is_depth: z0 = self.nz - (z0 + nz) i = 1 for iz in range(z0, z0 + nz): for iy in range(y0, y0 + ny): gxpg.read_row_3d(iz, iy, x0, nx, vv.gxvv) npv[nz - i, iy - y0, :] = vv.np i += 1 else: for iz in range(z0, z0 + nz): for iy in range(y0, y0 + ny): gxpg.read_row_3d(iz, iy, x0, nx, vv.gxvv) npv[iz - z0, iy - y0, :] = vv.np return npv @classmethod def _rbf(cls, data, file_name=None, overwrite=False, max_segments=1000, coordinate_system=None, cs=None, tolerance=None, max_iterations=200, unit_of_measure=None): """ STUB for future release... Create a vox using a radial-basis function. 
:param data: list of [(x, y, z, value), ...] or a callback that returns lists, or a tuple (gdb, value_channel, x_channel, y_channel, z_channel) where x_channel, y_channel and z_channel, if not specified, default to the current database (x, y, z) channels. See below. :param file_name: name of the geosoft voxel file, None for a temporary vox. :param overwrite: True to overwrite existing file :param max_segments: Maximum number of line segments if using a callback, defaults to 1000. :param coordinate_system: coordinate system :param cs: The voxel cell size in reference system units. :param tolerance: The tolerance required to fit the rbf function to the data values. The default is 0.1 percent of the range of the data. :param max_iterations: Maximum number of iterations to use in solving the rbf function. The default is 200 iterations. Increase for a more accurate grid. A value of 1000 is typically sufficient for maximum accuracy. :param unit_of_measure: string descriptor for the data unit of measure **The** `data` **parameter:** The data can be provided to the rbf algorithm either as a list array, a callback function that returns list array segments, or a `geosoft.gxpy.gdb.Geosoft_database` instance. In the case of a list or a callback, a temporary database is constructed internally. A callback is passed a sequence number, 0, 1, 2, ... and is expected to return a list array with each call or None when there is no more data. See the example below. When a callback is used, the `max_segments` parameter sets the maximum number of lines for the temporary database as each return from the callback will create a new line in the internal temporary database. If a database instance is passed it must be the first item in a tuple of 2 or 5 items: (gdb_instance, value_channel) or (gdb_instance, value_channel, x_channel, y_channel, z_channel). In the first case the default spatial (x, y, z) channels in the database are assumed. Examples: .. 
code:: import numpy as np import geosoft.gxpy.vox as gxvox # from a simple data array of (x, y, z, value) xyzv = [(45., 10., 0., 100), (60., 25., 0., 77.), (50., 8., 5., 80.), (55., 18., 12., 90.) ] vox = gxvox.Vox.rbf(xyzv) # or from a numpy array vox = gxvox.vox.rbf(np.array(xyzv)) # from a database, vox to a cell size of 100 import geosoft.gxpy.gdb as gxgdb gdb = gxgdb.Geosoft_database.open('density_data.gdb') vox = gxvox.vox.rbf((gdb, 'density'), cs=100) # a callback, used for very large data, or to feed data efficiently from some other source. nxyzv = np.array([[(45., 10., 0., 100), (60., 25., 10., 77.), (50., 8., 10., 81.), (55., 11., 25., 66.)], [(20., 15., 5., 108), (25., 5., 12., 77.), (33., 9., 10., np.nan), (28., 2., 20., 22.)], [(35., 18., 8., 110), (40., 31., 18., 77.), (13., 4., 10., 83.), (44., 4., 18., 7.)]]) def feed_data(n): if n >= len(nxyzv): return None return nxyzv[n] vox = gxvox.vox.rbf(feed_data, cs=1.) .. versionadded:: 9.4 """ def gdb_from_callback(callback): _gdb = gxgdb.Geosoft_gdb.new(max_lines=max_segments) channels = ('x', 'y', 'z', 'v') il = 0 xyzv_list = callback(il) while xyzv_list is not None: _gdb.write_line('L{}'.format(il), xyzv_list, channels=channels) il += 1 xyzv_list = callback(il) _gdb.xyz_channels = channels[:3] return _gdb def gdb_from_data(_d): def _data(i): if i == 0: return _d else: return None return gdb_from_callback(_data) # create a database from the data xc, yc, zc = ('x', 'y', 'z') discard = False if callable(data): gdb = gdb_from_callback(data) vc = 'v' discard = True elif isinstance(data, tuple): gdb = data[0] vc = data[1] if len(data) == 5: xc = data[2] yc = data[3] zc = data[4] else: xc, yc, zc = gdb.xyz_channels discard = True else: gdb = gdb_from_data(data) vc = 'v' gdb.xyz_channels = (xc, yc, zc) if tolerance is None: tolerance = 0.1 # TODO calculate sd of data if tolerance and float(tolerance) <= 0.: tolerance = 1.0e-25 if file_name is None: file_name = gx.gx().temp_file('geosoft_voxel') elif 
os.path.exists(file_name): if overwrite: gxu.delete_files_by_root(file_name) else: raise VoxException(_t('Cannot overwrite existing file: {}').format(file_name)) gxapi.GXMULTIGRID3DUTIL.generate_rbf(gdb.gxdb, file_name, vc, cs, tolerance, max_iterations, 1, gxapi.RBFKERNEL_GUASSIAN, 0.5) vox = cls.open(file_name) if coordinate_system is None: coordinate_system = gdb.coordinate_system vox.coordinate_system = coordinate_system if unit_of_measure is None: unit_of_measure = gxgdb.Channel(gdb, vc).unit_of_measure vox.unit_of_measure = unit_of_measure if discard: gdb.close(discard=True) return vox <file_sep>/docs/GXIMG.rst .. _GXIMG: GXIMG class ================================== .. autoclass:: geosoft.gxapi.GXIMG :members: .. _IMG_DISPLAY_PROPERTY: IMG_DISPLAY_PROPERTY constants ----------------------------------------------------------------------- Image display property .. autodata:: geosoft.gxapi.IMG_SHADING_INCLINATION :annotation: .. autoattribute:: geosoft.gxapi.IMG_SHADING_INCLINATION .. autodata:: geosoft.gxapi.IMG_SHADING_DECLINATION :annotation: .. autoattribute:: geosoft.gxapi.IMG_SHADING_DECLINATION .. autodata:: geosoft.gxapi.IMG_SHADING_SCALE :annotation: .. autoattribute:: geosoft.gxapi.IMG_SHADING_SCALE .. autodata:: geosoft.gxapi.IMG_SHADING_CONTRAST :annotation: .. autoattribute:: geosoft.gxapi.IMG_SHADING_CONTRAST .. autodata:: geosoft.gxapi.IMG_SHADING_BRIGHTNESS :annotation: .. autoattribute:: geosoft.gxapi.IMG_SHADING_BRIGHTNESS .. autodata:: geosoft.gxapi.IMG_SHADING_WETLOOK :annotation: .. autoattribute:: geosoft.gxapi.IMG_SHADING_WETLOOK .. autodata:: geosoft.gxapi.IMG_COLOURS_REVERSED :annotation: .. autoattribute:: geosoft.gxapi.IMG_COLOURS_REVERSED .. autodata:: geosoft.gxapi.IMG_SMOOTHING_ENABLED :annotation: .. autoattribute:: geosoft.gxapi.IMG_SMOOTHING_ENABLED .. autodata:: geosoft.gxapi.IMG_SHADING_ENABLED :annotation: .. autoattribute:: geosoft.gxapi.IMG_SHADING_ENABLED .. 
_IMG_FAULT: IMG_FAULT constants ----------------------------------------------------------------------- Fault type .. autodata:: geosoft.gxapi.IMG_FAULT_POLYLINE :annotation: .. autoattribute:: geosoft.gxapi.IMG_FAULT_POLYLINE .. autodata:: geosoft.gxapi.IMG_FAULT_POLYGON :annotation: .. autoattribute:: geosoft.gxapi.IMG_FAULT_POLYGON .. _IMG_FILE: IMG_FILE constants ----------------------------------------------------------------------- Image open modes .. autodata:: geosoft.gxapi.IMG_FILE_READONLY :annotation: .. autoattribute:: geosoft.gxapi.IMG_FILE_READONLY .. autodata:: geosoft.gxapi.IMG_FILE_READWRITE :annotation: .. autoattribute:: geosoft.gxapi.IMG_FILE_READWRITE .. autodata:: geosoft.gxapi.IMG_FILE_READORWRITE :annotation: .. autoattribute:: geosoft.gxapi.IMG_FILE_READORWRITE .. _IMG_QUERY: IMG_QUERY constants ----------------------------------------------------------------------- Information to Query .. autodata:: geosoft.gxapi.IMG_QUERY_iWRITE :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iWRITE .. autodata:: geosoft.gxapi.IMG_QUERY_iPG :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iPG .. autodata:: geosoft.gxapi.IMG_QUERY_iWRITEPG :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iWRITEPG .. autodata:: geosoft.gxapi.IMG_QUERY_iIMGTYPE :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iIMGTYPE .. autodata:: geosoft.gxapi.IMG_QUERY_iDATTYPE :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iDATTYPE .. autodata:: geosoft.gxapi.IMG_QUERY_iRENDER :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iRENDER .. autodata:: geosoft.gxapi.IMG_QUERY_iKX :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iKX .. autodata:: geosoft.gxapi.IMG_QUERY_iNX :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iNX .. autodata:: geosoft.gxapi.IMG_QUERY_iNY :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iNY .. autodata:: geosoft.gxapi.IMG_QUERY_iNV :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iNV .. 
autodata:: geosoft.gxapi.IMG_QUERY_iNE :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_iNE .. autodata:: geosoft.gxapi.IMG_QUERY_rXO :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_rXO .. autodata:: geosoft.gxapi.IMG_QUERY_rYO :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_rYO .. autodata:: geosoft.gxapi.IMG_QUERY_rDX :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_rDX .. autodata:: geosoft.gxapi.IMG_QUERY_rDY :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_rDY .. autodata:: geosoft.gxapi.IMG_QUERY_rROT :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_rROT .. autodata:: geosoft.gxapi.IMG_QUERY_rBASE :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_rBASE .. autodata:: geosoft.gxapi.IMG_QUERY_rMULT :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_rMULT .. autodata:: geosoft.gxapi.IMG_QUERY_rCOMPRESSION_RATIO :annotation: .. autoattribute:: geosoft.gxapi.IMG_QUERY_rCOMPRESSION_RATIO .. _IMG_RELOCATE: IMG_RELOCATE constants ----------------------------------------------------------------------- Relocation Style .. autodata:: geosoft.gxapi.IMG_RELOCATE_FIT :annotation: .. autoattribute:: geosoft.gxapi.IMG_RELOCATE_FIT .. autodata:: geosoft.gxapi.IMG_RELOCATE_ASPECT :annotation: .. autoattribute:: geosoft.gxapi.IMG_RELOCATE_ASPECT <file_sep>/examples/tutorial/Geosoft Databases/get_data_files.py import geosoft.gxpy.gx as gx import geosoft.gxpy.utility as gxu gxc = gx.GXpy() url = 'https://github.com/GeosoftInc/gxpy/raw/9.3/examples/tutorial/Geosoft%20Databases/' gxu.url_retrieve(url + 'mag_data.csv')<file_sep>/docs/GXREG.rst .. _GXREG: GXREG class ================================== .. autoclass:: geosoft.gxapi.GXREG :members: .. _REG_MERGE: REG_MERGE constants ----------------------------------------------------------------------- `GXREG <geosoft.gxapi.GXREG>` merge options .. autodata:: geosoft.gxapi.REG_MERGE_REPLACE :annotation: .. autoattribute:: geosoft.gxapi.REG_MERGE_REPLACE .. 
autodata:: geosoft.gxapi.REG_MERGE_ADD :annotation: .. autoattribute:: geosoft.gxapi.REG_MERGE_ADD <file_sep>/docs/GXVV.rst .. _GXVV: GXVV class ================================== .. autoclass:: geosoft.gxapi.GXVV :members: .. _VV_DOUBLE_CRC_BITS: VV_DOUBLE_CRC_BITS constants ----------------------------------------------------------------------- Number of bits to use in double CRC's .. autodata:: geosoft.gxapi.VV_DOUBLE_CRC_BITS_EXACT :annotation: .. autoattribute:: geosoft.gxapi.VV_DOUBLE_CRC_BITS_EXACT .. autodata:: geosoft.gxapi.VV_DOUBLE_CRC_BITS_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.VV_DOUBLE_CRC_BITS_DEFAULT .. autodata:: geosoft.gxapi.VV_DOUBLE_CRC_BITS_MAX :annotation: .. autoattribute:: geosoft.gxapi.VV_DOUBLE_CRC_BITS_MAX .. _VV_FLOAT_CRC_BITS: VV_FLOAT_CRC_BITS constants ----------------------------------------------------------------------- Number of bits to use in float CRC's .. autodata:: geosoft.gxapi.VV_FLOAT_CRC_BITS_EXACT :annotation: .. autoattribute:: geosoft.gxapi.VV_FLOAT_CRC_BITS_EXACT .. autodata:: geosoft.gxapi.VV_FLOAT_CRC_BITS_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.VV_FLOAT_CRC_BITS_DEFAULT .. autodata:: geosoft.gxapi.VV_FLOAT_CRC_BITS_MAX :annotation: .. autoattribute:: geosoft.gxapi.VV_FLOAT_CRC_BITS_MAX .. _VV_LOG_BASE: VV_LOG_BASE constants ----------------------------------------------------------------------- Type of log to use .. autodata:: geosoft.gxapi.VV_LOG_BASE_10 :annotation: .. autoattribute:: geosoft.gxapi.VV_LOG_BASE_10 .. autodata:: geosoft.gxapi.VV_LOG_BASE_E :annotation: .. autoattribute:: geosoft.gxapi.VV_LOG_BASE_E .. _VV_LOGMODE: VV_LOGMODE constants ----------------------------------------------------------------------- Ways to handle negatives .. autodata:: geosoft.gxapi.VV_LOGMODE_CLIPPED :annotation: .. autoattribute:: geosoft.gxapi.VV_LOGMODE_CLIPPED .. autodata:: geosoft.gxapi.VV_LOGMODE_SCALED :annotation: .. autoattribute:: geosoft.gxapi.VV_LOGMODE_SCALED .. 
autodata:: geosoft.gxapi.VV_LOGMODE_CLAMPED :annotation: .. autoattribute:: geosoft.gxapi.VV_LOGMODE_CLAMPED .. autodata:: geosoft.gxapi.VV_LOGMODE_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.VV_LOGMODE_LINEAR .. _VV_LOOKUP: VV_LOOKUP constants ----------------------------------------------------------------------- Lookup style .. autodata:: geosoft.gxapi.VV_LOOKUP_EXACT :annotation: .. autoattribute:: geosoft.gxapi.VV_LOOKUP_EXACT .. autodata:: geosoft.gxapi.VV_LOOKUP_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.VV_LOOKUP_NEAREST .. autodata:: geosoft.gxapi.VV_LOOKUP_INTERPOLATE :annotation: .. autoattribute:: geosoft.gxapi.VV_LOOKUP_INTERPOLATE .. autodata:: geosoft.gxapi.VV_LOOKUP_NEARESTCLOSE :annotation: .. autoattribute:: geosoft.gxapi.VV_LOOKUP_NEARESTCLOSE .. autodata:: geosoft.gxapi.VV_LOOKUP_INTERPCLOSE :annotation: .. autoattribute:: geosoft.gxapi.VV_LOOKUP_INTERPCLOSE .. _VV_MASK: VV_MASK constants ----------------------------------------------------------------------- Where to mask .. autodata:: geosoft.gxapi.VV_MASK_INSIDE :annotation: .. autoattribute:: geosoft.gxapi.VV_MASK_INSIDE .. autodata:: geosoft.gxapi.VV_MASK_OUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.VV_MASK_OUTSIDE .. _VV_ORDER: VV_ORDER constants ----------------------------------------------------------------------- Specify if the data is montonically increasing or decreasing. .. autodata:: geosoft.gxapi.VV_ORDER_NONE :annotation: .. autoattribute:: geosoft.gxapi.VV_ORDER_NONE .. autodata:: geosoft.gxapi.VV_ORDER_INCREASING :annotation: .. autoattribute:: geosoft.gxapi.VV_ORDER_INCREASING .. autodata:: geosoft.gxapi.VV_ORDER_DECREASING :annotation: .. autoattribute:: geosoft.gxapi.VV_ORDER_DECREASING .. _VV_SORT: VV_SORT constants ----------------------------------------------------------------------- Sort order .. autodata:: geosoft.gxapi.VV_SORT_ASCENDING :annotation: .. autoattribute:: geosoft.gxapi.VV_SORT_ASCENDING .. 
autodata:: geosoft.gxapi.VV_SORT_DESCENDING :annotation: .. autoattribute:: geosoft.gxapi.VV_SORT_DESCENDING .. _VV_WINDOW: VV_WINDOW constants ----------------------------------------------------------------------- How to handle `GXVV <geosoft.gxapi.GXVV>` limits .. autodata:: geosoft.gxapi.VV_WINDOW_DUMMY :annotation: .. autoattribute:: geosoft.gxapi.VV_WINDOW_DUMMY .. autodata:: geosoft.gxapi.VV_WINDOW_LIMIT :annotation: .. autoattribute:: geosoft.gxapi.VV_WINDOW_LIMIT <file_sep>/geosoft/gxapi/GXDBWRITE.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXDB import GXDB from .GXVA import GXVA from .GXVV import GXVV ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDBWRITE(gxapi_cy.WrapDBWRITE): """ GXDBWRITE class. The `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` class is used to open and write to databases. Large blocks of data are split into blocks and served up sequentially to prevent the over-use of virtual memory when VVs or VAs are being written to channels. Individual data blocks are limited by default to 1 MB (which is user-alterable). Data less than the block size are served up whole, one block per line. """ def __init__(self, handle=0): super(GXDBWRITE, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` :returns: A null `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` :rtype: GXDBWRITE """ return GXDBWRITE() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Create Methods @classmethod def create(cls, db): """ Create a `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object Add channels using the `add_channel <geosoft.gxapi.GXDBWRITE.add_channel>` method.channel. :param db: Database input :type db: GXDB :returns: `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object :rtype: GXDBWRITE .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDBWRITE._create(GXContext._get_tls_geo(), db) return GXDBWRITE(ret_val) @classmethod def create_xy(cls, db): """ Create a `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object for a XY-located data. Add channels using the `add_channel <geosoft.gxapi.GXDBWRITE.add_channel>` method. :param db: Database input :type db: GXDB :returns: `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object :rtype: GXDBWRITE .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDBWRITE._create_xy(GXContext._get_tls_geo(), db) return GXDBWRITE(ret_val) @classmethod def create_xyz(cls, db): """ Create a `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object for a XYZ-located data. Add channels using the `add_channel <geosoft.gxapi.GXDBWRITE.add_channel>` method.channel :param db: Database input :type db: GXDB :returns: `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object :rtype: GXDBWRITE .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDBWRITE._create_xyz(GXContext._get_tls_geo(), db) return GXDBWRITE(ret_val) def add_channel(self, chan): """ Add a data channel to the `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object. :param chan: Channel handle (does not need to be locked, but can be.) :type chan: int :returns: Channel index. 
Use for getting the correct `GXVV <geosoft.gxapi.GXVV>` or `GXVA <geosoft.gxapi.GXVA>` object. :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._add_channel(chan) return ret_val # Data Access Methods def get_db(self): """ Get the output `GXDB <geosoft.gxapi.GXDB>` handle from the `GXDBWRITE <geosoft.gxapi.GXDBWRITE>` object. :returns: `GXDB <geosoft.gxapi.GXDB>` handle :rtype: GXDB .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_db() return GXDB(ret_val) def get_vv(self, chan): """ Get the `GXVV <geosoft.gxapi.GXVV>` handle for a channel. :param chan: Index of channel to access. :type chan: int :returns: `GXVV <geosoft.gxapi.GXVV>` handle :rtype: GXVV .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Call only for single-column (regular) channels. You can call the `get_chan_array_size <geosoft.gxapi.GXDBWRITE.get_chan_array_size>` function to find the number fo columns in a given channel. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up. """ ret_val = self._get_vv(chan) return GXVV(ret_val) def get_va(self, chan): """ Get the `GXVA <geosoft.gxapi.GXVA>` handle for an array channel. :param chan: Index of channel to access. :type chan: int :returns: `GXVA <geosoft.gxapi.GXVA>` handle :rtype: GXVA .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Call only for array (multi-column) channels. 
You can call the `get_chan_array_size <geosoft.gxapi.GXDBWRITE.get_chan_array_size>` function to find the number fo columns in a given channel, or you can call `GXVA.col <geosoft.gxapi.GXVA.col>` on the returned `GXVA <geosoft.gxapi.GXVA>` handle. The `GXVA <geosoft.gxapi.GXVA>` is filled anew for each block served up. """ ret_val = self._get_va(chan) return GXVA(ret_val) def get_v_vx(self): """ Get the X channel `GXVV <geosoft.gxapi.GXVV>` handle. :returns: `GXVV <geosoft.gxapi.GXVV>` handle :rtype: GXVV .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Only available for the CreateXY or CreateXYZ methods. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up. """ ret_val = self._get_v_vx() return GXVV(ret_val) def get_v_vy(self): """ Get the Y channel `GXVV <geosoft.gxapi.GXVV>` handle. :returns: `GXVV <geosoft.gxapi.GXVV>` handle :rtype: GXVV .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Only available for the CreateXY or CreateXYZ methods. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up. """ ret_val = self._get_v_vy() return GXVV(ret_val) def get_v_vz(self): """ Get the Z channel `GXVV <geosoft.gxapi.GXVV>` handle. :returns: `GXVV <geosoft.gxapi.GXVV>` handle :rtype: GXVV .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Only available for the CreateXY or CreateXYZ methods. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up. If the Z channel is an array channel, the returned `GXVV <geosoft.gxapi.GXVV>` is the "base" `GXVV <geosoft.gxapi.GXVV>` of the `GXVA <geosoft.gxapi.GXVA>` and contains all items sequentially. 
""" ret_val = self._get_v_vz() return GXVV(ret_val) def get_chan_array_size(self, chan): """ Get the number of columns of data in a channel. :param chan: Index of channel to access. :type chan: int :returns: The number of columns (array size) for a channel :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Regular channels have one column of data. Array channels have more than one column of data. This function should be called to determine whether to use `get_vv <geosoft.gxapi.GXDBWRITE.get_vv>` or `get_va <geosoft.gxapi.GXDBWRITE.get_va>` to access data for a channel. """ ret_val = self._get_chan_array_size(chan) return ret_val # Processing def add_block(self, line): """ Add the current block of data. :param line: Line :type line: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** First, set up the data for each channel by copying values into the individual channel VVs and VAs. """ self._add_block(line) def commit(self): """ Commit remaining data to the database. .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._commit() def test_func(self, ra): """ Temporary test function. :param ra: `GXRA <geosoft.gxapi.GXRA>` handle to text file to import. :type ra: GXRA .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Designed to import the "Massive.xyz" file, which has data in the format "X Y Z Data". 
""" self._test_func(ra) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXST2.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXST2(gxapi_cy.WrapST2): """ GXST2 class. Bi-variate statistics. The `GXST2 <geosoft.gxapi.GXST2>` class accumulates statistics on two data vectors simultaneously in order to compute correlation information. Statistics are accumulated using the `data_vv <geosoft.gxapi.GXST2.data_vv>` function. See also `GXST <geosoft.gxapi.GXST>` (mono-variate statistics). """ def __init__(self, handle=0): super(GXST2, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXST2 <geosoft.gxapi.GXST2>` :returns: A null `GXST2 <geosoft.gxapi.GXST2>` :rtype: GXST2 """ return GXST2() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls): """ Creates a statistics object which is used to accumulate statistics. :returns: `GXST2 <geosoft.gxapi.GXST2>` Object :rtype: GXST2 .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapST2._create(GXContext._get_tls_geo()) return GXST2(ret_val) def data_vv(self, vv_x, vv_y): """ Add all the values in VVx and VVy to `GXST2 <geosoft.gxapi.GXST2>` object. :param vv_x: VVx handle :param vv_y: VVy handle :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._data_vv(vv_x, vv_y) def items(self): """ Gets Number of items :returns: Number of items in `GXST2 <geosoft.gxapi.GXST2>` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._items() return ret_val def reset(self): """ Resets the Statistics. .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._reset() def get(self, id): """ Gets correlation coeff. from the `GXST2 <geosoft.gxapi.GXST2>` object. :param id: :ref:`ST2_CORRELATION` :type id: int :returns: Data you asked for `GS_R8DM <geosoft.gxapi.GS_R8DM>` for none :rtype: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get(id) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXCOM.rst .. _GXCOM: GXCOM class ================================== .. autoclass:: geosoft.gxapi.GXCOM :members: .. 
_COM_BAUD: COM_BAUD constants ----------------------------------------------------------------------- Connection Speed .. autodata:: geosoft.gxapi.COM_BAUD_110 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_110 .. autodata:: geosoft.gxapi.COM_BAUD_300 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_300 .. autodata:: geosoft.gxapi.COM_BAUD_600 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_600 .. autodata:: geosoft.gxapi.COM_BAUD_1200 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_1200 .. autodata:: geosoft.gxapi.COM_BAUD_2400 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_2400 .. autodata:: geosoft.gxapi.COM_BAUD_4800 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_4800 .. autodata:: geosoft.gxapi.COM_BAUD_9600 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_9600 .. autodata:: geosoft.gxapi.COM_BAUD_14400 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_14400 .. autodata:: geosoft.gxapi.COM_BAUD_19200 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_19200 .. autodata:: geosoft.gxapi.COM_BAUD_56000 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_56000 .. autodata:: geosoft.gxapi.COM_BAUD_57600 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_57600 .. autodata:: geosoft.gxapi.COM_BAUD_115200 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_115200 .. autodata:: geosoft.gxapi.COM_BAUD_128000 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_128000 .. autodata:: geosoft.gxapi.COM_BAUD_256000 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_256000 .. autodata:: geosoft.gxapi.COM_BAUD_38400 :annotation: .. autoattribute:: geosoft.gxapi.COM_BAUD_38400 .. _COM_DATASIZE: COM_DATASIZE constants ----------------------------------------------------------------------- Data Bits .. autodata:: geosoft.gxapi.COM_DATASIZE_FIVE :annotation: .. autoattribute:: geosoft.gxapi.COM_DATASIZE_FIVE .. autodata:: geosoft.gxapi.COM_DATASIZE_SIX :annotation: .. 
autoattribute:: geosoft.gxapi.COM_DATASIZE_SIX .. autodata:: geosoft.gxapi.COM_DATASIZE_SEVEN :annotation: .. autoattribute:: geosoft.gxapi.COM_DATASIZE_SEVEN .. autodata:: geosoft.gxapi.COM_DATASIZE_EIGHT :annotation: .. autoattribute:: geosoft.gxapi.COM_DATASIZE_EIGHT .. _COM_FLOWCONTROL: COM_FLOWCONTROL constants ----------------------------------------------------------------------- Flow Control Options .. autodata:: geosoft.gxapi.COM_FLOWCONTROL_NONE :annotation: .. autoattribute:: geosoft.gxapi.COM_FLOWCONTROL_NONE .. autodata:: geosoft.gxapi.COM_FLOWCONTROL_RTS_CTS :annotation: .. autoattribute:: geosoft.gxapi.COM_FLOWCONTROL_RTS_CTS .. autodata:: geosoft.gxapi.COM_FLOWCONTROL_DTR_DSR :annotation: .. autoattribute:: geosoft.gxapi.COM_FLOWCONTROL_DTR_DSR .. autodata:: geosoft.gxapi.COM_FLOWCONTROL_XON_XOFF :annotation: .. autoattribute:: geosoft.gxapi.COM_FLOWCONTROL_XON_XOFF .. _COM_PARITY: COM_PARITY constants ----------------------------------------------------------------------- Parity .. autodata:: geosoft.gxapi.COM_PARITY_EVEN :annotation: .. autoattribute:: geosoft.gxapi.COM_PARITY_EVEN .. autodata:: geosoft.gxapi.COM_PARITY_NARK :annotation: .. autoattribute:: geosoft.gxapi.COM_PARITY_NARK .. autodata:: geosoft.gxapi.COM_PARITY_NONE :annotation: .. autoattribute:: geosoft.gxapi.COM_PARITY_NONE .. autodata:: geosoft.gxapi.COM_PARITY_ODD :annotation: .. autoattribute:: geosoft.gxapi.COM_PARITY_ODD .. autodata:: geosoft.gxapi.COM_PARITY_SPACE :annotation: .. autoattribute:: geosoft.gxapi.COM_PARITY_SPACE .. _COM_STOPBITS: COM_STOPBITS constants ----------------------------------------------------------------------- Stop Bits .. autodata:: geosoft.gxapi.COM_STOPBITS_ONE :annotation: .. autoattribute:: geosoft.gxapi.COM_STOPBITS_ONE .. autodata:: geosoft.gxapi.COM_STOPBITS_ONE5 :annotation: .. autoattribute:: geosoft.gxapi.COM_STOPBITS_ONE5 .. autodata:: geosoft.gxapi.COM_STOPBITS_TWO :annotation: .. 
autoattribute:: geosoft.gxapi.COM_STOPBITS_TWO <file_sep>/geosoft/gxpy/tests/test_utility.py import unittest import numpy as np import os import datetime import time import requests from datetime import timezone, datetime import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.gx as gx import geosoft.gxpy.system as gsys import geosoft.gxpy.utility as gxu from base import GXPYTest class Test(GXPYTest): def test_misc(self): self.start() self.assertEqual(gxu.__version__, geosoft.__version__) self.assertEqual(gxu.gx_dtype('float'),gxapi.GS_DOUBLE) self.assertEqual(gxu.gx_dtype('int'),gxapi.GS_LONG) self.assertEqual(gxu.gx_dtype("<U18"),-72) # x4 for full range of UTF-8 characters self.assertEqual(gxu.gx_dtype('uint64'),gxapi.GS_ULONG64) self.assertEqual(gxu.dtype_gx(gxapi.GS_DOUBLE), np.float) self.assertEqual(gxu.dtype_gx(gxapi.GS_FLOAT), np.float32) self.assertEqual(gxu.dtype_gx(gxapi.GS_LONG), np.int32) self.assertEqual(gxu.dtype_gx(-2000).str, "<U2000") self.assertEqual(gxu.dtype_gx(gxapi.GS_ULONG64), np.uint64) self.assertEqual(gxu.gx_dummy(np.float),gxapi.rDUMMY) self.assertEqual(gxu.gx_dummy(np.int32),gxapi.iDUMMY) self.assertEqual(gxu.gx_dummy(np.int64),gxapi.GS_S8DM) self.assertEqual(gxu.gx_dummy(np.str_), '') self.assertEqual(gxu.gx_dummy('U48'), '') self.assertEqual(gxu.gx_dummy(np.uint), gxapi.GS_U4DM) self.assertEqual(gxu.gx_dummy(np.uint8), gxapi.GS_U1DM) self.assertEqual(gxu.gx_dummy(np.uint16), gxapi.GS_U2DM) self.assertEqual(gxu.gx_dummy(np.uint32), gxapi.GS_U4DM) self.assertEqual(gxu.gx_dummy(np.uint64), gxapi.GS_U8DM) self.assertEqual(gxu.gx_dummy(1.5),gxapi.rDUMMY) self.assertEqual(gxu.gx_dummy(type(1.5)), gxapi.rDUMMY) self.assertEqual(gxu.gx_dummy(3),gxapi.iDUMMY) self.assertEqual(gxu.gx_dummy(type(3)), gxapi.iDUMMY) self.assertEqual(gxu.gx_dummy(0xff),gxapi.iDUMMY) self.assertEqual(gxu.gx_dummy('string'), '') self.assertEqual(gxu.gx_dummy('U48'), '') self.assertRaises(KeyError, gxu.gx_dummy, 1j) self.assertRaises(KeyError, gxu.gx_dummy, 
type(1j)) self.assertEqual(gxu.dummy_none(0), 0) self.assertEqual(gxu.dummy_none(1.), 1.) self.assertEqual(gxu.dummy_none(gxapi.iDUMMY), None) self.assertEqual(gxu.dummy_none(gxapi.rDUMMY), None) self.assertTrue(gxu.is_float(gxu.gx_dtype('float'))) self.assertFalse(gxu.is_int(gxu.gx_dtype('float'))) self.assertTrue(gxu.is_int(gxu.gx_dtype('uint64'))) self.assertFalse(gxu.is_float(gxu.gx_dtype('uint64'))) self.assertTrue(gxu.is_string(gxu.gx_dtype('U18'))) self.assertFalse(gxu.is_int(gxu.gx_dtype('U18'))) self.assertFalse(gxu.is_float(gxu.gx_dtype('U18'))) self.assertEqual(gxu.is_string(gxu.gx_dtype('U18')), 72) self.assertEqual(gxu.dtype_gx_dimension(gxapi.GS_FLOAT), (np.float32, 1)) self.assertEqual(gxu.dtype_gx_dimension(gxapi.GS_FLOAT2D), (np.float32, 2)) self.assertEqual(gxu.dtype_gx_dimension(gxapi.GS_FLOAT3D), (np.float32, 3)) self.assertEqual(gxu.dtype_gx_dimension(gxapi.GS_DOUBLE), (np.float64, 1)) self.assertEqual(gxu.dtype_gx_dimension(gxapi.GS_DOUBLE2D), (np.float64, 2)) self.assertEqual(gxu.dtype_gx_dimension(gxapi.GS_DOUBLE3D), (np.float64, 3)) self.assertEqual(gxu.gx_dtype_dimension(np.float32), gxapi.GS_FLOAT) self.assertEqual(gxu.gx_dtype_dimension(np.float32, 1), gxapi.GS_FLOAT) self.assertEqual(gxu.gx_dtype_dimension(np.float32, 2), gxapi.GS_FLOAT2D) self.assertEqual(gxu.gx_dtype_dimension(np.float32, 3), gxapi.GS_FLOAT3D) self.assertEqual(gxu.gx_dtype_dimension(np.float64), gxapi.GS_DOUBLE) self.assertEqual(gxu.gx_dtype_dimension(np.float64, 1), gxapi.GS_DOUBLE) self.assertEqual(gxu.gx_dtype_dimension(np.float64, 2), gxapi.GS_DOUBLE2D) self.assertEqual(gxu.gx_dtype_dimension(np.float64, 3), gxapi.GS_DOUBLE3D) self.assertRaises(gxu.UtilityException, gxu.gx_dtype_dimension, np.int, 2) self.assertRaises(gxu.UtilityException, gxu.gx_dtype_dimension, np.int, 3) self.assertRaises(gxu.UtilityException, gxu.gx_dtype_dimension, np.float32, 4) npd = np.array([[1,1],[2,2],[-127,1],[3,3]],dtype=gxu.dtype_gx(gxapi.GS_BYTE)) 
self.assertEqual(list(gxu.dummy_mask(npd)),[False,False,True,False]) npd = np.array([1,2,3,4],dtype=gxu.dtype_gx(gxapi.GS_BYTE)) try: gxu.dummy_mask(npd) self.assertTrue(False) except: pass def test_dictlist(self): self.start() lst = gxapi.GXLST.create(1000) lst.add_item("a","aa") lst.add_item("b","bb") lst.add_item("c","cc") d = gxu.dict_from_lst(lst) self.assertEqual(len(d),lst.size()) self.assertEqual(d.get('b'),'bb') def test_dictreg(self): self.start() d = {'a':'A', 'b':'BEE', 'c':[1,2,3], 'g':7.123, 'h':{'hh':'name'}} reg = gxu.reg_from_dict(d) dd = gxu.dict_from_reg(reg) for key, value in d.items(): self.assertEqual(value, dd[key]) self.assertRaises(gxu.UtilityException, gxu.reg_from_dict, d, max_size=10) def test_parameters(self): self.start() p = {'a': 'string', 'list': [1,2,3], 'tup': (['a','b'], {'q': 1.5})} gxu.save_parameters('param_test', p) r = gxu.get_parameters('param_test') self.assertEqual(r['A'], p['a']) self.assertEqual(r['LIST'], p['list']) self.assertEqual(r['TUP'][0][1], 'b') self.assertEqual(r['TUP'][1]['q'], 1.5) s = gxu.get_parameters('param_test', ['a', 'tup', 'not_there']) self.assertEqual(s['a'], 'string') self.assertEqual(s['tup'][1]['q'], 1.5) self.assertEqual(s.get('not_there', None), None) q = gxu.get_parameters('param_test', ['not_there'], default="yes I am") self.assertEqual(q['not_there'], "yes I am") t = {'1': '\\', '2': '\\\\', '3': '\\\\\\', '4': '\\\\\\\\', '5': '\\\\\\\\\\'} gxu.save_parameters('escape', t) tt = gxu.get_parameters('escape') self.assertEqual(t, tt) def test_parameters_2(self): self.start() self.assertRaises(gxu.UtilityException, gxu.save_parameters) self.assertRaises(gxu.UtilityException, gxu.save_parameters, self._test_case_filename) group = os.path.basename(self._test_case_filename).split('.')[0] self.assertRaises(gxu.UtilityException, gxu.save_parameters, group, {'bad.parameter': ''}) parameter = 'GRID_NAME' p = {parameter: ''} gxu.save_parameters(group, p) r = gxu.get_parameters(group) 
self.assertEqual(r[parameter], '') r = gxu.get_parameters(group, {parameter: 'bogus'}) self.assertEqual(r[parameter], '') r = gxu.get_parameters(group, {'test': 'bogus'}) self.assertEqual(r['test'], 'bogus') r = gxu.get_parameters(group, (parameter, 'test1', 'test2')) self.assertEqual(r[parameter], '') self.assertEqual(r['test1'], None) self.assertEqual(r['test2'], None) r = gxu.get_parameters(group, (parameter, 'test1', 'test2'), 99) self.assertEqual(r[parameter], '') self.assertEqual(r['test1'], 99) self.assertEqual(r['test2'], 99) gxu.save_parameters(group, r) r = gxu.get_parameters(group) self.assertEqual(r[parameter], '') self.assertEqual(r['TEST1'], 99) self.assertEqual(r['TEST2'], 99) gxu.save_parameters(parms={'test_file': '.\\some_file', 't2': 'c:\\abc\\def'}) r = gxu.get_parameters() self.assertEqual(r['TEST_FILE'], '.\\some_file') self.assertEqual(r['T2'], 'c:\\abc\\def') def test_rdecode(self): self.start() cDUMMY="*" rDUMMY=gxu.rdecode(cDUMMY) def test(s): r = gxu.rdecode(s) return r self.assertEqual(test("1.9"),1.9) self.assertEqual(test("1.o9"),1.09) self.assertEqual(test(""),rDUMMY) self.assertEqual(test("*"),rDUMMY) self.assertEqual(test("*ab"),rDUMMY) self.assertEqual(test("\t"),rDUMMY) self.assertEqual(test(" \t \t"),rDUMMY) self.assertEqual(test("\t 000oooOOO45.o0o0 \t\t"),45) self.assertEqual(test("62"),62) self.assertEqual(test("62S"),-62) self.assertEqual(test("62 00 00N"),62) self.assertEqual(test("-62 00 00"),-62) self.assertEqual(test("62.00.00S"),-62) self.assertEqual(test("62N"),62) self.assertEqual(test("62 45S"),-62.75) self.assertEqual(test("62 29 60w"),-62.5) self.assertEqual(test("62 29 60.00E"),62.5) self.assertEqual(test("-62 29 60.00E"),-62.5) self.assertEqual(test("62.45.0.00s"),-62.75) self.assertEqual(test("62.30.30.15W"),-62.508375) self.assertEqual(test("-6172.16.17.32"),-62.508375) self.assertEqual(test("-62.30.30.15W"),62.508375) self.assertEqual(test("13:14:60.00"),13.25) self.assertEqual(test("13:14:60.00pm"),25.25) 
self.assertEqual(test("13:15"),13.25) self.assertEqual(test("2:15PM"),14.25) self.assertEqual(test("2:90pm"),15.5) self.assertEqual(test("\to o o "),0.0) self.assertEqual(test("\to 59 6O "),1.0) self.assertEqual(test("bogus"),rDUMMY) self.assertEqual(test("2014-01-01"),2014.0) self.assertEqual(test("2014-02-25"),2014.150684931507) self.assertEqual(test("2014/02/25"),2014.150684931507) self.assertEqual(test("2014/2/25"),2014.150684931507) self.assertEqual(test("2014/02/5"),2014.0958904109589) self.assertEqual(test("2014/12/31"),2014.9972602739726) self.assertEqual(test("2016/12/31"),2016.9972677595629) self.assertEqual(test("2017-1-1"),2017.0) try: gxu.rdecode_err("bogus") self.assertTrue(False) except ValueError: self.assertTrue(True) try: gxu.rdecode_err("\t0 o 0 0") self.assertTrue(False) except ValueError: self.assertTrue(True) def test_decode(self): self.start() cDUMMY="*" rDUMMY=gxapi.GS_R8DM iDUMMY=gxu.decode(cDUMMY,'i') def test(s,f): r = gxu.decode(s,f) return r self.assertEqual(test("1.9",'f8'),1.9) self.assertEqual(test("*",'f'),rDUMMY) self.assertEqual(test("*",'f4'),rDUMMY) self.assertEqual(test("*",'f8'),rDUMMY) self.assertEqual(test("*",'i'),iDUMMY) self.assertEqual(test("*",'i2'),iDUMMY) self.assertEqual(test("*",'i4'),iDUMMY) self.assertEqual(test("*",'i8'),iDUMMY) self.assertEqual(test("*",'i8'),iDUMMY) self.assertEqual(test("62N",'i'),62) self.assertEqual(test("62",'f'),62.0) self.assertEqual(test("62.500001",'i'),63.0) self.assertEqual(test("-62.500001",'i'),-63.0) self.assertEqual(test("62.4999",'i'),62) self.assertEqual(test("-62.4999",'i'),-62) self.assertEqual(test("-62.4999",'i2'),-62) self.assertEqual(test("-62.4999",'i4'),-62) self.assertEqual(test("-62.4999",'i8'),-62) self.assertEqual(test("62.4999",'u'),62) self.assertEqual(test("62.4999",'b'),True) self.assertEqual(test("0",'b'),False) self.assertEqual(test("0.0",'b'),False) self.assertEqual(test("*",'b'),False) self.assertEqual(test("62N",'U5'),"62N") 
self.assertEqual(test("62Nabcdef",'U3'),"62N") self.assertEqual(test("62N12345",'U5'),"62N12") self.assertEqual(test("62N12345",'S5'),"62N12") self.assertEqual(test("62N12345",'a5'),"62N12") def test_shared_dict(self): self.start() gxu.set_shared_dict() d = gxu.get_shared_dict() self.assertEqual(len(d), 0) gxu.set_shared_dict({'a':0, 'b':[1,2,3]}) d = gxu.get_shared_dict() self.assertEqual(d['a'], 0) self.assertEqual(d['b'][2], 3) d = gxu.get_shared_dict() self.assertEqual(len(d), 0) def test_run_external_python(self): self.start() testpy = os.path.join(os.getcwd(),'test_python.py') with open(testpy, 'w') as py: py.write("import sys\n") py.write("import geosoft.gxpy as gxpy\n") py.write("import geosoft.gxpy.utility as gxu\n") py.write("with gxpy.gx.GXpy() as gxc:\n") py.write(" d = gxu.get_shared_dict()\n") py.write(" gxpy.utility.set_shared_dict({'a':'letter a', 'b':'letter b', 'c':[1,2,3], 'argv': sys.argv, 'in_dict':d})\n") #py.write("input('RUN_EXTERNAL! Press return to continue...')\n") try: test_result = gxu.run_external_python(testpy, script_args='test1 test2', shared_dict={'howdy':'hey there'}, console=False) self.assertEqual(test_result['a'], 'letter a') l = test_result['c'] self.assertEqual(len(l), 3) self.assertEqual(l[1], 2) self.assertEqual(test_result['argv'][1], 'test1') self.assertEqual(test_result['argv'][2], 'test2') self.assertEqual(test_result['in_dict']['howdy'], 'hey there') finally: gxu.delete_file(testpy) self.assertRaises(gxu.UtilityException, gxu.run_external_python, testpy, 'test1 test2') def test_run_external_bad_python(self): self.start() testpy = os.path.join(os.getcwd(), 'test_python.py') with open(testpy, 'w') as py: py.write("import this_module_not_there\n") try: gxu.run_external_python(testpy, script_args='test1 test2') self.assertTrue(False) except gxu.UtilityException as e: self.assertTrue('External python error' in str(e)) except: os.remove(testpy) raise finally: os.remove(testpy) def test_paths(self): self.start() local = 
gxu.folder_workspace() self.assertEqual(os.path.normpath(local), os.getcwd()) user = gxu.folder_user() self.assertTrue(os.path.isdir(user)) temp = gxu.folder_temp() self.assertTrue(os.path.isdir(temp)) def test_display_message(self): self.start() gxu.display_message('test title', 'test message') def test_version(self): self.start() with self.assertRaises(gxu.UtilityException): gxu.check_version("x.y.z") version_backup = gxu.__version__ try: gxu.__version__ = "9.1" self.assertTrue(gxu.check_version("8.5.9")) self.assertTrue(gxu.check_version("9.0")) self.assertTrue(gxu.check_version("9.1")) self.assertTrue(gxu.check_version("9.1.0")) self.assertTrue(gxu.check_version("9.1a0")) self.assertTrue(gxu.check_version("9.1b0")) self.assertTrue(gxu.check_version("9.1a1")) self.assertTrue(gxu.check_version("9.1b1")) with self.assertRaises(gxu.UtilityException): gxu.check_version("9.2.0b") with self.assertRaises(gxu.UtilityException): gxu.check_version("999.999") self.assertFalse(gxu.check_version("9.1.1", raise_on_fail=False)) self.assertFalse(gxu.check_version("9.2", raise_on_fail=False)) self.assertFalse(gxu.check_version("999.999", raise_on_fail=False)) gxu.__version__ = "9.1a1" self.assertTrue(gxu.check_version("9.0")) self.assertTrue(gxu.check_version("9.0a0")) self.assertFalse(gxu.check_version("9.1", raise_on_fail=False)) finally: gxu.__version__ = version_backup def test_datetime(self): self.start() geo_utc = gxu.datetime_from_year(gxapi.GXSYS.utc_date()) # Due to testing environment variables the above would always be 2003-01-01 py_utc = datetime(2003, 1, 1, tzinfo=timezone.utc) self.assertEqual(geo_utc.year, py_utc.year) self.assertEqual(geo_utc.month, py_utc.month) self.assertEqual(geo_utc.day, py_utc.day) self.assertEqual(geo_utc.hour, 0) self.assertEqual(geo_utc.minute, 0) self.assertEqual(geo_utc.second, 0) self.assertEqual(geo_utc.microsecond, 0) dec_year = gxu.year_from_datetime(py_utc) dt = gxu.datetime_from_year(dec_year) self.assertEqual(dt.year, 
py_utc.year) self.assertEqual(dt.month, py_utc.month) self.assertEqual(dt.day, py_utc.day) self.assertEqual(dt.hour, py_utc.hour) self.assertEqual(dt.minute, py_utc.minute) self.assertEqual(dt.second, py_utc.second) self.assertEqual(dt.microsecond, round(py_utc.microsecond / 1000.0) * 1000) def test_crc(self): self.start() self.assertEqual(gxu.crc32(b'bunch of bytes'), 3271364337) self.assertEqual(gxu.crc32_str('a string'), 2577552858) def test_sig_fig(self): self.start() self.assertEqual(gxu.str_significant(1.0, 1), '1') self.assertEqual(gxu.str_significant(-1.0, 1), '-1') self.assertEqual(gxu.str_significant(105.1005, 2), '110') self.assertEqual(gxu.str_significant(-105.0, 2), '-100') self.assertEqual(gxu.str_significant(-105.0001000, 2), '-110') self.assertEqual(gxu.str_significant(105.0, 2, mode=-1), '100') self.assertEqual(gxu.str_significant(-105.0, 2, mode=-1), '-100') self.assertEqual(gxu.str_significant(0.000456789123, 3), '0.000457') self.assertEqual(gxu.str_significant(-0.000456789123, 3), '-0.000457') self.assertEqual(gxu.str_significant(0.000456789123, 3, 1), '0.000457') self.assertEqual(gxu.str_significant(0.000456789123, 3, -1), '0.000456') self.assertEqual(gxu.str_significant(-0.000456789123, 3, -1), '-0.000456') self.assertEqual(gxu.str_significant(4567800000.0, 3), '4570000000') self.assertEqual(gxu.str_significant(4567800000.0, 3, 1), '4570000000') self.assertEqual(gxu.str_significant(4567800000.0, 3, -1), '4560000000') self.assertEqual(gxu.str_significant(0.00045, 3), '0.00045') self.assertEqual(gxu.str_significant(4500000000.0, 3), '4500000000') self.assertEqual(gxu.str_significant(0.000451, 1), '0.0005') self.assertEqual(gxu.str_significant(4510000000.0, 1), '5000000000') self.assertEqual(gxu.str_significant(0.000451, 1, -1), '0.0004') self.assertEqual(gxu.str_significant(4510000000.0, 1, -1), '4000000000') def test_xml(self): self.start() d = {'my_root':{'a':1, 'b':'text_string', 'c':(1, 2), 'd':[1, 2, 'txt']}, 'more':{'a':1, 
'b':'text_string', 'c':(1, 2), 'd':[1, 2, 'txt']}} xml = gxu.xml_from_dict(d, pretty=True) dxml = gxu.dict_from_xml(xml) self.assertEqual(len(dxml), 2) self.assertTrue('my_root' in dxml) self.assertEqual(dxml['my_root']['b'], 'text_string') self.assertEqual(dxml['my_root']['c'], ['1', '2']) self.assertEqual(dxml['my_root']['d'], ['1', '2', 'txt']) self.assertEqual(dxml['more']['b'], 'text_string') self.assertEqual(dxml['more']['c'], ['1', '2']) self.assertEqual(dxml['more']['d'], ['1', '2', 'txt']) xml = '<?xml version="1.0" encoding="UTF-8"?>\ <gmd:MD_Metadata xsi:schemaLocation="http://www.isotc211.org/2005/gmd ../schemas/iso19139fra/gmd/gmd.xsd">\ <geosoft xmlns="http://www.geosoft.com/schema/geo">\ <dataset version="1" beta="abc">\ <title>test_grid_1</title>\ <file_format>Geosoft Grid</file_format>\ </dataset>\ </geosoft>\ </gmd:MD_Metadata>' d = gxu.dict_from_xml(xml) self.assertEqual(d['gmd:MD_Metadata']['geosoft']['dataset']['@version'], "1") xml = gxu.xml_from_dict(d, pretty=True) self.assertTrue('<dataset ' in xml) d = {'geosoft': d['gmd:MD_Metadata']['geosoft']} d['geosoft'].pop('@xmlns', None) xml = gxu.xml_from_dict(d, pretty=True) self.assertFalse('xmlns=' in xml) xml = gxu.geosoft_xml_from_dict(d, pretty=True) self.assertTrue('xmlns=' in xml) xml = gxu.geosoft_xml_from_dict(d['geosoft'], pretty=True) self.assertTrue('<geosoft xmlns=' in xml) folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=gx.gx().temp_folder()) gxml = os.path.join(folder, 'test_grid_1.grd.xml') with open(gxml) as f: m = gxu.dict_from_xml(f.read()) xml = gxu.xml_from_dict(m, pretty=True) m2 = gxu.dict_from_xml(xml) self.assertEqual(m2['gmd:MD_Metadata']['idinfo']['status']['update'], 'None planned') def test_url_retrieve(self): self.start def del_file(f): try: os.remove(f) except: pass def hook(a, b, c): print(a, b, c) url = 'https://github.com/GeosoftInc/gxpy/raw/master/README.md' big = 
'https://github.com/GeosoftInc/gxpy/raw/master/examples/tutorial/2D%20Views%20and%20Maps/Wittichica Creek Residual Total Field.grd' ref_file = 'README.md' test_file = 'test.grd' try: file_name = gxu.url_retrieve(url) self.assertEqual(file_name, ref_file) file_name = gxu.url_retrieve(url, test_file) self.assertEqual(file_name, test_file) file_name = gxu.url_retrieve(big, reporthook=hook) del_file(file_name) except: print('No internet') pass finally: del_file(ref_file) del_file(test_file) def test_unique_name(self): self.start() def exists(name): if name in ('ab', 'ab(1)', 'ab(2)', 'ab.txt', 'ab(1).txt'): return True if name in ('ab', 'ab_1', 'ab_2', 'ab.txt', 'ab_1.txt'): return True if name in ('ab(special).txt',): return True if name in ('ab(special.txt', 'ab(maki_.txt','ab(maki_3.txt'): return True return False def true(name): return True self.assertEqual(gxu.unique_name('c:/temp/billybob', exists), 'c:/temp/billybob') self.assertEqual(gxu.unique_name('ab', exists), 'ab(3)') self.assertEqual(gxu.unique_name('ab.txt', exists), 'ab(2).txt') self.assertEqual(gxu.unique_name('ab', exists, separator='_'), 'ab_3') self.assertEqual(gxu.unique_name('ab.txt', exists, separator='_'), 'ab_2.txt') self.assertEqual(gxu.unique_name('ab(special).txt', exists), 'ab(special)(1).txt') self.assertEqual(gxu.unique_name('ab(special.txt', exists), 'ab(special(1).txt') self.assertEqual(gxu.unique_name('ab(special_.txt', exists, separator='_'), 'ab(special_.txt') self.assertEqual(gxu.unique_name('ab(maki_.txt', exists, separator='_'), 'ab(maki__1.txt') self.assertEqual(gxu.unique_name('ab(maki_3.txt', exists, separator='_'), 'ab(maki_4.txt') self.assertRaises(gxu.UtilityException, gxu.unique_name, 'anything', true, '()', 10) with open('test(2).txt', 'w+') as f: f.write('stuff') self.assertEqual(gxu.unique_name('test(2).txt'), 'test(3).txt') os.remove('test(2).txt') def test_vec_norm(self): self.start() a = np.array(range(24)) self.assertEqual(gxu.vector_normalize(a).shape, (1,)) 
self.assertEqual(gxu.vector_normalize(a).sum(), 1.) a = a.reshape((6, 4)) self.assertEqual(gxu.vector_normalize(a).shape, (6, 4)) self.assertAlmostEqual(gxu.vector_normalize(a).sum(), 11.536183542606089) a = a.reshape((3, 2, 4)) self.assertEqual(gxu.vector_normalize(a).shape, (3, 2, 4)) self.assertAlmostEqual(gxu.vector_normalize(a).sum(), 11.536183542606089) a[1, 1, :] = [0, 0, 0, 0] self.assertTrue(np.isnan(gxu.vector_normalize(a).sum())) self.assertAlmostEqual(np.nansum(gxu.vector_normalize(a)), 9.5430071721870711) def test_file_manipulation(self): self.start() fn = gx.gx().temp_file() with open(fn, '+w') as f: f.write('maki') self.assertFalse(gxu.is_file_locked(fn)) self.assertFalse(gxu.is_path_locked(fn)) dir = os.path.dirname(fn) self.assertFalse(gxu.is_path_locked(dir)) self.assertTrue(gxu.is_path_locked(dir, age=1000.*60*60)) self.assertEqual(gxu.file_age('a completely bogus file name'), -1) time.sleep(0.1) self.assertTrue(gxu.file_age(fn) > 0.) @unittest.skip('HTTP tests are skipped for automated regression') def test_http_stuff(self): self.start() def hook(o): return o def pairs_hook(o): return o params = {'key': 'test'} headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} try: r = requests.get("http://dap.geosoft.com/rest/service/configuration", params=params, headers=headers) response = gxu.dict_from_http_response_text(r.text, object_hook=hook) self.assertTrue(response['MajorVersion'] >= 12) response = gxu.dict_from_http_response_text(r.text, object_pairs_hook=pairs_hook) self.assertEqual(response[0][0], 'Name') r = requests.get("http://dap.geosoft.com/rest/service/configuration", params=params) response = gxu.dict_from_http_response_text(r.text) self.assertTrue(int(response['MajorVersion']) >= 12) response = gxu.dict_from_http_response_text(r.text, object_hook=hook) self.assertTrue(int(response['MajorVersion']) >= 12) except: print('No internet') pass if __name__ == '__main__': unittest.main() <file_sep>/geosoft/gxapi/GXTB.py 
### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXTB(gxapi_cy.WrapTB): """ GXTB class. The `GXTB <geosoft.gxapi.GXTB>` class is a high-performance table class used to perform table-based processing, such as leveling data in an OASIS database. The `GXLTB <geosoft.gxapi.GXLTB>` class is recommended for use with small tables produced from short lists such as the different geographic projections and their defining parameters. """ def __init__(self, handle=0): super(GXTB, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXTB <geosoft.gxapi.GXTB>` :returns: A null `GXTB <geosoft.gxapi.GXTB>` :rtype: GXTB """ return GXTB() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def set_search_mode(self, mode): """ Set the search mode of a table. :param mode: :ref:`TB_SEARCH` :type mode: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If performance is an issue, you may want to test which search mode provides the best performance with typical data. """ self._set_search_mode(mode) def set_sort_mode(self, mode): """ Set the sort mode of a table. :param mode: :ref:`TB_SORT` :type mode: int .. 
versionadded:: 9.3.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_sort_mode(mode) @classmethod def create(cls, name): """ Loads a table into memory and return a table handle. :param name: Name of the table file to load :type name: str :returns: `GXTB <geosoft.gxapi.GXTB>` Object :rtype: GXTB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the table contains fewer data columns than are defined by the the table header, the `GXTB <geosoft.gxapi.GXTB>` object will read in the table and dummy the elements of the missing data columns. """ ret_val = gxapi_cy.WrapTB._create(GXContext._get_tls_geo(), name.encode()) return GXTB(ret_val) @classmethod def create_db(cls, db): """ Create a table from a database. :param db: Database :type db: GXDB :returns: `GXTB <geosoft.gxapi.GXTB>` Object :rtype: GXTB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The table will contain fields for all channels in the database. The database is not loaded with data. Use the `load_db <geosoft.gxapi.GXTB.load_db>` function to load data into the table. """ ret_val = gxapi_cy.WrapTB._create_db(GXContext._get_tls_geo(), db) return GXTB(ret_val) @classmethod def create_ltb(cls, ltb): """ Create a table from an `GXLTB <geosoft.gxapi.GXLTB>` database. :param ltb: `GXLTB <geosoft.gxapi.GXLTB>` object :type ltb: GXLTB :returns: `GXTB <geosoft.gxapi.GXTB>` Object :rtype: GXTB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapTB._create_ltb(GXContext._get_tls_geo(), ltb) return GXTB(ret_val) def field(self, name): """ Get a field handle. 
:param name: Field name :type name: str :returns: The handle to the field (must be present) :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._field(name.encode()) return ret_val def get_string(self, row, col, val): """ Gets a string value from a table element. :param row: Row of element to Get :param col: Column of element to Get :param val: Returned string :type row: int :type col: int :type val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ val.value = self._get_string(row, col, val.value.encode()) def data_type(self, col): """ Returns the data type for the specified column. :param col: Column of element to Get :type col: int :returns: :ref:`DB_CATEGORY_CHAN` :rtype: int .. versionadded:: 5.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._data_type(col) return ret_val def find_col_by_index(self, idx, name): """ Finds a column's name by its index. :param idx: Index of column to find :param name: Buffer for column name :type idx: int :type name: str_ref .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._find_col_by_index(idx, name.value.encode()) def find_col_by_name(self, name): """ Finds a column's index by its name. :param name: Name of column to find :type name: str :returns: Index of column. -1 if not found. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._find_col_by_name(name.encode()) return ret_val def format(self, col): """ Returns the channel format for the specified column. :param col: Column of element to Get :type col: int :returns: :ref:`DB_CHAN_FORMAT` :rtype: int .. versionadded:: 5.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._format(col) return ret_val def get_int(self, row, col): """ Gets an integer value from a table element. :param row: Row of element to Get :param col: Column of element to Get :type row: int :type col: int :returns: Value :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_int(row, col) return ret_val def num_columns(self): """ Gets the number of data fields (columns) in a table. :returns: Number of columns :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_columns() return ret_val def num_rows(self): """ Gets the number of data rows in a table. :returns: Number of rows :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_rows() return ret_val def load_db(self, db, line): """ Load a database into a `GXTB <geosoft.gxapi.GXTB>` :param db: Database :param line: Line :type db: GXDB :type line: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The line is appended to the data already in the table. 
""" self._load_db(db, line) def get_double(self, row, col): """ Gets an real value from a table element. :param row: Row of element to Get :param col: Column of element to Get :type row: int :type col: int :returns: Value :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_double(row, col) return ret_val def save(self, name): """ Saves the data in a table to a file. The table header will be in ASCII and the data will be in BINARY format. :param name: Name of File to save table into :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save(name.encode()) def save_db(self, db, line): """ Save a `GXTB <geosoft.gxapi.GXTB>` in a database line :param db: Database :param line: Line :type db: GXDB :type line: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Missing channels are created. Data in existing channels on the line will be replaced. """ self._save_db(db, line) def save_to_ascii(self, name): """ Saves the data in a table to a file. The table header will be in ASCII and the data will be in ASCII format. :param name: Name of File to save table into :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save_to_ascii(name.encode()) def set_int(self, row, col, val): """ Sets an integer value into a table element. :param row: Row of element to set :param col: Column of element to set :param val: Value to set :type row: int :type col: int :type val: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The table field containing the element to be set MUST be of type `GS_BYTE <geosoft.gxapi.GS_BYTE>`, `GS_USHORT <geosoft.gxapi.GS_USHORT>`, `GS_SHORT <geosoft.gxapi.GS_SHORT>`, or `GS_LONG <geosoft.gxapi.GS_LONG>`. If the field is `GS_BYTE <geosoft.gxapi.GS_BYTE>`, `GS_USHORT <geosoft.gxapi.GS_USHORT>`, or `GS_LONG <geosoft.gxapi.GS_LONG>`, the new data value will cause an overflow if the value is out of range of the data type. The new element value will then be invalid. If the row of the new element exceeds the number of rows in the table, then the table will AUTOMATICALLY be EXPANDED to exactly as many rows needed to hold the new element. The new element is placed in the proper field of the last row, and all other field elements have invalid data. All fields of the new rows up to the new element's row will also contain invalid data. """ self._set_int(row, col, val) def set_double(self, row, col, val): """ Sets an real value into a table element. :param row: Row of element to set :param col: Column of element to set :param val: Value to set :type row: int :type col: int :type val: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The table field containing the element to be set MUST be of type `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` or `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. If the field is `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` the new data value will cause an overflow if the value is out of range of the data type. The new element value will then be invalid. If the row of the new element exceeds the number of rows in the table, then the table will AUTOMATICALLY be EXPANDED to exactly as many rows needed to hold the new element. 
The new element is placed in the proper field of the last row, and all other field elements have invalid data. All fields of the new rows up to the new element's row will also contain invalid data. """ self._set_double(row, col, val) def set_string(self, row, col, val): """ Sets a string value into a table element. :param row: Row of element to set :param col: Column of element to set :param val: Value to set :type row: int :type col: int :type val: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The table field containing the element to be set MUST be of 'string'. If the row of the new element exceeds the number of rows in the table, then the table will AUTOMATICALLY be EXPANDED to exactly as many rows needed to hold the new element. The new element is placed in the proper field of the last row, and all other field elements have invalid data. All fields of the new rows up to the new element's row will also contain invalid data. """ self._set_string(row, col, val.encode()) def sort(self, col): """ Sorts a table by a specified column. :param col: Index of data Column to sort table by :type col: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the column to sort by contains duplicated values, the sorted table is NOT guaranteed to retain the ordering of the duplicated values/ E.g. Given 2 rows of values: xx yy 1 bb aa 1 If the table is sorted on column 3, the second row may or may not come after the first row in the sorted table. 
""" self._sort(col) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/test_segy_reader.py import unittest import os import geosoft import geosoft.gxpy.gx as gx from base import GXPYTest import geosoft.gxpy.segy_reader as segy import geosoft.gxapi as gxapi import geosoft.gxapi.GXSEGYREADER import geosoft.gxpy.system as gsys import geosoft.gxpy.coordinate_system as coordinate_system class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, _files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'testsegy.zip'), folder=cls._gx.temp_folder()) cls.testfile_2d = os.path.join(cls.folder, '2d.segy') cls.testfile_3d = os.path.join(cls.folder, '3d.segy') def test_datatype_constants(self): self.start() gxapi_datatype_names = [] for i in range(geosoft.gxapi.GXSEGYREADER.get_num_trace_data_types()): name_ref = gxapi.str_ref() geosoft.gxapi.GXSEGYREADER.get_trace_data_type_name( i, name_ref) gxapi_datatype_names.append(name_ref.value) for e in segy.DataType: self.assertTrue(e.value in gxapi_datatype_names) def test_scan_file_non_existing(self): self.start() with self.assertRaises(gxapi.GXError) as raise_context: with segy.SegyReader('this_file_nowhere_to_be_found.sgy') as target: target.scan_file() self.assertTrue('Unable to open "this_file_nowhere_to_be_found.sgy"' in str(raise_context.exception)) def test_scan_file(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.scan_file() self.assertEqual(target.scan_file(), segy.InlineCrosslineSanity.OK) def test_get_georeferencing(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(target.georeferencing.coordinate_dict()[ 'name'], '*unknown') self.assertEqual( 
target.georeferencing.coordinate_dict()['units'][:3], 'ft,') with segy.SegyReader(self.testfile_2d) as target: self.assertEqual(target.georeferencing.coordinate_dict()[ 'name'], '*unknown') self.assertEqual( target.georeferencing.coordinate_dict()['units'][:2], 'm,') def test_set_georeferencing(self): self.start() with segy.SegyReader(self.testfile_3d) as target: georef_dict = target.georeferencing.coordinate_dict() georef_dict['name'] = 'NAD83 / UTM zone 15N' target.georeferencing = coordinate_system.Coordinate_system( georef_dict) self.assertEqual(target.georeferencing.coordinate_dict()[ 'name'], 'NAD83 / UTM zone 15N') def test_export_gdb(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.scan_file() target.export_files(gdb=os.path.join(self.folder, 'test.gdb')) def test_export_voxel(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.export_files(voxel=os.path.join( self.folder, 'test.geosoft_voxel')) def test_export_z_slice(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.export_files( slice_prefix=os.path.join( self.folder, 'test_z_slice'), z_slices=[0, 10]) def test_export_inline_slice(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.export_files( slice_prefix=os.path.join( self.folder, 'test_inline_slice'), inline_slices=[20, 30]) def test_export_crossline_slice(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.export_files( slice_prefix=os.path.join( self.folder, 'test_crossline_slice'), crossline_slices=[40, 50]) def test_export_multi_slice(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.export_files( slice_prefix=os.path.join( self.folder, 'test_crossline_slice'), inline_slices=[60, 70], crossline_slices=[80, 90], z_slices=[100, 110]) def test_get_text_header(self): self.start() with segy.SegyReader(self.testfile_3d) as target: # Header should be 3200 characters long 
self.assertEqual(len(target.text_header), 3200) # Header should be ASCII self.assertTrue(all(ord(c) < 128 for c in target.text_header)) self.assertEqual(target.text_header[:4], 'C 1 ') self.assertEqual(target.text_header[3120:3124], 'C40 ') def test_get_binary_header(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(len(target.binary_header.keys()), 27) self.assertEqual(target.binary_header['Line number'], 4) def test_get_trace_header(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual( target.get_trace_header( 0)["Trace sequence number within line"]["offset"], '0') self.assertEqual( target.get_trace_header( 0)["Trace sequence number within line"]["value"], '1') self.assertEqual( target.get_trace_header( 0)["Source coordinate - X"]["offset"], '72') self.assertEqual( target.get_trace_header( 0)["Source coordinate - X"]["value"], '264640') self.assertEqual( target.get_trace_header( 1)["Trace sequence number within line"]["offset"], '0') self.assertEqual( target.get_trace_header( 1)["Trace sequence number within line"]["value"], '2') self.assertEqual( target.get_trace_header( 1)["Source coordinate - X"]["offset"], '72') self.assertEqual( target.get_trace_header( 1)["Source coordinate - X"]["value"], '264640') def test_get_trace_data(self): self.start() with segy.SegyReader(self.testfile_3d) as target: trace = target.get_trace_data(100) self.assertEqual(trace.length, target.trace_length) def test_get_trace_length(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(target.trace_length, 901) def test_set_trace_length(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.trace_length = 100 self.assertEqual(target.trace_length, 100) def test_get_trace_length_from_file_header(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.get_trace_length_from_file_header(16) self.assertEqual(target.trace_length, 14999) def 
test_get_trace_length_from_trace_header(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.get_trace_length_from_trace_header(72) self.assertEqual(target.trace_length, 264640) def test_get_trace_data_type(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(target.trace_data_type, segy.DataType.IBM_FLOAT4) def test_set_trace_data_type(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.trace_data_type = segy.DataType.IEEE_FLOAT4 self.assertEqual(target.trace_data_type, segy.DataType.IEEE_FLOAT4) def test_get_trace_sample_interval(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(target.trace_sample_interval, 14999) def test_set_trace_sample_interval(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.trace_sample_interval = 1000 self.assertEqual(target.trace_sample_interval, 1000) def test_get_z_units(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(target.z_units, 'microseconds') def test_set_z_units(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.z_units = 'milliseconds' self.assertEqual(target.z_units, 'milliseconds') def test_get_z_offset(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(target.z_offset, -7499) def test_set_z_offset(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.z_offset = 2000.5 self.assertEqual(target.z_offset, 2000.5) def test_get_z_offset_units(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(target.z_offset_units, "milliseconds") def test_set_z_offset_units(self): self.start() with segy.SegyReader(self.testfile_3d) as target: target.z_offset_units = 'microseconds' self.assertEqual(target.z_offset_units, "microseconds") def test_get_xy_units(self): self.start() with segy.SegyReader(self.testfile_3d) as target: 
self.assertEqual(target.xy_units, "feet") def test_get_is_depth_or_time(self): self.start() with segy.SegyReader(self.testfile_3d) as target: self.assertEqual(target.is_depth_or_time, segy.ZType.TIME) def test_get_trace_range(self): self.start() with segy.SegyReader(self.testfile_3d) as target: top, bottom = target.trace_range self.assertEqual(top, -7499) self.assertEqual(bottom, 6000.1) def test_get_field_configuration(self): self.start() with segy.SegyReader(self.testfile_3d) as target: config = target.field_configuration self.assertEqual(len(config.fields), 15) self.assertEqual( config.fields[2].mapping, segy.FieldMapping.Crossline) def test_set_field_configuratiion(self): self.start() with segy.SegyReader(self.testfile_3d) as target: config = target.field_configuration config.fields[2].mapping = segy.FieldMapping.Inline config.fields[1].mapping = segy.FieldMapping.Crossline target.field_configuration = config config2 = target.field_configuration self.assertEqual(len(config2.fields), 15) self.assertEqual( config2.fields[2].mapping, segy.FieldMapping.Inline) ########################################################################################## if __name__ == '__main__': unittest.main() <file_sep>/docs/GXPDF3D.rst .. _GXPDF3D: GXPDF3D class ================================== .. autoclass:: geosoft.gxapi.GXPDF3D :members: <file_sep>/docs/GXGD.rst .. _GXGD: GXGD class ================================== .. autoclass:: geosoft.gxapi.GXGD :members: .. _GD_STATUS: GD_STATUS constants ----------------------------------------------------------------------- Grid open mode .. autodata:: geosoft.gxapi.GD_STATUS_READONLY :annotation: .. autoattribute:: geosoft.gxapi.GD_STATUS_READONLY .. autodata:: geosoft.gxapi.GD_STATUS_NEW :annotation: .. autoattribute:: geosoft.gxapi.GD_STATUS_NEW .. autodata:: geosoft.gxapi.GD_STATUS_OLD :annotation: .. 
autoattribute:: geosoft.gxapi.GD_STATUS_OLD
<file_sep>/geosoft/gxapi/GXSTORAGEPROJECT.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref

### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXSTORAGEPROJECT(gxapi_cy.WrapSTORAGEPROJECT):
    """
    GXSTORAGEPROJECT class.

    Project Storage.
    """

    def __init__(self, handle=0):
        # Wraps a native handle; 0 means a null (undefined) instance.
        super(GXSTORAGEPROJECT, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXSTORAGEPROJECT <geosoft.gxapi.GXSTORAGEPROJECT>`

        :returns: A null `GXSTORAGEPROJECT <geosoft.gxapi.GXSTORAGEPROJECT>`
        :rtype:   GXSTORAGEPROJECT
        """
        return GXSTORAGEPROJECT()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    @classmethod
    def open(cls, name):
        """
        Open a project storage.

        :param name:  Project File Name
        :type  name:  str

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSTORAGEPROJECT._open(GXContext._get_tls_geo(), name.encode())



    @classmethod
    def close(cls):
        """
        Close the project storage.

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSTORAGEPROJECT._close(GXContext._get_tls_geo())



    @classmethod
    def remove_dataset(cls, name):
        """
        Remove this dataset from the project.

        :param name:  Dataset File Name
        :type  name:  str

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSTORAGEPROJECT._remove_dataset(GXContext._get_tls_geo(), name.encode())

### endblock ClassImplementation

### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/docs/toc.rst
:orphan:

.. _toc:

Table of Contents
=================

.. toctree::
   :maxdepth: 2

   index
   geosoft.gxapi.classes
   geosoft.gxapi
   geosoft.gxpy
   version_history.rst
<file_sep>/geosoft/gxapi/GXIEXP.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref

### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXIEXP(gxapi_cy.WrapIEXP):
    """
    GXIEXP class.

    The `GXIEXP <geosoft.gxapi.GXIEXP>` class is similar to the `GXEXP <geosoft.gxapi.GXEXP>` class,
    but is used to apply math expressions to grids (`GXIMG <geosoft.gxapi.GXIMG>` objects).
    """

    def __init__(self, handle=0):
        super(GXIEXP, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXIEXP <geosoft.gxapi.GXIEXP>`

        :returns: A null `GXIEXP <geosoft.gxapi.GXIEXP>`
        :rtype:   GXIEXP
        """
        return GXIEXP()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    def add_grid(self, img, var):
        """
        This method adds an image to the `GXIEXP <geosoft.gxapi.GXIEXP>` object
        with a variable name.
        :param img:  Image to add
        :param var:  Variable name
        :type  img:  GXIMG
        :type  var:  str

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._add_grid(img, var.encode())



    @classmethod
    def create(cls):
        """
        This method creates an `GXIEXP <geosoft.gxapi.GXIEXP>` object.

        :returns: `GXIEXP <geosoft.gxapi.GXIEXP>` Object
        :rtype:   GXIEXP

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        ret_val = gxapi_cy.WrapIEXP._create(GXContext._get_tls_geo())
        return GXIEXP(ret_val)



    def do_formula(self, formula, unused):
        """
        This method runs a formula on the grids.

        :param formula:  Formula
        :param unused:   Legacy parameter, no longer used.
        :type  formula:  str
        :type  unused:   int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._do_formula(formula.encode(), unused)

### endblock ClassImplementation

### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/geosoft/gxapi/GXDAT.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref

### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXDAT(gxapi_cy.WrapDAT):
    """
    GXDAT class.

    The `GXDAT <geosoft.gxapi.GXDAT>` object is used to access data from
    a variety of data sources using the same access functions. The
    `GXDAT <geosoft.gxapi.GXDAT>` interface supports data access on a
    point-by-point, or line-by-line basis.  For example,
    the `GXBIGRID.run <geosoft.gxapi.GXBIGRID.run>` function uses 2
    `GXDAT <geosoft.gxapi.GXDAT>` objects - one `GXDAT <geosoft.gxapi.GXDAT>`
    associated with the input data source, which is read line-by-line,
    and a second associated with the output grid file output grid file.

    Use a specific `GXDAT <geosoft.gxapi.GXDAT>` creation method for an
    associated information source in order to make a
    `GXDAT <geosoft.gxapi.GXDAT>` as required by a specific processing
    function.  The gridding methods all use DATs.
    """

    def __init__(self, handle=0):
        super(GXDAT, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXDAT <geosoft.gxapi.GXDAT>`

        :returns: A null `GXDAT <geosoft.gxapi.GXDAT>`
        :rtype:   GXDAT
        """
        return GXDAT()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    @classmethod
    def create_db(cls, db, x_ch, y_ch, z_ch):
        """
        Create a handle to a database `GXDAT <geosoft.gxapi.GXDAT>` object

        :param db:    Handle to database which `GXDAT <geosoft.gxapi.GXDAT>` is connected with
        :param x_ch:  Name of X channel in database
        :param y_ch:  Name of Y channel in database
        :param z_ch:  Name of Z channel in database
        :type  db:    GXDB
        :type  x_ch:  str
        :type  y_ch:  str
        :type  z_ch:  str

        :returns:     `GXDAT <geosoft.gxapi.GXDAT>` Object
        :rtype:       GXDAT

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapDAT._create_db(GXContext._get_tls_geo(), db, x_ch.encode(), y_ch.encode(), z_ch.encode())
        return GXDAT(ret_val)



    @classmethod
    def create_xgd(cls, name, mode):
        """
        Create a handle to a grid file `GXDAT <geosoft.gxapi.GXDAT>` object

        :param name:  Name of grid file to associate `GXDAT <geosoft.gxapi.GXDAT>` with
        :param mode:  :ref:`DAT_XGD`
        :type  name:  str
        :type  mode:  int

        :returns:     `GXDAT <geosoft.gxapi.GXDAT>` Object
        :rtype:       GXDAT

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapDAT._create_xgd(GXContext._get_tls_geo(), name.encode(), mode)
        return GXDAT(ret_val)



    @classmethod
    def get_lst(cls, lst, interface_name, flags, mode):
        """
        Put available `GXDAT <geosoft.gxapi.GXDAT>` filters and qualifiers in a `GXLST <geosoft.gxapi.GXLST>`

        :param lst:             `GXLST <geosoft.gxapi.GXLST>` object to populate
        :param interface_name:  `GXDAT <geosoft.gxapi.GXDAT>` interface name ("XGD" only support option currently)
        :param flags:           :ref:`DAT_FILE`
        :param mode:            :ref:`DAT_FILE_FORM`
        :type  lst:             GXLST
        :type  interface_name:  str
        :type  flags:           int
        :type  mode:            int

        .. versionadded:: 5.1.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The filters displayed in the Grid/Image file browse dialog are put
        in the "Name" of the `GXLST <geosoft.gxapi.GXLST>`, while the file
        qualifiers are stored in the "Value".
        """
        gxapi_cy.WrapDAT._get_lst(GXContext._get_tls_geo(), lst, interface_name.encode(), flags, mode)



    def range_xyz(self, min_x, min_y, min_z, max_x, max_y, max_z, num_non_dummy):
        """
        Determine the range in X, Y and Z in the `GXDAT <geosoft.gxapi.GXDAT>` source

        :param min_x:          Minimum X (`rMAX <geosoft.gxapi.rMAX>` if none)
        :param min_y:          Minimum Y (`rMAX <geosoft.gxapi.rMAX>` if none)
        :param min_z:          Minimum Z (`rMAX <geosoft.gxapi.rMAX>` if none)
        :param max_x:          Maximum X (`rMIN <geosoft.gxapi.rMIN>` if none)
        :param max_y:          Maximum Y (`rMIN <geosoft.gxapi.rMIN>` if none)
        :param max_z:          Maximum Z (`rMIN <geosoft.gxapi.rMIN>` if none)
        :param num_non_dummy:  Number of non-dummy XYZ.
        :type  min_x:          float_ref
        :type  min_y:          float_ref
        :type  min_z:          float_ref
        :type  max_x:          float_ref
        :type  max_y:          float_ref
        :type  max_z:          float_ref
        :type  num_non_dummy:  int_ref

        .. versionadded:: 7.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Terminates if unable to open an RPT `GXDAT <geosoft.gxapi.GXDAT>` interface.
        """
        # Outputs are returned through the *_ref holder objects.
        min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value, num_non_dummy.value = self._range_xyz(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value, num_non_dummy.value)

### endblock ClassImplementation

### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/docs/GXPAT.rst
.. _GXPAT:

GXPAT class
==================================

.. autoclass:: geosoft.gxapi.GXPAT
   :members:

<file_sep>/geosoft/gxapi/GXSEMPLOT.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref

### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXSEMPLOT(gxapi_cy.WrapSEMPLOT):
    """
    GXSEMPLOT class.

    Oasis montaj implementation of RTE `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>`
    """

    def __init__(self, handle=0):
        # Wraps a native handle; 0 means a null (undefined) instance.
        super(GXSEMPLOT, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>`

        :returns: A null `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>`
        :rtype:   GXSEMPLOT
        """
        return GXSEMPLOT()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    @classmethod
    def apply_filter_to_mask(cls, db, filter, mask_ch, mineral_ch, mineral, mode):
        """
        Apply the filter to the mask channel

        :param db:          Database handle
        :param filter:      Filter name
        :param mask_ch:     Mask channel name
        :param mineral_ch:  Mineral channel name
        :param mineral:     Mineral to use ("All" or "" for all)
        :param mode:        Mask mode (0: Append, 1: New)
        :type  db:          GXDB
        :type  filter:      str
        :type  mask_ch:     str
        :type  mineral_ch:  str
        :type  mineral:     str
        :type  mode:        int

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The mask channel is updated for the current data to reflect
        the actions of the filter. Those values passing get 1, those
        failing get 0.
        """
        gxapi_cy.WrapSEMPLOT._apply_filter_to_mask(GXContext._get_tls_geo(), db, filter.encode(), mask_ch.encode(), mineral_ch.encode(), mineral.encode(), mode)



    @classmethod
    def convert_dummies(cls, db, line):
        """
        Convert dummies to zero values for assay channels.

        :param db:    Database handle
        :param line:  Input line to convert
        :type  db:    GXDB
        :type  line:  int

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** This operation is controlled by the Preferences "Use dummies to
        indicate no data?" By default, this option is "yes" so this function
        will return with no changes.  However, if "no", then all ASSAY class
        channels will have dummy values converted to 0.0.
        """
        gxapi_cy.WrapSEMPLOT._convert_dummies(GXContext._get_tls_geo(), db, line)



    @classmethod
    def create_groups(cls, db, mask_ch):
        """
        Group data by anomaly or string channel - Interactive.

        :param db:       Database handle
        :param mask_ch:  Mask channel
        :type  db:       GXDB
        :type  mask_ch:  str

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapSEMPLOT._create_groups(GXContext._get_tls_geo(), db, mask_ch.encode())



    @classmethod
    def default_groups(cls, db):
        """
        Group data by selected anomalies.

        :param db:  Database handle
        :type  db:  GXDB

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapSEMPLOT._default_groups(GXContext._get_tls_geo(), db)



    @classmethod
    def edit_map_plot_parameters(cls, db, mask_ch, mineral_ch, map, view):
        """
        Alter parameters in an XYplot Triplot map.

        :param db:          Database handle
        :param mask_ch:     Mask channel (can be "")
        :param mineral_ch:  Mineral channel (can be "" for raw data)
        :param map:         Map handle
        :param view:        Map View
        :type  db:          GXDB
        :type  mask_ch:     str
        :type  mineral_ch:  str
        :type  map:         GXMAP
        :type  view:        str

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The Parameters `GXGUI <geosoft.gxapi.GXGUI>` is loaded based on
        settings stored in the map. The map is then re-plotted, overwriting the
        old one, based on the new settings. Note that the selection of data in
        the current `GXDB <geosoft.gxapi.GXDB>` is used to replot the map.
        """
        gxapi_cy.WrapSEMPLOT._edit_map_plot_parameters(GXContext._get_tls_geo(), db, mask_ch.encode(), mineral_ch.encode(), map, view.encode())



    @classmethod
    def edit_plot_components(cls, db, templ):
        """
        Set group names and channels to plot in a template.

        :param db:     Database handle
        :param templ:  Template name
        :type  db:     GXDB
        :type  templ:  str

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The "Components" group in the INI file is edited.

        Looks first in user\\etc, then in \\etc.
        Looks first for file prefix "semtemplate" then "xyt" or "tri"
        The altered template will be output to the user\\etc directory with
        the file extension "semtemplate".
        """
        gxapi_cy.WrapSEMPLOT._edit_plot_components(GXContext._get_tls_geo(), db, templ.encode())



    @classmethod
    def edit_plot_parameters(cls, db, templ):
        """
        Set TriPlot parameters in a template.

        :param db:     Database handle
        :param templ:  Template name
        :type  db:     GXDB
        :type  templ:  str

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The "Parameters" group in the INI file is edited.

        Looks first in user\\etc, then in \\etc.
        Looks first for file prefix "semtemplate" then "xyt" or "tri"
        The altered template will be output to the user\\etc directory with
        the file extension "semtemplate".
        """
        gxapi_cy.WrapSEMPLOT._edit_plot_parameters(GXContext._get_tls_geo(), db, templ.encode())



    @classmethod
    def export_overlay(cls, overlay, map, mview, group, plot_type, x_stage, x_oxide, y_stage, y_oxide, z_stage, z_oxide, extension):
        """
        Create overlay map and file from a group.

        :param overlay:    Overlay file name
        :param map:        Associated map
        :param mview:      View with group
        :param group:      Group name
        :param plot_type:  :ref:`SEMPLOT_PLOT`
        :param x_stage:    XStage
        :param x_oxide:    XOxide
        :param y_stage:    YStage
        :param y_oxide:    YOxide
        :param z_stage:    ZStage
        :param z_oxide:    ZOxide
        :param extension:  :ref:`SEMPLOT_EXT`
        :type  overlay:    str
        :type  map:        str
        :type  mview:      GXMVIEW
        :type  group:      str
        :type  plot_type:  int
        :type  x_stage:    str
        :type  x_oxide:    str
        :type  y_stage:    str
        :type  y_oxide:    str
        :type  z_stage:    str
        :type  z_oxide:    str
        :type  extension:  int

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The group is written to a new map, and an overlay file
        is created which points to this map.
        """
        gxapi_cy.WrapSEMPLOT._export_overlay(GXContext._get_tls_geo(), overlay.encode(), map.encode(), mview, group.encode(), plot_type, x_stage.encode(), x_oxide.encode(), y_stage.encode(), y_oxide.encode(), z_stage.encode(), z_oxide.encode(), extension)



    @classmethod
    def export_view(cls, db, lst, new_db, view, mask_ch, mineral_ch, mineral):
        """
        Create a "View" database

        :param db:          Original raw data database
        :param lst:         List of lines (anomlies) to export
        :param new_db:      Destination database
        :param view:        View to export - One of SEMPLOT_XXX_STAGE
        :param mask_ch:     Mask channel ("" for None)
        :param mineral_ch:  Mineral channel
        :param mineral:     Mineral to export ("" for all)
        :type  db:          GXDB
        :type  lst:         GXLST
        :type  new_db:      GXDB
        :type  view:        int
        :type  mask_ch:     str
        :type  mineral_ch:  str
        :type  mineral:     str

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapSEMPLOT._export_view(GXContext._get_tls_geo(), db, lst, new_db, view, mask_ch.encode(), mineral_ch.encode(), mineral.encode())



    @classmethod
    def export_view2(cls, db, lst, new_db, view, mask_ch, mineral_ch, mineral, export_extra):
        """
        Create a "View" database, with channel selection

        :param db:            Original raw data database
        :param lst:           List of lines (anomlies) to export
        :param new_db:        Destination database
        :param view:          View to export - One of SEMPLOT_XXX_STAGE
        :param mask_ch:       Mask channel ("" for None)
        :param mineral_ch:    Mineral channel
        :param mineral:       Mineral to export ("" for all)
        :param export_extra:  :ref:`SEMPLOT_EXPORT` Channel selection
        :type  db:            GXDB
        :type  lst:           GXLST
        :type  new_db:        GXDB
        :type  view:          int
        :type  mask_ch:       str
        :type  mineral_ch:    str
        :type  mineral:       str
        :type  export_extra:  int

        .. versionadded:: 7.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapSEMPLOT._export_view2(GXContext._get_tls_geo(), db, lst, new_db, view, mask_ch.encode(), mineral_ch.encode(), mineral.encode(), export_extra)



    @classmethod
    def filter_lst(cls, lst):
        """
        Fill a `GXLST <geosoft.gxapi.GXLST>` with existing `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` filters

        :param lst:  `GXLST <geosoft.gxapi.GXLST>` to fill.
        :type  lst:  GXLST

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** "Supplied" filters are stored in \\etc, while user-edited and
        new filters are stored in user\\etc. This function finds all files
        with the extension ".semfilter", first in user\\etc, then in \\etc,
        and adds the file names (without the extension) to the
        `GXLST <geosoft.gxapi.GXLST>`. The name with the extension is stored
        as the value. The `GXLST <geosoft.gxapi.GXLST>` is cleared first.
        """
        gxapi_cy.WrapSEMPLOT._filter_lst(GXContext._get_tls_geo(), lst)



    @classmethod
    def filter_mineral_pos_data(cls, db, mask_ch, mineral_ch, mineral, pos):
        """
        Filter raw data by position and mineral values

        :param db:          Database handle
        :param mask_ch:     Mask channel
        :param mineral_ch:  Mineral channel
        :param mineral:     Mineral (string) - "C", "I" etc.
        :param pos:         Grain position
        :type  db:          GXDB
        :type  mask_ch:     str
        :type  mineral_ch:  str
        :type  mineral:     str
        :type  pos:         int

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The Mask channel will be updated so that those data values
        which "pass" get "1" and those that "fail" get dummy "*"
        NO DATA IS REMOVED.
        Works on all selected lines of data.
        """
        gxapi_cy.WrapSEMPLOT._filter_mineral_pos_data(GXContext._get_tls_geo(), db, mask_ch.encode(), mineral_ch.encode(), mineral.encode(), pos)



    @classmethod
    def get_associated_lst(cls, db, group, lst):
        """
        Get the associated channels for this group in a `GXLST <geosoft.gxapi.GXLST>`

        :param db:     Database handle
        :param group:  Data Group handle
        :param lst:    `GXLST <geosoft.gxapi.GXLST>` to copy channels into
        :type  db:     GXDB
        :type  group:  int
        :type  lst:    GXLST

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapSEMPLOT._get_associated_lst(GXContext._get_tls_geo(), db, group, lst)



    @classmethod
    def get_current_mineral_lst(cls, db, mineral_ch, lst):
        """
        Retrieve `GXLST <geosoft.gxapi.GXLST>` of minerals in selected lines.

        :param db:          Database handle
        :param mineral_ch:  Mineral channel name
        :param lst:         `GXLST <geosoft.gxapi.GXLST>` object
        :type  db:          GXDB
        :type  mineral_ch:  str
        :type  lst:         GXLST

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** If the mineral channel name is not specified, it returns
        just the "X" (Unknown) item.
        """
        gxapi_cy.WrapSEMPLOT._get_current_mineral_lst(GXContext._get_tls_geo(), db, mineral_ch.encode(), lst)



    @classmethod
    def get_current_position_lst(cls, db, lst):
        """
        Retrieve `GXLST <geosoft.gxapi.GXLST>` of positions in selected lines.

        :param db:   Database handle
        :param lst:  `GXLST <geosoft.gxapi.GXLST>` object
        :type  db:   GXDB
        :type  lst:  GXLST

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapSEMPLOT._get_current_position_lst(GXContext._get_tls_geo(), db, lst)



    @classmethod
    def get_full_mineral_lst(cls, lst):
        """
        Retrieve `GXLST <geosoft.gxapi.GXLST>` of all minerals in Semplot_Minerals.csv

        :param lst:  `GXLST <geosoft.gxapi.GXLST>` object
        :type  lst:  GXLST

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapSEMPLOT._get_full_mineral_lst(GXContext._get_tls_geo(), lst)



    @classmethod
    def get_full_position_lst(cls, lst):
        """
        Retrieve `GXLST <geosoft.gxapi.GXLST>` of all possible mineral positions.

        :param lst:  `GXLST <geosoft.gxapi.GXLST>` object
        :type  lst:  GXLST

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapSEMPLOT._get_full_position_lst(GXContext._get_tls_geo(), lst)



    @classmethod
    def get_grouping_lst(cls, db, lst):
        """
        Get list of items to group symbols by.

        :param db:   Database handle
        :param lst:  List to hold items
        :type  db:   GXDB
        :type  lst:  GXLST

        ..
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The first item is "Anomaly", which gives the line names, The second item (if the channel exists in the database) is the Sample Number. After this are included all string channels which are NOT oxides or elements. (The list can include the mineral). Channel symbol is the `GXLST <geosoft.gxapi.GXLST>` value (except for the first item - "Anomaly") """ gxapi_cy.WrapSEMPLOT._get_grouping_lst(GXContext._get_tls_geo(), db, lst) @classmethod def create_ascii_template(cls, name, temp): """ : Generate ASCII import template automatically :param name: Data file name :param temp: Template to make :type name: str :type temp: str :returns: 1 if it succeeds in creating a Template. 0 if it fails. :rtype: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSEMPLOT._create_ascii_template(GXContext._get_tls_geo(), name.encode(), temp.encode()) return ret_val @classmethod def create_database_template(cls, name, temp): """ Generate database import template automatically :param name: Data file name :param temp: Template to make :type name: str :type temp: str :returns: 1 if it succeeds in creating a Template. 0 if it fails. :rtype: int .. 
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSEMPLOT._create_database_template(GXContext._get_tls_geo(), name.encode(), temp.encode()) return ret_val @classmethod def edit_filter(cls, db, filter, mask_ch, mineral_ch, mineral): """ Edit and create filter on channel values :param db: Database handle :param filter: Name of filter :param mask_ch: Mask channel name :param mineral_ch: Mineral channel name :param mineral: Mineral to restrict filter to. :type db: GXDB :type filter: str :type mask_ch: str :type mineral_ch: str :type mineral: str :returns: -1 - Cancel - Edits to filter discarded. 0 - Normal Return. Edits saved to filter file. 1 - Apply filter to current data only 2 - Remove filter - If removing filtered data, just restore the data to the Min/Pos data otherwise set the mask channel to 1. Re-entry code. If not `iDUMMY <geosoft.gxapi.iDUMMY>`, what to do inside the filter after going back in. Returned on exit, used on next input. 0 - Nothing. Don't need to go back into this function again. 1 - Edit the filter. Notes New and edited filters are stored in user\\etc in files with the file extension ".semfilter" If a file for the specified filter does not exist, then a new filter by that name will be created. :rtype: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSEMPLOT._edit_filter(GXContext._get_tls_geo(), db, filter.encode(), mask_ch.encode(), mineral_ch.encode(), mineral.encode()) return ret_val @classmethod def get_mineral_channel_name(cls, db, mineral_ch): """ Retrieve the mineral channel name. :param db: Database handle :param mineral_ch: Mineral channel name :type db: GXDB :type mineral_ch: str_ref .. 
versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** First looks at the `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>`.MINERAL_CHANNEL value. If not found, returns the first MINERAL class channel found. If still not found, returns a blank string. """ mineral_ch.value = gxapi_cy.WrapSEMPLOT._get_mineral_channel_name(GXContext._get_tls_geo(), db, mineral_ch.value.encode()) @classmethod def import_ascii_wizard(cls, name, temp, anomaly): """ Generate a `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` ASCII import template. :param name: Data file name :param temp: Template to make :param anomaly: Anomaly name (can be "") :type name: str :type temp: str :type anomaly: str_ref .. versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the anomaly name is not included, then the input data must have an "Anom_Name" field. """ anomaly.value = gxapi_cy.WrapSEMPLOT._import_ascii_wizard(GXContext._get_tls_geo(), name.encode(), temp.encode(), anomaly.value.encode()) @classmethod def import_database_odbc(cls, connection, temp): """ Generate a template file for importing ODBC databases. :param connection: Connection string (input and returned) :param temp: Template file (returned) :type connection: str_ref :type temp: str_ref .. 
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ connection.value, temp.value = gxapi_cy.WrapSEMPLOT._import_database_odbc(GXContext._get_tls_geo(), connection.value.encode(), temp.value.encode()) @classmethod def import_bin(cls, db, data, templ, line, flight, date): """ Import blocked binary or archive ASCII data :param db: Database :param data: Import data file name :param templ: Import template name :param line: Optional Line name (see note 3.) :param flight: Optional Flight number :param date: Optional date :type db: GXDB :type data: str :type templ: str :type line: str :type flight: int :type date: float .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This wrapper is for `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>`, and does not require the import licence. 1. Binary import templates have extension .I2 by convention. See BINARY.I2 for a description of the template format. Archive import templates have extension .I3 by convention. See ARCHIVE.I3 for a description of the template format. 2. Both the import template and data file must exist. 3. If a line already exists in the database, a new version is created unless a line name is passed in. In this case, the specified name is used and the imported channels on the previous line will be destroyed. .. seealso:: `GXDU.lab_template <geosoft.gxapi.GXDU.lab_template>` in du.gxh """ gxapi_cy.WrapSEMPLOT._import_bin(GXContext._get_tls_geo(), db, data.encode(), templ.encode(), line.encode(), flight, date) @classmethod def import_database_ado(cls, name, temp): """ Generate a template file for importing semplot databases. :param name: Data file name :param temp: Template to make :type name: str :type temp: str .. 
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapSEMPLOT._import_database_ado(GXContext._get_tls_geo(), name.encode(), temp.encode()) @classmethod def init_group_symbols_used(cls, db): """ Initializes memory of symbols used in plotting. :param db: Database handle :type db: GXDB .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Maintains a list of the symbols used in plotting. Call before Plotting one or more legends - symbols are accumulated. `plot_symbol_legend <geosoft.gxapi.GXSEMPLOT.plot_symbol_legend>` uses this information to create a legend. """ gxapi_cy.WrapSEMPLOT._init_group_symbols_used(GXContext._get_tls_geo(), db) @classmethod def template_type(cls, templ): """ Create a new XYPlot or TriPlot template. :param templ: Template name :type templ: str :returns: `SEMPLOT_PLOT_XYPLOT <geosoft.gxapi.SEMPLOT_PLOT_XYPLOT>` or `SEMPLOT_PLOT_TRIPLOT <geosoft.gxapi.SEMPLOT_PLOT_TRIPLOT>` Terminates if error. :rtype: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSEMPLOT._template_type(GXContext._get_tls_geo(), templ.encode()) return ret_val @classmethod def view_type(cls, map, view): """ Test to see if a view is an XYPlot or Triplot view. :param map: Input map object :param view: Input view name :type map: GXMAP :type view: str :returns: :ref:`SEMPLOT_PLOT` :rtype: int .. versionadded:: 6.4.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** First checks the view name to see if it starts with ``"XYplt_"`` or ``"Triplt_"``. 
Failing that it looks in the view `GXREG <geosoft.gxapi.GXREG>` for a value for "Components.Type", which will be either "XYPlot" or "TriPlot". If the view does not appear to be an XYPlot or a TriPlot view, the function returns `SEMPLOT_PLOT_UNKNOWN <geosoft.gxapi.SEMPLOT_PLOT_UNKNOWN>`. """ ret_val = gxapi_cy.WrapSEMPLOT._view_type(GXContext._get_tls_geo(), map, view.encode()) return ret_val @classmethod def mineral_id(cls, db, resid, min_ch, res_ch): """ Identify minerals from the oxide channels. :param db: Database :param resid: Maximum residual value (in % of the total oxide) :param min_ch: Mineral channel (Locked RW) :param res_ch: Residual channel (Locked RW) :type db: GXDB :type resid: float :type min_ch: int :type res_ch: int .. versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Finds the best mineral matching the composition for each row of oxide values. Works using linear programming and the simplex method to maximize the oxides used to create each of the possible output minerals. The mineral leaving the least leftover is selected, as long as the residual (measured as a percent of the total) is less than or equal to the input value. """ gxapi_cy.WrapSEMPLOT._mineral_id(GXContext._get_tls_geo(), db, resid, min_ch, res_ch) @classmethod def new_filter(cls, filter, model): """ Create a new selection filter. :param filter: New filter name :param model: Filter to use as a model (can be "") :type filter: str :type model: str .. 
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Creates a new, empty filter file in the user\\etc directory """ gxapi_cy.WrapSEMPLOT._new_filter(GXContext._get_tls_geo(), filter.encode(), model.encode()) @classmethod def new_template(cls, templ, type, model): """ Create a new XYPlot or TriPlot template. :param templ: New template name :param type: Unknown :param model: Template to use as a model (can be "") :type templ: str :type type: int :type model: str .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The new template is written to the user\\etc directory, with the file extension "semfilter". The template contains a parameter identifying it as an XY or Triplot. Model Template: Looks first in user\\etc, then in \\etc. Looks first for file prefix "semtemplate" then "xyt" or "tri" Because there are so many shared parameters, it is possible to use an XYPlot template as a model for a TriPlot, and vica-verca, with few complications. (e.g. needing to define a "Z" component) """ gxapi_cy.WrapSEMPLOT._new_template(GXContext._get_tls_geo(), templ.encode(), type, model.encode()) @classmethod def overlay_lst(cls, lst, extension, type): """ Fill a list with the available plot overlay names :param lst: Input `GXLST <geosoft.gxapi.GXLST>`. :param extension: :ref:`SEMPLOT_EXT` :param type: :ref:`SEMPLOT_PLOT` :type lst: GXLST :type extension: int :type type: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Looks first in user\\etc, then in \\etc. See :ref:`SEMPLOT_EXT` definitions above for which files to look for. 
""" gxapi_cy.WrapSEMPLOT._overlay_lst(GXContext._get_tls_geo(), lst, extension, type) @classmethod def plot(cls, db, templ, mask_ch, mineral_ch, map, map_mode, plot_symb): """ Plot an XYPlot or TriPlot based on the template. :param db: Database handle :param templ: Template file name :param mask_ch: Mask channel (can be "") :param mineral_ch: Mineral channel (can be "" for raw data) :param map: Map name :param map_mode: Map open mode; one of MAP_WRITEXXX (see map.gxh) :param plot_symb: Plot symbols (O: No, 1:Yes) ? :type db: GXDB :type templ: str :type mask_ch: str :type mineral_ch: str :type map: str :type map_mode: int :type plot_symb: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The "Components" and "Parameters" groups in the INI file are used. Only values with mask values of 1 are plotted, if the mask channel is specified. Call "`reset_used_channel <geosoft.gxapi.GXSEMPLOT.reset_used_channel>`" prior to this function in order to track the values actually plotted. Call `init_group_symbols_used <geosoft.gxapi.GXSEMPLOT.init_group_symbols_used>` prior to this function to reset recording of the symbols used in plotting (for legends etc). """ gxapi_cy.WrapSEMPLOT._plot(GXContext._get_tls_geo(), db, templ.encode(), mask_ch.encode(), mineral_ch.encode(), map.encode(), map_mode, plot_symb) @classmethod def plot_symbol_legend(cls, db, mview, x_min, y_min, y_max, symb_size): """ Plot a symbol legend in a view. :param db: Database handle :param mview: View to plot into :param x_min: X Minimum :param y_min: Y Minimum :param y_max: Y Maximum :param symb_size: Symbol size :type db: GXDB :type mview: GXMVIEW :type x_min: float :type y_min: float :type y_max: float :type symb_size: float .. 
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This function depends on `init_group_symbols_used <geosoft.gxapi.GXSEMPLOT.init_group_symbols_used>` before the plot for which this legend is created is made. The symbols and groups to use in the legend are stored to a database blob after the plot is made. These values are recovered by this function to make the legend at the specified location. """ gxapi_cy.WrapSEMPLOT._plot_symbol_legend(GXContext._get_tls_geo(), db, mview, x_min, y_min, y_max, symb_size) @classmethod def prop_symb(cls, db, map, view, chan, mask_ch, mineral_ch, log, area, base, scale, symb, wt, line_col, fill_col, legend): """ Plot a proportional symbol plot. :param db: Database handle :param map: Map to plot to :param view: View to replot :param chan: Channel name :param mask_ch: Mask channel (can be "") :param mineral_ch: Mineral channel ( :param log: Linear (0) or logarithmic (1) scaling :param area: Scale by diameter (0) or area (1) :param base: Scale base (log) data units :param scale: Scale factor (log) data units/mm :param symb: Symbol number :param wt: Symbol weight :param line_col: Symbol line color :param fill_col: Symbol fill color :param legend: Plot legend? :type db: GXDB :type map: GXMAP :type view: str :type chan: str :type mask_ch: str :type mineral_ch: str :type log: int :type area: int :type base: float :type scale: float :type symb: int :type wt: int :type line_col: int :type fill_col: int :type legend: int .. 
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Replots map using proportional symbols """ gxapi_cy.WrapSEMPLOT._prop_symb(GXContext._get_tls_geo(), db, map, view.encode(), chan.encode(), mask_ch.encode(), mineral_ch.encode(), log, area, base, scale, symb, wt, line_col, fill_col, legend) @classmethod def replot(cls, db, mask_ch, mineral_ch, map, view): """ Replot an existing `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` plot based on current data. :param db: Database handle :param mask_ch: Mask channel (can be "") :param mineral_ch: Mineral channel (can be "" for raw data) :param map: Map handle :param view: Map View containing the plot :type db: GXDB :type mask_ch: str :type mineral_ch: str :type map: GXMAP :type view: str .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Note that the selection of data in the current `GXDB <geosoft.gxapi.GXDB>` is used to replot the map. Call "`reset_used_channel <geosoft.gxapi.GXSEMPLOT.reset_used_channel>`" prior to this function in order to track the values actually plotted. Call `init_group_symbols_used <geosoft.gxapi.GXSEMPLOT.init_group_symbols_used>` prior to this function to reset recording of the symbols used in plotting (for legends etc). """ gxapi_cy.WrapSEMPLOT._replot(GXContext._get_tls_geo(), db, mask_ch.encode(), mineral_ch.encode(), map, view.encode()) @classmethod def re_plot_symbol_legend(cls, db, mview): """ Replot a symbol legend in a view. :param db: Database handle :param mview: View to plot into :type db: GXDB :type mview: GXMVIEW .. 
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Searches the VIEW `GXREG <geosoft.gxapi.GXREG>` for information on a previously created legend, and if it finds that info, replots the Legend, using the current data, group key etc. """ gxapi_cy.WrapSEMPLOT._re_plot_symbol_legend(GXContext._get_tls_geo(), db, mview) @classmethod def reset_groups(cls, db, mask_ch): """ Re-group data using current settings. :param db: Database handle :param mask_ch: Mask channel :type db: GXDB :type mask_ch: str .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapSEMPLOT._reset_groups(GXContext._get_tls_geo(), db, mask_ch.encode()) @classmethod def reset_used_channel(cls, db): """ Set the "Plotted" channel to dummies :param db: Database handle :type db: GXDB .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This function is called before one or a series of plots to initialize the "Plotted" channel in all the selected lines to dummy values. As the plots are created, those points used in the plot are set to 1, so that at the end the database records which values have been plotted. This information can then be used to make a symbol legend. If the "Plotted" channel does not exist, it is created, associated, loaded, and filled with dummies. """ gxapi_cy.WrapSEMPLOT._reset_used_channel(GXContext._get_tls_geo(), db) @classmethod def select_poly(cls, db, mview, mask_ch, mineral_ch, pply, mode): """ Select data from a polygonal area on a map. 
:param db: Database handle :param mview: View Handle :param mask_ch: Mask channel to update :param mineral_ch: Mineral channel :param pply: Polygon to select from, in the view coordinates. :param mode: Mask mode (0: Append, 1: New) :type db: GXDB :type mview: GXMVIEW :type mask_ch: str :type mineral_ch: str :type pply: GXPLY :type mode: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapSEMPLOT._select_poly(GXContext._get_tls_geo(), db, mview, mask_ch.encode(), mineral_ch.encode(), pply, mode) @classmethod def set_channel_order(cls, db, lst): """ Sets preset channel order. :param db: Database handle :param lst: Channel names, handles :type db: GXDB :type lst: GXLST .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Sets channel order as follows: Sample_No X and Y Locations Mineral Grain_No Position (e.g. center, edge etc.) Grain Morph Oxides (in the order they appear in Semplot_Oxides.csv) Trace Elements (Ordered as in the periodic table) Total Mask IsPlotted (flag set when a value is plotted) Other channels Channel order is set for all "RawData" groups. """ gxapi_cy.WrapSEMPLOT._set_channel_order(GXContext._get_tls_geo(), db, lst) @classmethod def set_channel_units(cls, db): """ Set units for oxides (%) and elements (ppm) :param db: Database handle :type db: GXDB .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the channel units are already set, then they are not changed. Oxide channels are identified from the Semplot_Oxides.csv file. Trace elements are identified from the periodic table of the elements, except for "Y", if it is the current Y channel. 
""" gxapi_cy.WrapSEMPLOT._set_channel_units(GXContext._get_tls_geo(), db) @classmethod def set_itr(cls, db, ch, itr): """ Put `GXITR <geosoft.gxapi.GXITR>` into a channel. :param db: Database handle :param ch: Data channel handle :type db: GXDB :type ch: int :type itr: GXITR .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapSEMPLOT._set_itr(GXContext._get_tls_geo(), db, ch, itr) @classmethod def set_mask(cls, db, mask_ch, mineral_ch, mineral, selected, val): """ Set the mask channel ON or OFF. :param db: Database handle :param mask_ch: Mask channel :param mineral_ch: Mineral channel :param mineral: Mineral to use ("All" or "" for all) :param selected: 0 for all lines, 1 for selected lines :param val: 0 for off, 1 for on. :type db: GXDB :type mask_ch: str :type mineral_ch: str :type mineral: str :type selected: int :type val: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapSEMPLOT._set_mask(GXContext._get_tls_geo(), db, mask_ch.encode(), mineral_ch.encode(), mineral.encode(), selected, val) @classmethod def sort_data(cls, db, group, anomaly): """ Sort data by Sample No, Grain and Position :param db: Database handle :param group: Data Group handle :param anomaly: Use Anomaly channel as primary sort? :type db: GXDB :type group: int :type anomaly: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapSEMPLOT._sort_data(GXContext._get_tls_geo(), db, group, anomaly) @classmethod def template_lst(cls, lst, type): """ Fill a list with the available plot template names :param lst: Input `GXLST <geosoft.gxapi.GXLST>`. 
:param type: :ref:`SEMPLOT_PLOT` :type lst: GXLST :type type: int .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Looks first in user\\etc, then in \\etc. Looks first for file prefix "semtemplate" then "xyt" or "tri" (New-style templates with the "semtemplate" extentsion have the plot type "triplot" or "xyplot" inside them.) """ gxapi_cy.WrapSEMPLOT._template_lst(GXContext._get_tls_geo(), lst, type) @classmethod def tile_windows(cls): """ Tile currently maximimized windows. .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapSEMPLOT._tile_windows(GXContext._get_tls_geo()) @classmethod def total_oxides(cls, db, mineral_ch): """ Calculate the total oxides channel. :param db: Database handle :param mineral_ch: Mineral channel :type db: GXDB :type mineral_ch: str .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The mineral channel is needed in order to adjust the total with the Fe Corrected Ferric and Ferrous values, and these require a mineral for their identification. If none is provided, mineral "X" (unknown) is assumed. """ gxapi_cy.WrapSEMPLOT._total_oxides(GXContext._get_tls_geo(), db, mineral_ch.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXPJ.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPJ(gxapi_cy.WrapPJ): """ GXPJ class. The `GXPJ <geosoft.gxapi.GXPJ>` object is created from two `GXIPJ <geosoft.gxapi.GXIPJ>` objects, and is used for converting data in an OASIS database or map object from one map coordinate (projection) system to another. """ def __init__(self, handle=0): super(GXPJ, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPJ <geosoft.gxapi.GXPJ>` :returns: A null `GXPJ <geosoft.gxapi.GXPJ>` :rtype: GXPJ """ return GXPJ() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def clip_ply(self, min_x, min_y, max_x, max_y, max_dev, pply): """ Create a clip polygon from a projected area. :param min_x: Min X (or Longitude...) :param min_y: Min Y (or Latitude...) :param max_x: Max X :param max_y: Max Y :param max_dev: Max deviation in degrees :param pply: `GXPLY <geosoft.gxapi.GXPLY>` to be filled :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type max_dev: float :type pply: GXPLY .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A rectangular area from (MinX, MinY) to (MaxX, MaxY) is projected throught the `GXPJ <geosoft.gxapi.GXPJ>`. The resulting (non-rectangular) area is then digitized along its edges, then thinned to remove near-collinear points. The thinning is done to any point whose neighbors subtend an angle greater than (180 degrees - maximum deviation). (i.e. if max. 
dev = 0, only co-linear points would be removed). """ self._clip_ply(min_x, min_y, max_x, max_y, max_dev, pply) def convert_vv(self, vv_x, vv_y): """ Convert VVx/VVy from input projection to output projection. :param vv_x: VVx :param vv_y: VVy :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function is equivalent to `GXVV.project <geosoft.gxapi.GXVV.project>`. """ self._convert_vv(vv_x, vv_y) def convert_vv3(self, vv_x, vv_y, vv_z): """ Convert VVx/VVy/VVz projections :param vv_x: VVx :param vv_y: VVy :param vv_z: VVz :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function is equivalent to `GXVV.project_3d <geosoft.gxapi.GXVV.project_3d>`. """ self._convert_vv3(vv_x, vv_y, vv_z) def convert_xy(self, x, y): """ Convert X, Y from input projection to output projection. :param x: X (or Longitude) :param y: Y (or Latitude) :type x: float_ref :type y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x.value, y.value = self._convert_xy(x.value, y.value) def convert_xy_from_xyz(self, x, y, z): """ Convert X, Y from input projection to output projection, taking Z into account :param x: X (or Longitude) :param y: Y (or Latitude) :param z: Z (or Depth - unchanged) :type x: float_ref :type y: float_ref :type z: float .. versionadded:: 7.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function is used (for instance) when projecting voxel model locations where the user expects that the vertical position will not change. 
The regular `convert_xyz <geosoft.gxapi.GXPJ.convert_xyz>` may result in shifts of hundreds, even a thousand meters in case where you are going from the geoid to an ellipsoid. The value of Z can have an important effect on the accuracy of the results, as the normal `convert_xy <geosoft.gxapi.GXPJ.convert_xy>` assumes a value of Z=0 internally and calls `convert_xyz <geosoft.gxapi.GXPJ.convert_xyz>`. """ x.value, y.value = self._convert_xy_from_xyz(x.value, y.value, z) def convert_xyz(self, x, y, z): """ Convert X,Y,Z from input projection to output projection. :param x: X (or Longitude) :param y: Y (or Latitude) :param z: Z (or Depth) :type x: float_ref :type y: float_ref :type z: float_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x.value, y.value, z.value = self._convert_xyz(x.value, y.value, z.value) @classmethod def create(cls, input, output): """ This method creates a projection object. :param input: Input PRJ file name, "" for geodetic :param output: Ouput PRJ file name, "" for geodetic :type input: str :type output: str :returns: `GXPJ <geosoft.gxapi.GXPJ>` Object :rtype: GXPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapPJ._create(GXContext._get_tls_geo(), input.encode(), output.encode()) return GXPJ(ret_val) @classmethod def create_ipj(cls, ip_jin, ip_jout): """ This method creates a projection object from IPJs. :param ip_jin: Input Projection, (`GXIPJ <geosoft.gxapi.GXIPJ>`)0 for long/lat :param ip_jout: Output Projection, (`GXIPJ <geosoft.gxapi.GXIPJ>`)0 for long/lat :type ip_jin: GXIPJ :type ip_jout: GXIPJ :returns: `GXPJ <geosoft.gxapi.GXPJ>` Object :rtype: GXPJ .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If converting to/from long/lat in the natural coordinate system of the source/target, only the long/lat system can be passed as (`GXIPJ <geosoft.gxapi.GXIPJ>`)0. """ ret_val = gxapi_cy.WrapPJ._create_ipj(GXContext._get_tls_geo(), ip_jin, ip_jout) return GXPJ(ret_val) @classmethod def create_rectified(cls, lon, lat, x, y, rot, scl, dir): """ Create a rectified `GXPJ <geosoft.gxapi.GXPJ>` from lon,lat,rotation :param lon: Longitude at (X,Y) origin :param lat: Latitude at (X,Y) origin :param x: (X,Y) origin :param rot: Coordinate Y relative to geographic N (deg azm) :param scl: Scale to convert X,Y to m. :param dir: :ref:`PJ_RECT` :type lon: float :type lat: float :type x: float :type y: float :type rot: float :type scl: float :type dir: int :returns: `GXPJ <geosoft.gxapi.GXPJ>` Object :rtype: GXPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Given an X,Y coordinate system, the lat/lon origin and angle of the coordinate system, this will create a `GXPJ <geosoft.gxapi.GXPJ>` to convert between X,Y coordinates and Lon,Lat. The Lon/Lat is determined using a Transverse Mercator projection with central meridian through the center of the coordinates on a WGS 84 datum. """ ret_val = gxapi_cy.WrapPJ._create_rectified(GXContext._get_tls_geo(), lon, lat, x, y, rot, scl, dir) return GXPJ(ret_val) def elevation(self): """ Get elevation correction method :returns: :ref:`PJ_ELEVATION` :rtype: int .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To determine the model in use, refer to the datum_trf column in the user\\csv\\datumtrf.csv file. 
The datum and geoid model are named in the sqare brackets following the transform name as follows: name [datum_model:geoid] The datum_model is the name of the datum transformation model which will be in a file with extension .ll2 in the \\etc directory. The geoid is the name of the geoid model which will be in a grid file with extension .grd in the \\etc directory. If the geoid model is missing, this method will return `PJ_ELEVATION_NONE <geosoft.gxapi.PJ_ELEVATION_NONE>` and elevation coordinates will not be changed. """ ret_val = self._elevation() return ret_val def is_input_ll(self): """ Is the input projection a lat/long. :returns: 1 - Yes 0 - No :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_input_ll() return ret_val def is_output_ll(self): """ Is the output projection a lat/long. :returns: 1 - Yes 0 - No :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_output_ll() return ret_val def project_bounding_rectangle(self, min_x, min_y, max_x, max_y): """ Project a bounding rectangle. :param min_x: Bounding Region Min X :param min_y: Bounding Region Min Y :param max_x: Bounding Region Max X :param max_y: Bounding Region Max Y :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 5.1.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A rectangular area from (dMinX, dMinY) to (dMaxX, dMaxY) is projected throught the `GXPJ <geosoft.gxapi.GXPJ>`. The resulting region area is then digitized along its edges and a new bounding rectangle is computed. 
If there is a lot of curve through the projection the resulting bounding region may be slightly smaller than the true region. """ min_x.value, min_y.value, max_x.value, max_y.value = self._project_bounding_rectangle(min_x.value, min_y.value, max_x.value, max_y.value) def project_bounding_rectangle2(self, min_x, min_y, max_x, max_y, err): """ Project a bounding rectangle with error tolerance. :param min_x: Bounding Region Min X :param min_y: Bounding Region Min Y :param max_x: Bounding Region Max X :param max_y: Bounding Region Max Y :param err: Maximum allowable projection error if <= 0.0, will use 0.005% of smallest dimension :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :type err: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the same as `project_bounding_rectangle <geosoft.gxapi.GXPJ.project_bounding_rectangle>` except that the bounding rectangle will be limited to an area within which the projection can be performed to an accuracy better than the specified error tolerance. """ min_x.value, min_y.value, max_x.value, max_y.value = self._project_bounding_rectangle2(min_x.value, min_y.value, max_x.value, max_y.value, err) def project_bounding_rectangle_res(self, min_x, min_y, max_x, max_y, res): """ Project a bounding rectangle with resolution. :param min_x: Bounding Region Min X :param min_y: Bounding Region Min Y :param max_x: Bounding Region Max X :param max_y: Bounding Region Max Y :param res: Resolution :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :type res: float_ref .. 
versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function behaves just like ProjBoundingRectangle_PJ except that it also computes an approximate resolution at the reprojected coordinate system from a given original resolution. """ min_x.value, min_y.value, max_x.value, max_y.value, res.value = self._project_bounding_rectangle_res(min_x.value, min_y.value, max_x.value, max_y.value, res.value) def project_bounding_rectangle_res2(self, min_x, min_y, max_x, max_y, res, err): """ Project a bounding rectangle with resolution and error tolerance. :param min_x: Bounding Region Min X :param min_y: Bounding Region Min Y :param max_x: Bounding Region Max X :param max_y: Bounding Region Max Y :param res: Resolution :param err: Maximum allowable projection error if <= 0.0, will use 0.005% of smallest dimension :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :type res: float_ref :type err: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the same as `project_bounding_rectangle_res <geosoft.gxapi.GXPJ.project_bounding_rectangle_res>` except that the bounding rectangle will be limited to an area within which the projection can be performed to an accuracy better than the specified error tolerance. """ min_x.value, min_y.value, max_x.value, max_y.value, res.value = self._project_bounding_rectangle_res2(min_x.value, min_y.value, max_x.value, max_y.value, res.value, err) def project_limited_bounding_rectangle(self, min_xl, min_yl, max_xl, max_yl, min_x, min_y, max_x, max_y): """ Project a bounding rectangle with limits. 
:param min_xl: Output limited bounding region Min X :param min_yl: Min Y :param max_xl: Max X :param max_yl: Max Y :param min_x: Bounding Region Min X :param min_y: Min Y :param max_x: Max X :param max_y: Max Y :type min_xl: float :type min_yl: float :type max_xl: float :type max_yl: float :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The bounding rectangle will be limited to no larger than the area specified in the output projection. This is useful when projecting from limits that are unreasonable in the target projection. .. seealso:: `project_bounding_rectangle <geosoft.gxapi.GXPJ.project_bounding_rectangle>`. """ min_x.value, min_y.value, max_x.value, max_y.value = self._project_limited_bounding_rectangle(min_xl, min_yl, max_xl, max_yl, min_x.value, min_y.value, max_x.value, max_y.value) def setup_ldt(self): """ Setup the `GXPJ <geosoft.gxapi.GXPJ>` with LDT check. .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** By default, a `GXPJ <geosoft.gxapi.GXPJ>` on the same datum will not apply a LDT, is intended for transformations between datums. However, in some instances you might want to convert between LDTs on the same datum, such as when you have two sets of coordinates that you KNOW came from WGS84 and were placed on this datum using differnt LDT's. If you want to combine such coordinate systems, one or the other should be converted to the other's LDT. Note that a more logical way to do this would be to convert both sets back to their original WGS84 coordinates and combine in WGS84. """ self._setup_ldt() def project_bounding_volume(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Project a bounding volume. 
:param min_x: Min X :param min_y: Min Y :param min_z: Min Z :param max_x: Max X :param max_y: Max Y :param max_z: Max Z :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._project_bounding_volume(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXFFT2.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXFFT2(gxapi_cy.WrapFFT2): """ GXFFT2 class. 2-D Fast Fourier Transforms These methods now work with an `GXIMG <geosoft.gxapi.GXIMG>` object, instead of creating their own `GXFFT2 <geosoft.gxapi.GXFFT2>` object. """ def __init__(self, handle=0): super(GXFFT2, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXFFT2 <geosoft.gxapi.GXFFT2>` :returns: A null `GXFFT2 <geosoft.gxapi.GXFFT2>` :rtype: GXFFT2 """ return GXFFT2() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def fft2_in(cls, im_gi, trn_fil, spc_fil): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` transform :param im_gi: Input image :param trn_fil: Output Transform file name string :param spc_fil: Output Power Spectrum file name string :type im_gi: GXIMG :type trn_fil: str :type spc_fil: str .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._fft2_in(GXContext._get_tls_geo(), im_gi, trn_fil.encode(), spc_fil.encode()) @classmethod def filter_pg(cls, pg, con_fil, tr, dx, dy, rot): """ Apply 2D FFT filters to data in pager :param pg: Pager obj :param con_fil: FFT filter control file :param tr: `GXTR <geosoft.gxapi.GXTR>` obj :param dx: rDx - X increment :param dy: rDy - Y increment :param rot: rRot- Rotation degree :type pg: GXPG :type con_fil: str :type tr: GXTR :type dx: float :type dy: float :type rot: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._filter_pg(GXContext._get_tls_geo(), pg, con_fil.encode(), tr, dx, dy, rot) @classmethod def flt(cls, im_gi, out_fil, con_fil): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` filter :param im_gi: Input image (Transform grid) :param out_fil: Output file (Transform grid) :param con_fil: Control file :type im_gi: GXIMG :type out_fil: str :type con_fil: str .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._flt(GXContext._get_tls_geo(), im_gi, out_fil.encode(), con_fil.encode()) @classmethod def flt_inv(cls, im_gi, out_fil, con_fil): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` filter and inverse :param im_gi: Input image (Transform grid) :param out_fil: Output file :param con_fil: Control file :type im_gi: GXIMG :type out_fil: str :type con_fil: str .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._flt_inv(GXContext._get_tls_geo(), im_gi, out_fil.encode(), con_fil.encode()) @classmethod def pow_spc(cls, im_gi, spc_fil): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` transform power spectrum :param im_gi: Input image (Transform grid) :param spc_fil: Output Power Spectrum file name string :type im_gi: GXIMG :type spc_fil: str .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._pow_spc(GXContext._get_tls_geo(), im_gi, spc_fil.encode()) @classmethod def rad_spc(cls, im_gi, spc_fil): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` transform Radially averaged power spectrum :param im_gi: Input image (Transform grid) :param spc_fil: Output Radial Spectrum file name string :type im_gi: GXIMG :type spc_fil: str .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._rad_spc(GXContext._get_tls_geo(), im_gi, spc_fil.encode()) @classmethod def rad_spc_alt(cls, im_gi, spc_fil): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` transform Radially averaged power spectrum - log before average and no normalization :param im_gi: Input image (Transform grid) :param spc_fil: Output Radial Spectrum file name string :type im_gi: GXIMG :type spc_fil: str .. versionadded:: 9.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._rad_spc_alt(GXContext._get_tls_geo(), im_gi, spc_fil.encode()) @classmethod def rad_spc1(cls, img, vv): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` transform Radially averaged power spectrum for one `GXIMG <geosoft.gxapi.GXIMG>` :param img: Input image (Transform grid) :param vv: Output Radial Spectrum `GXVV <geosoft.gxapi.GXVV>` :type img: GXIMG :type vv: GXVV .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapFFT2._rad_spc1(GXContext._get_tls_geo(), img, vv) @classmethod def rad_spc2(cls, img1, img2, vv, v_vst, opt): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` transform Radially averaged power spectrum for two IMGs :param img1: Input image1 (Transform grid1 - G) :param img2: Input image2 (Transform grid2 - H) :param vv: Output Radial Spectrum `GXVV <geosoft.gxapi.GXVV>` :param v_vst: Output Radial Spectrum Standard deviation VVst (Null: no calc) :param opt: lOpt - 1: <Re(GH*/HH*)> `GXVV <geosoft.gxapi.GXVV>`; 0: <Re(GH*)> `GXVV <geosoft.gxapi.GXVV>` :type img1: GXIMG :type img2: GXIMG :type vv: GXVV :type v_vst: GXVV :type opt: int .. 
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapFFT2._rad_spc2(GXContext._get_tls_geo(), img1, img2, vv, v_vst, opt) @classmethod def td_xd_y(cls, img_tx, img_ty, out_fil, inv_flg): """ `GXFFT2 <geosoft.gxapi.GXFFT2>` filter (calculate T from the derivatives Tx and Ty) :param img_tx: Input dX image (Transform grid) :param img_ty: Input dY image (Transform grid) :param out_fil: Output T file name :param inv_flg: 0 - no invers, 1 - invers FFT applied :type img_tx: GXIMG :type img_ty: GXIMG :type out_fil: str :type inv_flg: int .. versionadded:: 5.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._td_xd_y(GXContext._get_tls_geo(), img_tx, img_ty, out_fil.encode(), inv_flg) @classmethod def trans_pg(cls, pg, opt): """ Apply 2D FFT transform to data in pager :param pg: Pager obj :param opt: :ref:`FFT2_PG` :type pg: GXPG :type opt: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapFFT2._trans_pg(GXContext._get_tls_geo(), pg, opt) @classmethod def filter_response(cls, con_fil, start, increment, input_vv, output_vv, profile_vv): """ Calculates response for filter(s) defined in control file. Not specific to 2D. :param con_fil: FFT filter control file :param start: Wavenumber start :param increment: Wavenumber increment :param input_vv: Input spectrum :param output_vv: Output spectrum :param profile_vv: Output filter profile :type con_fil: str :type start: float :type increment: float :type input_vv: GXVV :type output_vv: GXVV :type profile_vv: GXVV .. 
versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapFFT2._filter_response(GXContext._get_tls_geo(), con_fil.encode(), start, increment, input_vv, output_vv, profile_vv) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXGU.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXGU(gxapi_cy.WrapGU): """ GXGU class. Not a class. A catch-all group of functions performing various geophysical processes, including the calculation of simple EM model responses, certain instrument dump file imports, and 2D Euler deconvolution. """ def __init__(self, handle=0): super(GXGU, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXGU <geosoft.gxapi.GXGU>` :returns: A null `GXGU <geosoft.gxapi.GXGU>` :rtype: GXGU """ return GXGU() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def dipole_mag(cls, xyz_file, depth, inc, nx, ny, dx, dy): """ Calculate a dipole magnetic field into XYZ file :param xyz_file: sXYZ :param depth: rDepth :param inc: rInc :param nx: iNX :param ny: iNY :param dx: rDX :param dy: rDY :type xyz_file: str :type depth: float :type inc: float :type nx: int :type ny: int :type dx: float :type dy: float .. versionadded:: 5.1.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._dipole_mag(GXContext._get_tls_geo(), xyz_file.encode(), depth, inc, nx, ny, dx, dy) @classmethod def em_half_space_inv(cls, coil_spacing, coil_frequency, coil_configuration, tol, threshold, vv_height, vv_in_phase, vv_quadrature, vv_res, inv, err, start_val): """ Inverts EM responses to the best halfspace model. :param coil_spacing: Coil spacing: error if == 0 :param coil_frequency: Frequency :param coil_configuration: :ref:`EMLAY_GEOMETRY` :param tol: Fractional error in best fit resistivity :param threshold: Don't invert values below this :param vv_height: Height above ground :param vv_in_phase: In-phase part (ppm) :param vv_quadrature: Quadrature part (ppm) :param vv_res: On return - inverted halfspace resistivities :param inv: :ref:`EM_INV` :param err: :ref:`EM_ERR` :param start_val: Starting value for inversion (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :type coil_spacing: float :type coil_frequency: float :type coil_configuration: int :type tol: float :type threshold: float :type vv_height: GXVV :type vv_in_phase: GXVV :type vv_quadrature: GXVV :type vv_res: GXVV :type inv: int :type err: int :type start_val: float .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._em_half_space_inv(GXContext._get_tls_geo(), coil_spacing, coil_frequency, coil_configuration, tol, threshold, vv_height, vv_in_phase, vv_quadrature, vv_res, inv, err, start_val) @classmethod def em_half_space_vv(cls, coil_spacing, coil_frequency, coil_configuration, rvv, hvv, ivv, qvv): """ EM Halfspace forward model response. :param coil_spacing: Coil separation :param coil_frequency: Frequency :param coil_configuration: :ref:`EMLAY_GEOMETRY` :param rvv: Input resistivity values :param hvv: Input height values :param ivv: Output In-phase :param qvv: Output Quadrature-phase :type coil_spacing: float :type coil_frequency: float :type coil_configuration: int :type rvv: GXVV :type hvv: GXVV :type ivv: GXVV :type qvv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._em_half_space_vv(GXContext._get_tls_geo(), coil_spacing, coil_frequency, coil_configuration, rvv, hvv, ivv, qvv) @classmethod def geometrics2_db(cls, db, ra, log_wa, survey_mode, line_dir, corner, bi_uni, corner_x, corner_y, mark_space, line_space): """ Convert a Geometrics STN file to a database. 
:param db: `GXDB <geosoft.gxapi.GXDB>` handle :param ra: `GXRA <geosoft.gxapi.GXRA>` handle, STN file :param log_wa: Log file `GXWA <geosoft.gxapi.GXWA>` handle :param survey_mode: Simple mode (1) or Mapped mode (2) :param line_dir: Survey line orientation: North-south - 0 East-west - 1 :param corner: Starting survey position: SW - 0, NW - 1, SE - 2, NE - 3, :param bi_uni: Bidirectional (0) or Unidirectional (1) :param corner_x: Starting position X :param corner_y: Starting position Y :param mark_space: Mark spacing :param line_space: Line spacing :type db: GXDB :type ra: GXRA :type log_wa: GXWA :type survey_mode: int :type line_dir: int :type corner: int :type bi_uni: int :type corner_x: float :type corner_y: float :type mark_space: float :type line_space: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Assumes that the database is new and empty. If not, existing channels with names X, Y, Mag1, Mag2, Time, Date, and Mark will deleted and then created. Existing lines will be erased and then created if they are the same as the new ones. """ gxapi_cy.WrapGU._geometrics2_db(GXContext._get_tls_geo(), db, ra, log_wa, survey_mode, line_dir, corner, bi_uni, corner_x, corner_y, mark_space, line_space) @classmethod def geometrics2_tbl(cls, ra, wa, log_wa): """ Convert a Geometrics station file (STN) to a table file (TBL) :param ra: `GXRA <geosoft.gxapi.GXRA>` handle, input station file :param wa: Output TBL file :param log_wa: Log file `GXWA <geosoft.gxapi.GXWA>` handle :type ra: GXRA :type wa: GXWA :type log_wa: GXWA .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._geometrics2_tbl(GXContext._get_tls_geo(), ra, wa, log_wa) @classmethod def geometrics_qc(cls, wa, line, in_vv, tol, min_coord, max_coord, out_vv, flag_vv): """ Correct reading positions in a database. :param wa: Output error log file :param line: Database line number. For output to log file only :param in_vv: Input `GXVV <geosoft.gxapi.GXVV>`, `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` :param tol: Tolerance defined as percentage, say 50.0 means 50%. Must be >=0.0 Lower bound = (Normal Density) - (Normal Density)*Tolerance Upper bound = (Normal Density) + (Normal Density)*Tolerance :param min_coord: Minimum coordinate (X or Y) :param max_coord: Maximum coordinate (X or Y) :param out_vv: Output `GXVV <geosoft.gxapi.GXVV>`, `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` :param flag_vv: Output Flag `GXVV <geosoft.gxapi.GXVV>`, `GS_LONG <geosoft.gxapi.GS_LONG>` :type wa: GXWA :type line: str :type in_vv: GXVV :type tol: float :type min_coord: float :type max_coord: float :type out_vv: GXVV :type flag_vv: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** There are six cases to consider: ======== ==== ============= ======================================== Case Flag Solutions Symptoms ======== ==== ============= ======================================== CASE 1A: 0 No correction Recorded and actual Line lengths same Reading densities vary slightly (passed the tolerance test) -------- ---- ------------- ---------------------------------------- CASE 1B -1 No correction Line lengths same Reading densities vary and cannot pass the tolerance test -------- ---- ------------- ---------------------------------------- CASE 2A 1 Corrected by Recorded line length too short extension Possible high readings in segment(s) Corrected (by extending) and actual lengths become the same -------- ---- ------------- ---------------------------------------- CASE 2B 2 Corrected by Recorded line length too short interpolation Possible high readings in segment(s) Corrected (by extending) and actual lengths are not same. Interpolation is then applied -------- ---- ------------- ---------------------------------------- CASE 3A 1 Corrected by Recorded line length too long shifting or Possible low readings in segment(s) (shrank) Corrected (by shifting) and actual lengths are same -------- ---- ------------- ---------------------------------------- CASE 3B 2 Corrected by Recorded line length too long interpolation Possible low readings in segment(s) Corrected (by shifting) and actual lengths are not same. Interpolation is then applied ======== ==== ============= ======================================== TERMINOLOGY: Segments A segment refers to the distance and its contents between two adjacent fiducial markers Normal Density The density (number of readings) shared by the segments in a survey line. 
The number of segments with the density is greater than the number of segments having a different density in a line. Tolerance and Bound: Tolerance is defined as a percentage, say ``50% (=0.5)``. Based on the tolerance, a lower bound and upper bound can be defined: :: Lower bound = (Normal Density) - (Normal Density)*Tolerance Upper bound = (Normal Density) - (Normal Density)*Tolerance Segments will pass the tolerance test if the number of readings falls within the Lower and Upper Bounds. """ gxapi_cy.WrapGU._geometrics_qc(GXContext._get_tls_geo(), wa, line.encode(), in_vv, tol, min_coord, max_coord, out_vv, flag_vv) @classmethod def geonics3138_dump2_db(cls, db, r_ah, r_ad, log_wa, line_mult, stat_mult): """ Convert a Geonics EM31/EM38 file in dump format to a database. :param db: `GXDB <geosoft.gxapi.GXDB>` handle :param r_ah: `GXRA <geosoft.gxapi.GXRA>` handle, Header file :param r_ad: `GXRA <geosoft.gxapi.GXRA>` handle, Dump file :param log_wa: Log file `GXWA <geosoft.gxapi.GXWA>` handle :param line_mult: Line multiplier :param stat_mult: Station multiplier :type db: GXDB :type r_ah: GXRA :type r_ad: GXRA :type log_wa: GXWA :type line_mult: float :type stat_mult: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Assumes that the database is new and empty. If not, existing channels with names X, Y, Station, Conductivity, Inphase, Quadrature, and Time will deleted and then created. Existing lines will be erased and then created if they are the same as the new ones. """ gxapi_cy.WrapGU._geonics3138_dump2_db(GXContext._get_tls_geo(), db, r_ah, r_ad, log_wa, line_mult, stat_mult) @classmethod def geonics61_dump2_db(cls, db, ra, log_wa, line_mult, stat_mult): """ Convert a Geonics EM61 file in dump format to a database. 
:param db: `GXDB <geosoft.gxapi.GXDB>` handle :param ra: `GXRA <geosoft.gxapi.GXRA>` handle, dump file :param log_wa: Log file `GXWA <geosoft.gxapi.GXWA>` handle :param line_mult: Line multiplier :param stat_mult: Station multiplier - Not used in the calculation :type db: GXDB :type ra: GXRA :type log_wa: GXWA :type line_mult: float :type stat_mult: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Assumes that the database is new and empty. If not, existing channels with names X, Y, Station, Conductivity, Inphase, Quadrature, and Time will deleted and then created. Existing lines will be erased and then created if they are the same as the new ones. """ gxapi_cy.WrapGU._geonics61_dump2_db(GXContext._get_tls_geo(), db, ra, log_wa, line_mult, stat_mult) @classmethod def geonics_dat2_db(cls, db, ra, log_wa, line_mult, stat_mult): """ Convert a Geonics EM31/EM38/EM61 file in `GXDAT <geosoft.gxapi.GXDAT>` format to a database. :param db: `GXDB <geosoft.gxapi.GXDB>` handle :param ra: `GXRA <geosoft.gxapi.GXRA>` handle :param log_wa: Log file `GXWA <geosoft.gxapi.GXWA>` handle :param line_mult: Line multiplier :param stat_mult: Station multiplier - Not used in the calculation :type db: GXDB :type ra: GXRA :type log_wa: GXWA :type line_mult: float :type stat_mult: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Assumes that the database is new and empty. If not, existing channels with names X, Y, Station, Conductivity, Inphase, Quadrature, and Time will deleted and then created. Existing lines will be erased and then created if they are the same as the new ones. 
""" gxapi_cy.WrapGU._geonics_dat2_db(GXContext._get_tls_geo(), db, ra, log_wa, line_mult, stat_mult) @classmethod def gr_curv_cor(cls, vv_elev, vv_lat, vv_boug): """ Gravity Curvature (Bullard B) Correction to Bouguer anomaly :param vv_elev: Input Elevation `GXVV <geosoft.gxapi.GXVV>` :param vv_lat: Input Latitude `GXVV <geosoft.gxapi.GXVV>` :param vv_boug: Bouguer `GXVV <geosoft.gxapi.GXVV>` for Curvature Correction :type vv_elev: GXVV :type vv_lat: GXVV :type vv_boug: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._gr_curv_cor(GXContext._get_tls_geo(), vv_elev, vv_lat, vv_boug) @classmethod def gr_curv_cor_ex(cls, vv_elev, vv_lat, vv_boug, rho): """ Gravity Curvature (Bullard B) Correction to Bouguer anomaly, with user input cap density. :param vv_elev: Input Elevation `GXVV <geosoft.gxapi.GXVV>` :param vv_lat: Input Latitude `GXVV <geosoft.gxapi.GXVV>` :param vv_boug: Bouguer `GXVV <geosoft.gxapi.GXVV>` for Curvature Correction :param rho: Cap Density (g/cm^3 :type vv_elev: GXVV :type vv_lat: GXVV :type vv_boug: GXVV :type rho: float .. versionadded:: 8.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._gr_curv_cor_ex(GXContext._get_tls_geo(), vv_elev, vv_lat, vv_boug, rho) @classmethod def gr_demvv(cls, im_gdem, vv_x, vv_y, vv_z): """ Get gravity DEM grid `GXVV <geosoft.gxapi.GXVV>` for Bouguer anomaly :param im_gdem: DEM grid :param vv_x: Input X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Input Y `GXVV <geosoft.gxapi.GXVV>` :param vv_z: Output DEM `GXVV <geosoft.gxapi.GXVV>` for Bouguer Correction :type im_gdem: GXIMG :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. 
versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._gr_demvv(GXContext._get_tls_geo(), im_gdem, vv_x, vv_y, vv_z) @classmethod def gr_test(cls, xm, ym, zm, vv_x, vv_y, vv_g3, vv_g4, vv_g1, vv_g2): """ Test triangular prism gravity calculation :param xm: dXm - model dimension x :param ym: dYm - model dimension y :param zm: dZm - model depth :param vv_x: VVx - stations x :param vv_y: VVy - stations y :param vv_g3: VVg3 - 2 triangular prism gravity results :param vv_g4: VVg4 - regtangular prism gravity results :param vv_g1: VVg1 - lower triangular prism gravity results :param vv_g2: VVg2 - upper triangular prism gravity results :type xm: float :type ym: float :type zm: float :type vv_x: GXVV :type vv_y: GXVV :type vv_g3: GXVV :type vv_g4: GXVV :type vv_g1: GXVV :type vv_g2: GXVV .. versionadded:: 5.1.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._gr_test(GXContext._get_tls_geo(), xm, ym, zm, vv_x, vv_y, vv_g3, vv_g4, vv_g1, vv_g2) @classmethod def gravity_still_reading_correction(cls, db, grav_in, date, time, still, grav_out): """ Gravity Still Reading Correction on selected lines. :param db: Database :param grav_in: Input gravity channel handle [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param date: Input date channel handle [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param time: Input time channel handle [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param still: Still readings file :param grav_out: Output gravity channel handle [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type grav_in: int :type date: int :type time: int :type still: str :type grav_out: int .. 
versionadded:: 8.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._gravity_still_reading_correction(GXContext._get_tls_geo(), db, grav_in, date, time, still.encode(), grav_out) @classmethod def gravity_still_reading_database_correction(cls, db, grav_in, date, time, stillDB, station_channel, date_channel, time_channel, readings_channel, grav_out): """ Gravity Still Reading Correction on selected lines, using a still readings database :param db: Database :param grav_in: Input gravity channel handle [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param date: Input date channel handle [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param time: Input time channel handle [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param stillDB: Still readings database :param station_channel: station channel in still readings database :param date_channel: date channel in still readings database :param time_channel: time channel in still readings database :param readings_channel: readings (gravity) channel in still readings database :param grav_out: Output gravity channel handle [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type grav_in: int :type date: int :type time: int :type stillDB: GXDB :type station_channel: str :type date_channel: str :type time_channel: str :type readings_channel: str :type grav_out: int .. 
versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapGU._gravity_still_reading_database_correction(GXContext._get_tls_geo(), db, grav_in, date, time, stillDB, station_channel.encode(), date_channel.encode(), time_channel.encode(), readings_channel.encode(), grav_out) @classmethod def despike_em_array(cls, vv_in, vv_noise, vv_out, num_removed): """ Despike a time-series with individual noise levels :param vv_in: `GXVV <geosoft.gxapi.GXVV>` input time series) :param vv_noise: `GXVV <geosoft.gxapi.GXVV>` individual noise values) :param vv_out: `GXVV <geosoft.gxapi.GXVV>` despiked output time series :param num_removed: Number of spikes removed - returned :type vv_in: GXVV :type vv_noise: GXVV :type vv_out: GXVV :type num_removed: int_ref .. versionadded:: 9.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Remove spikes from a single EM time-series decay curve. Each point has its own noise level. The algorithm is to be determined. """ num_removed.value = gxapi_cy.WrapGU._despike_em_array(GXContext._get_tls_geo(), vv_in, vv_noise, vv_out, num_removed.value) @classmethod def em_layer(cls, coil_spacing, coil_frequency, coil_height, coil_configuration, n_layers, vv_thickness, vv_sigma, in_phase, quadrature): """ Calculate the EM response of a layered earth model. 
:param coil_spacing: Coil spacing, error if == 0 :param coil_frequency: Coil frequency :param coil_height: Coil height above layer [0] :param coil_configuration: :ref:`EMLAY_GEOMETRY` :param n_layers: Number of layers (including lower halfspace) :param vv_thickness: sNLayer-1 thicknesses [0] to [sNLayer-2] :param vv_sigma: sNLayer conductivities [0] to [sNLayer-1] :param in_phase: On return - in-phase part (ppm) :param quadrature: On return - quadrature part (ppm) :type coil_spacing: float :type coil_frequency: float :type coil_height: float :type coil_configuration: int :type n_layers: int :type vv_thickness: GXVV :type vv_sigma: GXVV :type in_phase: float_ref :type quadrature: float_ref :returns: 0 of OK 1 if some error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val, in_phase.value, quadrature.value = gxapi_cy.WrapGU._em_layer(GXContext._get_tls_geo(), coil_spacing, coil_frequency, coil_height, coil_configuration, n_layers, vv_thickness, vv_sigma, in_phase.value, quadrature.value) return ret_val @classmethod def em_plate(cls, strike_length, dip_length, strike, dip, plunge, x_off, y_off, z_off, plate_depth, n_spons, sig_tvv, tx_orient, tx_freq, tx_dt, params, xivv, yivv, zivv, xqvv, yqvv, zqvv): """ Calculate the conductance of a thin plate model. 
:param strike_length: Plate strike length (m) :param dip_length: Plate dip length (m) :param strike: Plate strike (degrees) from X axis :param dip: Plate dip (degrees) from horizontal :param plunge: Plate plunge (degrees) from horizontal :param x_off: Rx offset in X from Tx :param y_off: Rx offset in Y from Tx :param z_off: Rx offset in Z from Tx (+'ve down) :param plate_depth: Depth below Tx :param n_spons: :ref:`EMPLATE_DOMAIN` :param sig_tvv: The plate conductances (`GXVV <geosoft.gxapi.GXVV>` length <= 100) :param tx_orient: :ref:`EMPLATE_TX` :param tx_freq: Tx frequency (for `EMPLATE_TIME <geosoft.gxapi.EMPLATE_TIME>`) :param tx_dt: Tx time window spacing (for `EMPLATE_TIME <geosoft.gxapi.EMPLATE_TIME>`) :param params: The frequency/time parameters (SI units: f[Hz] or t[s]) :param xivv: On return - X in-phase part (ppm) :param yivv: On return - Y in-phase part (ppm) :param zivv: On return - Z in-phase part (ppm) :param xqvv: On return - X quadrature part (ppm) :param yqvv: On return - Y quadrature part (ppm) :param zqvv: On return - Z quadrature part (ppm) :type strike_length: float :type dip_length: float :type strike: float :type dip: float :type plunge: float :type x_off: float :type y_off: float :type z_off: float :type plate_depth: float :type n_spons: int :type sig_tvv: GXVV :type tx_orient: int :type tx_freq: float :type tx_dt: float :type params: float :type xivv: GXVV :type yivv: GXVV :type zivv: GXVV :type xqvv: GXVV :type yqvv: GXVV :type zqvv: GXVV :returns: 0 of OK 1 if some error :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapGU._em_plate(GXContext._get_tls_geo(), strike_length, dip_length, strike, dip, plunge, x_off, y_off, z_off, plate_depth, n_spons, sig_tvv, tx_orient, tx_freq, tx_dt, params, xivv, yivv, zivv, xqvv, yqvv, zqvv) return ret_val @classmethod def gen_ux_detect_symbols_group_name(cls, target_gdb, targets, ostr): """ Generate a group name string for UX-Detect symbols :param target_gdb: Input Targets database name :param targets: Input Targets group (line) name :param ostr: Output group name string :type target_gdb: str :type targets: str :type ostr: str_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Start a new group for the symbols in the UX-Detect system. The Target GDB is often in the form "GDB_Targets", where "GDB" is the original data. Cut off the part including the underscore when creating the map, so you don't get map group Names like "SYMBOLS_UxData_Targets_Targets". .. seealso:: `GXSTR.gen_group_name <geosoft.gxapi.GXSTR.gen_group_name>` """ ostr.value = gxapi_cy.WrapGU._gen_ux_detect_symbols_group_name(GXContext._get_tls_geo(), target_gdb.encode(), targets.encode(), ostr.value.encode()) @classmethod def import_daarc500_ethernet(cls, file, output, bytes): """ Import Ethernet data from the RMS Instruments DAARC500. :param file: File to import :param output: Output binary file :param bytes: Returned number of bytes per block :type file: str :type output: str :type bytes: int_ref .. 
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Imports Ethernet data recorded by the RMS Instruments DAARC500 instrument, and outputs the data to a new binary file, returning the number of bytes per block, to make it easier to import the data using the regular binary import. """ bytes.value = gxapi_cy.WrapGU._import_daarc500_ethernet(GXContext._get_tls_geo(), file.encode(), output.encode(), bytes.value) @classmethod def import_daarc500_serial(cls, file, channel, output, bytes): """ Import Serial data from the RMS Instruments DAARC500. :param file: File to import :param channel: Channel to import, 1-8 :param output: Output binary file :param bytes: Returned number of bytes per block :type file: str :type channel: int :type output: str :type bytes: int_ref .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Imports a single channel of the up to 8 serial data channels recorded by the RMS Instruments DAARC500 instrument, and outputs the data for that channel to a new binary file, returning the number of bytes per block, to make it easier to import the data using the regular binary import. """ bytes.value = gxapi_cy.WrapGU._import_daarc500_serial(GXContext._get_tls_geo(), file.encode(), channel, output.encode(), bytes.value) @classmethod def import_p190(cls, db, file, rec_type, wa): """ Import navigation data in the P190 format. :param db: Database handle :param file: P190 file name :param rec_type: Single letter code, e.g. "C", "E", "S", "T" or "V", or blank for all records. :param wa: Log file :type db: GXDB :type file: str :type rec_type: str :type wa: GXWA .. 
versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Imports the data, and, if projection information is included set the "X" and "Y" channel projection info. (Note: the last file imported always takes precedence). Different record types are imported to separate lines, but in the same order as in the file. Data in existing lines is overwritten. If the record type is specified, only records beginning with that letter are imported, otherwise all records (except for the header "H" records) are imported. """ gxapi_cy.WrapGU._import_p190(GXContext._get_tls_geo(), db, file.encode(), rec_type.encode(), wa) @classmethod def lag_daarc500_gps(cls, mag_fid_vv, mag_event_vv, gps_fid_vv): """ Lag the GPS fid values for the DAARC500 import. :param mag_fid_vv: Mag fid values (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :param mag_event_vv: Mag event values (`GS_LONG <geosoft.gxapi.GS_LONG>`) :param gps_fid_vv: GPS fid values (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`, altered on return) :type mag_fid_vv: GXVV :type mag_event_vv: GXVV :type gps_fid_vv: GXVV .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The fiducial times recorded for the GPS in the RMS Instrument DAARC500 are delayed, and associated with the "wrong" fid value. They should actually be moved to the previous fid value in the mag data where the event flag is non-zero. """ gxapi_cy.WrapGU._lag_daarc500_gps(GXContext._get_tls_geo(), mag_fid_vv, mag_event_vv, gps_fid_vv) @classmethod def magnetic_tilt_depth(cls, tilt_deriv_grid, horiz_deriv_grid, database): """ Calculate the depth of magnetic sources based on the tilt depth method by <NAME> et al. 
:param tilt_deriv_grid: Tilt derivative grid :param horiz_deriv_grid: Horizontal derivative grid :param database: Output database name (will overwrite existing) :type tilt_deriv_grid: str :type horiz_deriv_grid: str :type database: str .. versionadded:: 9.9 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Tilt-Depth is a magnetic depth estimation method that lends itself to rapid mapping of sedimentary basins without the complexity of unraveling the depth solutions from methods that give multiple solutions (e.g. Euler deconvolution). Reference: Sedimentary basins reconnaissance using the magnetic Tilt-Depth method by <NAME> et al. Exploration Geophysics, 2010, 41, 198-209. <NAME> & <NAME> have extended and improved this method, as have other authors. The 0-value contour of the tilt derivative grid is sampled to a new database, and the horizontal derivative grid is sampled at those locations. The inverse of the horizontal derivative values are output to the Tilt_Depth channel. """ gxapi_cy.WrapGU._magnetic_tilt_depth(GXContext._get_tls_geo(), tilt_deriv_grid.encode(), horiz_deriv_grid.encode(), database.encode()) @classmethod def maxwell_plate_corners(cls, x, y, z, dip, dip_dir, plunge, length, width, x1, y1, z1, x2, y2, z2, x3, y3, z3, x4, y4, z4): """ Calculate the corner point locations for a Maxwell Plate. 
:param x: Top-center point, X :param y: Top-center point, Y :param z: Top-center point, Z :param dip: Dip :param dip_dir: Dip-direction :param plunge: Plunge :param length: Length :param width: Width (height) :param x1: [returned] Corner 1 X :param y1: [returned] Corner 1 Y :param z1: [returned] Corner 1 Z :param x2: [returned] Corner 2 X :param y2: [returned] Corner 2 Y :param z2: [returned] Corner 2 Z :param x3: [returned] Corner 3 X :param y3: [returned] Corner 3 Y :param z3: [returned] Corner 3 Z :param x4: [returned] Corner 4 X :param y4: [returned] Corner 4 Y :param z4: [returned] Corner 4 Z :type x: float :type y: float :type z: float :type dip: float :type dip_dir: float :type plunge: float :type length: float :type width: float :type x1: float_ref :type y1: float_ref :type z1: float_ref :type x2: float_ref :type y2: float_ref :type z2: float_ref :type x3: float_ref :type y3: float_ref :type z3: float_ref :type x4: float_ref :type y4: float_ref :type z4: float_ref .. versionadded:: 6.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This routine calculates the corner locations of plates defined in the Maxwell Plate program, given the top-center location and plate geometry parameters. """ x1.value, y1.value, z1.value, x2.value, y2.value, z2.value, x3.value, y3.value, z3.value, x4.value, y4.value, z4.value = gxapi_cy.WrapGU._maxwell_plate_corners(GXContext._get_tls_geo(), x, y, z, dip, dip_dir, plunge, length, width, x1.value, y1.value, z1.value, x2.value, y2.value, z2.value, x3.value, y3.value, z3.value, x4.value, y4.value, z4.value) @classmethod def scan_daarc500_ethernet(cls, file, type, items): """ Scan Ethernet data from the RMS Instruments DAARC500. :param file: File to import :param type: Recognized type :param items: Number of items :type file: str :type type: int_ref :type items: int_ref .. 
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Scans the file to see what data type is in the Ethernet file. Currently only detects GR820 types. """ type.value, items.value = gxapi_cy.WrapGU._scan_daarc500_ethernet(GXContext._get_tls_geo(), file.encode(), type.value, items.value) @classmethod def scan_daarc500_serial(cls, file, vv_type, vv_items): """ Scan Serial data from the RMS Instruments DAARC500. :param file: File to import :param vv_type: 8 Recognized types - `GS_LONG <geosoft.gxapi.GS_LONG>` :param vv_items: 8 Numbers of items - `GS_LONG <geosoft.gxapi.GS_LONG>` :type file: str :type vv_type: GXVV :type vv_items: GXVV .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Scans the file to see which of the 8 serial channels were used to store data. """ gxapi_cy.WrapGU._scan_daarc500_serial(GXContext._get_tls_geo(), file.encode(), vv_type, vv_items) @classmethod def vv_euler(cls, vv_xin, vv_yin, img_data, imgx, imgy, imgz, vv_xout, vv_yout, vv_depth, vvdc, vv_zer, vvx_yer, wnd_sz, si, wt_pow, x_yfit): """ Get Euler solutions of depth from VVs and grids. 
:param vv_xin: Input X `GXVV <geosoft.gxapi.GXVV>` :param vv_yin: Input Y `GXVV <geosoft.gxapi.GXVV>` :param img_data: Field grid :param imgx: dF/dX grid :param imgy: dF/dY grid :param imgz: dF/dZ grid :param vv_xout: Output X `GXVV <geosoft.gxapi.GXVV>` :param vv_yout: Output Y `GXVV <geosoft.gxapi.GXVV>` :param vv_depth: Output depth `GXVV <geosoft.gxapi.GXVV>` :param vvdc: Output background field `GXVV <geosoft.gxapi.GXVV>` :param vv_zer: Output depth uncertainty `GXVV <geosoft.gxapi.GXVV>` :param vvx_yer: Output XY uncertainty `GXVV <geosoft.gxapi.GXVV>` :param wnd_sz: Window size :param si: Structure index :param wt_pow: Weighting factor :param x_yfit: :ref:`PEAKEULER_XY` :type vv_xin: GXVV :type vv_yin: GXVV :type img_data: GXIMG :type imgx: GXIMG :type imgy: GXIMG :type imgz: GXIMG :type vv_xout: GXVV :type vv_yout: GXVV :type vv_depth: GXVV :type vvdc: GXVV :type vv_zer: GXVV :type vvx_yer: GXVV :type wnd_sz: int :type si: float :type wt_pow: float :type x_yfit: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** All VVs must be REAL The output X and Y values are the same as the inputs, except if `PEAKEULER_XY_FIT <geosoft.gxapi.PEAKEULER_XY_FIT>` is selected. All other output values are set to dummy if: a) The input X or Y is a dummy b) The derived window size is a dummy. c) The derived solution is outside the range d) The solution is invalid (singular matrix) """ gxapi_cy.WrapGU._vv_euler(GXContext._get_tls_geo(), vv_xin, vv_yin, img_data, imgx, imgy, imgz, vv_xout, vv_yout, vv_depth, vvdc, vv_zer, vvx_yer, wnd_sz, si, wt_pow, x_yfit) @classmethod def vv_euler2(cls, vv_xin, vv_yin, img_data, imgx, imgy, imgz, vv_xout, vv_yout, vv_depth, vvdc, vv_zer, vvx_yer, vv_wnd, si, wt_pow, x_yfit): """ Get Euler solutions of depth from VVs and grids (method 2). 
:param vv_xin: Input X `GXVV <geosoft.gxapi.GXVV>` :param vv_yin: Input Y `GXVV <geosoft.gxapi.GXVV>` :param img_data: Field grid :param imgx: dF/dX grid :param imgy: dF/dY grid :param imgz: dF/dZ grid :param vv_xout: Output X `GXVV <geosoft.gxapi.GXVV>` :param vv_yout: Output Y `GXVV <geosoft.gxapi.GXVV>` :param vv_depth: Output depth `GXVV <geosoft.gxapi.GXVV>` :param vvdc: Output background field `GXVV <geosoft.gxapi.GXVV>` :param vv_zer: Output depth uncertainty `GXVV <geosoft.gxapi.GXVV>` :param vvx_yer: Output XY uncertainty `GXVV <geosoft.gxapi.GXVV>` :param vv_wnd: Window size (diameters of targets) :param si: Structure index :param wt_pow: Weighting factor :param x_yfit: :ref:`PEAKEULER_XY` :type vv_xin: GXVV :type vv_yin: GXVV :type img_data: GXIMG :type imgx: GXIMG :type imgy: GXIMG :type imgz: GXIMG :type vv_xout: GXVV :type vv_yout: GXVV :type vv_depth: GXVV :type vvdc: GXVV :type vv_zer: GXVV :type vvx_yer: GXVV :type vv_wnd: GXVV :type si: float :type wt_pow: float :type x_yfit: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** All VVs must be REAL .. seealso:: `vv_euler <geosoft.gxapi.GXGU.vv_euler>` """ gxapi_cy.WrapGU._vv_euler2(GXContext._get_tls_geo(), vv_xin, vv_yin, img_data, imgx, imgy, imgz, vv_xout, vv_yout, vv_depth, vvdc, vv_zer, vvx_yer, vv_wnd, si, wt_pow, x_yfit) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/dataframe.py """ Table (records, fields) handling, inherits from Pandas (http://pandas.pydata.org/) base class. :Classes: :`Data_frame`: dataframe that holds a table .. seealso:: :class:`geosoft.gxapi.GXLTB` .. 
note:: Regression tests provide usage examples: `dataframe tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_dataframe.py>`_ """ import pandas as pd import geosoft import geosoft.gxapi as gxapi from . import utility as gxu __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class DfException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.dataframe`. .. versionadded:: 9.2 """ pass def table_record(table, rec): """ Return a dictionary of a single record from a table :param table: table name :param rec: record wanted :returns: dictionary containing record values as strings .. versionadded:: 9.2 """ t = Data_frame(table, records=rec) return t.to_dict(orient='records')[0] def table_column(table, col): """ Return a dictionary of a column from a table :param table: table name :param col: column wanted :returns: dictionary containing record values as strings .. versionadded:: 9.2 """ t = Data_frame(table, columns=col).to_dict(orient='index') d = {} for rec in t.keys(): d[rec] = t[rec][col] return d def Data_frame(initial=None, records=None, columns=None): """ Pandas DataFrame from a Geosoft table. :parameters: :initial: Geosoft table name, which is normally an ASCII csv file. If the table cannot be found in the project folder `user/csv` is searched, then the Geosoft `csv` folder. :records: Record name to include, or a list of records to include. If not specified all records are included in the dataframe. :columns: Column name to be included, or a list of column names to include. If not specified all columns are included in the dataframe. :raises: :DfException: if no columns.records found in the table. If only some fields are found the dataframe is created with the found fields. :raises geosoft.gxapi.GXError: if a requested record is not found. This returns a Pandas DataFrame instance, which can be accessed and used with standard Pandas calls. 
Column names from Geosoft table files are always uppercase, regardless of case used in the table file. Record/index names from Geosoft table files are case-sensitive. Example table file "rockcode.csv": .. code:: / standard Geosoft rock codes CODE,LABEL,__DESCRIPTION,PATTERN,PAT_SIZE,PAT_DENSITY,PAT_THICKNESS,COLOR bau,BAU,BAUXITE,100,,,,RG49B181 bif,BIF,"BANDED IRON FM",202,,,,R cal,CAL,CALCRETE,315,,,,B cbt,CBT,CARBONATITE,305,,,,R128G128B192 .. code:: include geosoft.gxpy as gxpy with gxpy.GXpy() as gx: df = gxpy.dataframe.Data_frame('rockcode') print(len(df)) print(df.loc['bif', 'DESCRIPTION']) # "BANDED IRON FM" print(df.loc['bif'][1]) # "BANDED IRON FM" print(df.iloc[1,0]) # "BIF" print(df.loc['cal', 'PATTERN']) # "315" .. versionadded:: 9.2 .. versionchanged:: 9.4 """ if not type(initial) is str: raise DfException(_t('Only Geosoft tables are supported.')) df = pd.DataFrame() if initial is None: return df lst = gxapi.GXLST.create(geosoft.gxpy.MAX_LST) sr = gxapi.str_ref() if records is None: try: ltb = gxapi.GXLTB.create(initial, 0, 1, '') except geosoft.gxapi.GXError as e: raise DfException(str(e)) else: if type(records) is str: if not records: raise DfException(_t('Empty records string.')) try: ltb = gxapi.GXLTB.create(initial, 0, 1, records) except geosoft.gxapi.GXError as e: raise DfException(_t('Invalid table \'{}\' ({})').format(initial, str(e))) except geosoft.gxapi.GXAPIError as e: raise DfException(_t('Record \'{}\' not in \'{}\' ({})').format(records, initial, str(e))) records = None else: ltb = gxapi.GXLTB.create(initial, 0, 1, '') col_indexes = [] for i in range(1, ltb.fields()): ltb.get_field(i, sr) if columns is None: incl = True elif type(columns) is str: incl = sr.value == columns else: incl = sr.value in columns if incl: df[sr.value] = () col_indexes.append(i) if len(col_indexes) == 0: raise DfException(_t('Table has no columns or \'{}\' column(s) not found.'.format(columns))) if records is None: ltb.get_lst(0, lst) keys = 
list(gxu.dict_from_lst(lst, True)) vlst = list(df.columns) for j in range(len(keys)): nf = 0 for i in col_indexes: ltb.get_string(j, i, sr) vlst[nf] = sr.value nf += 1 df.loc[keys[j]] = vlst else: # selective read vlst = list(df.columns) for rec in records: j = ltb.find_key(rec) nf = 0 for i in col_indexes: ltb.get_string(j, i, sr) vlst[nf] = sr.value nf += 1 df.loc[rec] = vlst return df <file_sep>/examples/tutorial/Geosoft modules - gxapi and gxpy/grid_dimensions_gxpy.py import geosoft.gxpy as gxpy gxc = gxpy.gx.GXpy() grid = gxpy.grid.Grid.open('test.grd(GRD)') print(' dimension (nx, ny): ({}, {})'.format(grid.nx, grid.ny), '\n separation (x, y): ({}, {})'.format(grid.dx, grid.dy), '\n origin (x, y): ({}, {})'.format(grid.x0, grid.y0), '\n rotation: {}'.format(grid.rot)) <file_sep>/geosoft/gxapi/GXSHP.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSHP(gxapi_cy.WrapSHP): """ GXSHP class. The `GXSHP <geosoft.gxapi.GXSHP>` class is used to create ESRI shape files. **Note:** Shape files contain a single "geometry" type, e.g. points, arcs or polygons. They may be accompanied by a DBF file containing attribute data. """ def __init__(self, handle=0): super(GXSHP, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSHP <geosoft.gxapi.GXSHP>` :returns: A null `GXSHP <geosoft.gxapi.GXSHP>` :rtype: GXSHP """ return GXSHP() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def append_item(self): """ Append the current item and data to an old `GXSHP <geosoft.gxapi.GXSHP>` object. .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The currently stored `GXSHP <geosoft.gxapi.GXSHP>` item and data are written to the `GXSHP <geosoft.gxapi.GXSHP>` geometry and data files. (If no data fields have been defined, then the data file is not written). """ self._append_item() @classmethod def create(cls, name, type): """ Create a new `GXSHP <geosoft.gxapi.GXSHP>` object :param name: File name :param type: :ref:`SHP_GEOM_TYPE` :type name: str :type type: int :returns: `GXSHP <geosoft.gxapi.GXSHP>` object :rtype: GXSHP .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The file name is used to create the various files. The file type and extension are added: e.g. "filename.shp", "filename.dbf" The following geometry types are currently supported: Type Required geometry function. 
`SHP_GEOM_TYPE_POINT <geosoft.gxapi.SHP_GEOM_TYPE_POINT>` `set_point <geosoft.gxapi.GXSHP.set_point>` `SHP_GEOM_TYPE_ARC <geosoft.gxapi.SHP_GEOM_TYPE_ARC>` `set_arc <geosoft.gxapi.GXSHP.set_arc>` `SHP_GEOM_TYPE_POLYGON <geosoft.gxapi.SHP_GEOM_TYPE_POLYGON>` `set_polygon <geosoft.gxapi.GXSHP.set_polygon>` `SHP_GEOM_TYPE_POINTZ <geosoft.gxapi.SHP_GEOM_TYPE_POINTZ>` `set_point_z <geosoft.gxapi.GXSHP.set_point_z>` `SHP_GEOM_TYPE_ARCZ <geosoft.gxapi.SHP_GEOM_TYPE_ARCZ>` `set_arc_z <geosoft.gxapi.GXSHP.set_arc_z>` `SHP_GEOM_TYPE_POLYGONZ <geosoft.gxapi.SHP_GEOM_TYPE_POLYGONZ>` `set_polygon_z <geosoft.gxapi.GXSHP.set_polygon_z>` """ ret_val = gxapi_cy.WrapSHP._create(GXContext._get_tls_geo(), name.encode(), type) return GXSHP(ret_val) def add_int_field(self, field): """ Add an INT type data field to a shape file :param field: Field name :type field: str :returns: Index of the new field :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The returned field index should be used with the SetXXX_SHP functions to set individual data values. """ ret_val = self._add_int_field(field.encode()) return ret_val def add_double_field(self, field, dec): """ Add a REAL type data field to a shape file :param field: Field name :param dec: Number of decimal places :type field: str :type dec: int :returns: Index of the new field :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The returned field index should be used with the SetXXX_SHP functions to set individual data values. 
""" ret_val = self._add_double_field(field.encode(), dec) return ret_val def add_string_field(self, field, width): """ Add a string type data field to a shape file :param field: Field name :param width: Maximum number of characters in the string :type field: str :type width: int :returns: Index of the new field :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The returned field index should be used with the SetXXX_SHP functions to set individual data values. """ ret_val = self._add_string_field(field.encode(), width) return ret_val def find_field(self, field): """ Find the index for a data field. :param field: Field name :type field: str :returns: The index, -1 if not found. :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._find_field(field.encode()) return ret_val def max_id_num(self): """ Get the max ID number. :returns: The max ID number. :rtype: int .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._max_id_num() return ret_val def num_fields(self): """ Get the field number. :returns: The field number. :rtype: int .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_fields() return ret_val def num_records(self): """ Get the record number. :returns: The record number. :rtype: int .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_records() return ret_val def type(self): """ Get the `GXSHP <geosoft.gxapi.GXSHP>` object's geometry type. 
:returns: The `GXSHP <geosoft.gxapi.GXSHP>` object's geometry type (:ref:`SHP_GEOM_TYPE`) :rtype: int .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._type() return ret_val @classmethod def open(cls, name): """ Open an old `GXSHP <geosoft.gxapi.GXSHP>` object :param name: File name :type name: str :returns: `GXSHP <geosoft.gxapi.GXSHP>` object :rtype: GXSHP .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSHP._open(GXContext._get_tls_geo(), name.encode()) return GXSHP(ret_val) def set_arc(self, vv_x, vv_y): """ Write an XY arc (polyline) item. :param vv_x: X locations :param vv_y: Y locations :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Can ONLY be used for `SHP_GEOM_TYPE_ARC <geosoft.gxapi.SHP_GEOM_TYPE_ARC>` files. """ self._set_arc(vv_x, vv_y) def set_arc_z(self, vv_x, vv_y, vv_z): """ Write an XYZ arc (polyline) item. :param vv_x: X locations :param vv_y: Y locations :param vv_z: Z locations :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Can ONLY be used for `SHP_GEOM_TYPE_ARCZ <geosoft.gxapi.SHP_GEOM_TYPE_ARCZ>` files. """ self._set_arc_z(vv_x, vv_y, vv_z) def set_int(self, index, val): """ Set a data value to a int. :param index: Data field index :param val: Input int value :type index: int :type val: int .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The input value is converted to the field's data type. """ self._set_int(index, val) def set_ipj(self, ipj): """ Set a `GXSHP <geosoft.gxapi.GXSHP>` object's projection. :param ipj: Input `GXIPJ <geosoft.gxapi.GXIPJ>` :type ipj: GXIPJ .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the `GXSHP <geosoft.gxapi.GXSHP>` object has a projection, and it is not `IPJ_TYPE_NONE <geosoft.gxapi.IPJ_TYPE_NONE>`, then it will be output to a file with the .prj extension when the first object is output. This function should be called BEFORE the first object is written. """ self._set_ipj(ipj) def set_point(self, x, y): """ Write an XY point item. :param x: X location :param y: Y location :type x: float :type y: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Can ONLY be used for `SHP_GEOM_TYPE_POINT <geosoft.gxapi.SHP_GEOM_TYPE_POINT>` files. """ self._set_point(x, y) def set_point_z(self, x, y, z): """ Write an XYZ point item. :param x: X location :param y: Y location :param z: Z location :type x: float :type y: float :type z: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Can ONLY be used for `SHP_GEOM_TYPE_POINTZ <geosoft.gxapi.SHP_GEOM_TYPE_POINTZ>` files. """ self._set_point_z(x, y, z) def set_polygon(self, vv_x, vv_y, inclusive): """ Write an XY polygon item. :param vv_x: X locations :param vv_y: Y locations :param inclusive: ``True`` for outer ring polygon (inclusive/island), ``False`` for inner ring (exclusive/hole) :type vv_x: GXVV :type vv_y: GXVV :type inclusive: bool .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Can ONLY be used for `SHP_GEOM_TYPE_POLYGON <geosoft.gxapi.SHP_GEOM_TYPE_POLYGON>` files. """ self._set_polygon(vv_x, vv_y, inclusive) def set_polygon_z(self, vv_x, vv_y, vv_z, inclusive): """ Write an XYZ polygon item. :param vv_x: X locations :param vv_y: Y locations :param vv_z: Z locations :param inclusive: ``True`` for outer ring polygon (inclusive/island), ``False`` for inner ring (exclusive/hole) :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type inclusive: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Can ONLY be used for `SHP_GEOM_TYPE_POLYGONZ <geosoft.gxapi.SHP_GEOM_TYPE_POLYGONZ>` files. """ self._set_polygon_z(vv_x, vv_y, vv_z, inclusive) def set_double(self, index, val): """ Set a data value to a real. :param index: Data field index :param val: Input real value :type index: int :type val: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The input value is converted to the field's data type. """ self._set_double(index, val) def set_string(self, index, str_val): """ Set a data value to a string. :param index: Data field index :param str_val: Input string value :type index: int :type str_val: str .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The input string is converted to the field's data type. """ self._set_string(index, str_val.encode()) def write_item(self): """ Output the current item and data. .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The currently stored `GXSHP <geosoft.gxapi.GXSHP>` item and data are written to the `GXSHP <geosoft.gxapi.GXSHP>` geometry and data files. (If no data fields have been defined, then the data file is not written). """ self._write_item() ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/docstring_info.py # Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Source: https://raw.githubusercontent.com/openstack/rally/master/rally/common/plugin/info.py import re from sphinx.util import docstrings PARAM_OR_RETURNS_REGEX = re.compile(":(?:param|returns)") RETURNS_REGEX = re.compile(":returns: (?P<doc>.*)", re.S) PARAM_REGEX = re.compile(":param (?P<name>[\*\w]+): (?P<doc>.*?)" "(?:(?=:param)|(?=:return)|(?=:raises)|\Z)", re.S) def reindent(string): return "\n".join(l.strip() for l in string.strip().split("\n")) def parse_docstring(docstring): """Parse the docstring into its components. :returns: a dictionary of form { "short_description": ..., "long_description": ..., "params": [{"name": ..., "doc": ...}, ...], "returns": ... 
} """ short_description = long_description = returns = "" params = [] if docstring: docstring = "\n".join(docstrings.prepare_docstring(docstring)) lines = docstring.split("\n", 1) short_description = lines[0] if len(lines) > 1: long_description = lines[1].strip() params_returns_desc = None match = PARAM_OR_RETURNS_REGEX.search(long_description) if match: long_desc_end = match.start() params_returns_desc = long_description[long_desc_end:].strip() long_description = long_description[:long_desc_end].rstrip() if params_returns_desc: params = [ {"name": name, "doc": "\n".join(docstrings.prepare_docstring(doc))} for name, doc in PARAM_REGEX.findall(params_returns_desc) ] match = RETURNS_REGEX.search(params_returns_desc) if match: returns = reindent(match.group("doc")) return { "short_description": short_description, "long_description": long_description, "params": params, "returns": returns } <file_sep>/geosoft/gxapi/GXPRAGA3.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPRAGA3(gxapi_cy.WrapPRAGA3): """ GXPRAGA3 class. `GXPRAGA3 <geosoft.gxapi.GXPRAGA3>` application methods **Note:** No notes """ def __init__(self, handle=0): super(GXPRAGA3, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPRAGA3 <geosoft.gxapi.GXPRAGA3>` :returns: A null `GXPRAGA3 <geosoft.gxapi.GXPRAGA3>` :rtype: GXPRAGA3 """ return GXPRAGA3() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def launch(cls): """ This method launches the application. :returns: 1 - OK, 2 - Cancel :rtype: int .. versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapPRAGA3._launch(GXContext._get_tls_geo()) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/Hello World/hello_world_debug_extension.py import pydevd import geosoft.gxapi as gxapi import geosoft.gxpy as gxpy def rungx(): # remote debug, MUST be removed/commented out for production # this will break inside a running "Debug Extension" configuration in PyCharm pydevd.settrace('localhost', port=34765, stdoutToServer=True, stderrToServer=True) gxc = gxpy.gx.gx() gxapi.GXSYS.display_message("GX Python", "Hello {}".format(gxc.gid)) if __name__ == "__main__": gxc = gxpy.gx.GXpy() print('Hello {}'.format(gxc.gid))<file_sep>/docs/GXUNC.rst .. _GXUNC: GXUNC class ================================== .. autoclass:: geosoft.gxapi.GXUNC :members: .. _UTF8: UTF8 constants ----------------------------------------------------------------------- UTF-8 Defines .. autodata:: geosoft.gxapi.UTF8_MAX_CHAR :annotation: .. autoattribute:: geosoft.gxapi.UTF8_MAX_CHAR <file_sep>/docs/GXLAYOUT.rst .. _GXLAYOUT: GXLAYOUT class ================================== .. autoclass:: geosoft.gxapi.GXLAYOUT :members: .. _LAYOUT_CONSTR: LAYOUT_CONSTR constants ----------------------------------------------------------------------- Layout constraint specifiers .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_LEFT :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_LEFT .. 
autodata:: geosoft.gxapi.LAYOUT_CONSTR_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_RIGHT .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_TOP :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_TOP .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_BOTTOM :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_BOTTOM .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_WIDTH :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_WIDTH .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_HEIGHT :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_HEIGHT .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_HCENTER :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_HCENTER .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_VCENTER :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_VCENTER .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_MOVEL :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_MOVEL .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_MOVER :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_MOVER .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_MOVET :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_MOVET .. autodata:: geosoft.gxapi.LAYOUT_CONSTR_MOVEB :annotation: .. 
autoattribute:: geosoft.gxapi.LAYOUT_CONSTR_MOVEB <file_sep>/docs/templates/version_history.rst {% for mod in modules %} {{ mod.__name__ }} module history ========================================== {% for ver in mod._versions %}{% set contents = mod._version_history[ver] %} {% set classes = contents['classes'] %} {% set funcs = contents['functions'] %} Version {{ ver }} ----------------- {% if classes %} New Classes ^^^^^^^^^^^ {% for cl in classes %} {{ cl }} {% endfor %} {% endif %} New Functions ^^^^^^^^^^^^^ {% for fn in funcs %} {{ fn }} {% endfor %} {% endfor %} {% endfor %} <file_sep>/examples/geosoft_research/self_organizing_maps/python/mvar.py # -*- coding: utf-8 -*- """ Created on Thu Feb 27 16:51:26 2014 @author: ian """ import time import numpy as np import random from enum import Enum import geosoft.gxpy.gx as gxp import geosoft.gxpy.utility as gxu import geosoft.gxpy.gdb as gxgdb def isqrt(n): x = n y = (x + 1) // 2 while y < x: x = y y = (x + n // x) // 2 return x def no_stop(): return False class MvarException(RuntimeError): pass class NormType(Enum): none = 0 # none normal = 1 # gaussian (x-mean)/standard deviation lognormal = 2 # gaussian log(x)/standard deviation(log(x)) def normalize(data, norm): """ Normalize data array, normalized in-place. Normalization is based on simple Gaussian distribution, such that the resulting data has a mean of 0.0, and a standard deviation of 1.0. Logarithmic normalization first takes the log of the data, clipped to a minimum positive value. If called with only NormType defined, the mean and standard deviation are calculated from the data, and returned. For logarithmic normalization, the minimum positive value is either the minimum of the data, if positive, or the standard deviation/1.0e6. The returned tuple can be passed to either this function to apply the normalization parameters to other data, or to denormalize() to convert normalized data back to original scaling. 
:param data: 1D data to normalise :param norm: normalization tuple (NormType, mean, std, logmin), where: =========== =========================================== NormType one of NormType enum mean mean value to remove std standard deviation logmin minimum value for logarithmic normalization =========== =========================================== If only NormType is provided as a single value (not a tuple), the mean, std and logmin are calculated from the data. :returns: tuple of normalization parameters (NormType, mean, std, logmin) which can be used to normalize or denormalize data. """ if isinstance(norm, tuple): # normalization parameters are provided ntype = norm[0] if ntype == NormType.none: return ntype, None, None, None dmean = norm[1] dstd = norm[2] dlogmin = norm[3] # for logarithmic normalization, take the log of the data if ntype == NormType.lognormal: data[:] = np.log(np.clip(data, dlogmin, data.max())) else: ntype = norm if ntype == NormType.none: return ntype, None, None, None if ntype == NormType.lognormal: dlogmin = data.min() if dlogmin <= 0.0: dstd = data.std() if dstd == 0.0: return NormType.none, None, None, None else: dlogmin = dstd/1.0e6 data[:] = np.log(np.clip(data, dlogmin, data.max())) else: data[:] = np.log(data) dmean = data.mean() dstd = data.std() else: dmean = data.mean() dstd = data.std() dlogmin = None # normalize data[:] = (data - dmean) / dstd return ntype, dmean, dstd, dlogmin def denormalize(data, spec): """ Denormalize data in-place. :param data: :param spec: normalization specification as tuple (Normalize, mean, std). 
""" if spec[0] == NormType.none: return data[:] = data * spec[2] + spec[1] if spec[0] == NormType.lognormal: data[:] = np.exp(data) def bmu_euclidean(classes, vector, amp=None): """ BMU based on euclidean distance similarity :param classes: numpy array of class vectors, dimensioned same as vec :param vector: vector to test :param amp: ignored :returns: index to BMU in classes """ classes = classes.reshape(-1, len(vector)) return int(np.square(classes - vector).sum(1).argmin()) def amp_squared(classes): """ Returns conditioned amplitudes squared, for use as amp= array in bmu_cosine. Note that amplitudes == 0.0 will be replaced with 1.0 to prevent division by 0. :param classes: list of class centroid values :returns: array of vector amplitudes """ amp = np.sum(np.square(classes), axis=1) amp[amp == 0.0] = 1.0 return amp def bmu_cosine(classes, vector, amp=None): """ BMU based on cosine (direction) test (A dot B)/sqrt(sum[A*A] * sum[B*B]). See http://en.wikipedia.org/wiki/Cosine_similarity :param classes: numpy array of class vectors, shaped (-1,vec.shape[0]) :param vector: vector to test :param amp: optional array of corresponding class amplitudes squared. This is an optimization to avoid recalculating class amplitudes with each call. Use amp_squared() to calculate amplitudes, which also replaces zero amplitudes with 1.0 to avoid division by 0. 
:returns: index to BMU in classes """ # class amplitude and vector amplitude amp_vec = np.sum(np.square(vector)) if amp is None: amp = np.sum(np.square(classes), axis=1).flatten() min_c = int(amp.argmin()) if amp_vec == 0.0: return min_c # make zero-length class vectors have length 1 amp[amp == 0.0] = 1.0 elif amp_vec == 0.0: return int(amp.argmin()) dot = np.dot(classes, vector) / np.sqrt(amp * amp_vec) return int(dot.argmax()) # list of similarity functions _simfunc = {'Euclidean distance': bmu_euclidean, 'Cosine (direction)': bmu_cosine} def similarity_functions(): """ :returns: list of available built-in similarity function """ def sortkey(p): if p == "Euclidean distance": return "" else: return p simfunc = list(_simfunc.keys()) simfunc.sort(key=sortkey) return simfunc def classify_data(classes, data, similarity=None): """ Classify a provided data vector to the provided classes. The data may not contain dummies :param classes: set of classes to choose from, shaped (-1,data.shape[1]) :param data: array of multivariate data to classify, shape (-1,nVar) :param similarity: similarity function :returns: classification array """ # set default similarity function to bmu_euclidean default_sim = bmu_euclidean def bmu(vec): return default_sim(classes, vec, amp=class_amp) if not (similarity is None): if isinstance(similarity, str): default_sim = _simfunc.get(similarity, None) if default_sim is None: raise MvarException("Unknown similarity function '{}'".format(similarity)) else: default_sim = similarity # som amplitudes class_amp = np.sum(np.square(classes), axis=1) # classify data_classes = np.apply_along_axis(bmu, 1, data) return data_classes def euclidean_distance(class_values, data): """ Return array of Euclidean distance between classValue and data vectors. If you have an array of classes and a 1-D array of assignments that come from classify_data, pass classes[assignments] for classValue. 
:param class_values: numpy array of class vectors, shaped (-1,vec.shape[0]) :param data: data array shaped (len(assigned),classes.shape[1]) :returns: numpy 1D array of distances """ return np.sqrt(np.square(data - class_values).sum(1)) def separations(classes): """ Return the average Euclidean separation between neighboring classes in a 2D class(neuron) network of vectors. Returns a 2D distance with dimension will be one less than passed classes. :returns: 2D array of average separation between class neurons """ diff = np.zeros((classes.shape[0] - 1, classes.shape[1] - 1), dtype=float) for i in range(classes.shape[0] - 1): for j in range(classes.shape[1] - 1): d1 = np.linalg.norm(classes[i, j] - classes[i + 1, j]) d2 = np.linalg.norm(classes[i, j] - classes[i, j + 1]) d3 = np.linalg.norm(classes[i + 1, j] - classes[i + 1, j + 1]) d4 = np.linalg.norm(classes[i, j + 1] - classes[i + 1, j + 1]) diff[i, j] = (d1 + d2 + d3 + d4) / 4.0 return diff class SOM: """ Self-Organizing-Map classification :param data: 2D array of multivariate training data (rows, variables), must be float, no dummies :param nclasses: the number of classes in the network, must be one of list_dim() :param neighborhood: initial training neighborhood in nodes, default includes all classes :param rate: rate of focus refinements, default 0.9999 :param focus: number of focusing passes, default 1/(1-rate) :param weight: adjustment weight for each training adjustment, default 0.01 :param similarity: a similarity function (classes,vector) that returns index of closest match of vector to one of passed numpy array of classes. Default is Euclidean static function bmu_euclidean(). 
Built-in functions are: ============= ======================================================= bmu_euclidean closest match based on simple Euclidean distance bmu_cosine closest match based on vector direction and amplitude ============= ======================================================= :param progress: progress function (string, percent, som), som is the current som net :param stop: stop request function, returns True to stop the analysis :param levels: number of anomalous levels to add, default is 0 (see below) :param percent: percentage of data to consider anomalous, default is 5.0 percent If levels > 0, the SOM network is expanded in steps by analysing the anomalous data that is farthest from the existing network based on the standard deviation of the distance of data from the network of the previous level. #. Calculate an anomalous cutoff based on standard deviation distance to class #. Collect anomalous data that is farthest from current neurons #. Run SOM on this anomalous data to come up with n+1..2n classes #. Repeat 2 and 3 up to number of levels The result is a set of stratified classifications in which the higher class sets are ever more anomalous. Setting level=0 (the default) creates a conventional SOM. The most useful level setting based on the geoscience datasets studies so far is 1, which creates a naturally anomalous set of classifications in the n+1 to 2n range. For example, for a dimension 3 network classified for levels=1, the there will be 18 classes; classes 0 to 8 are background classes, and classes 9 to 19 are anomalous classes. 
""" # minimum and maximum dimensions _minDimension = 2 _maxDimension = 16 def __init__(self, data, nclasses, neighborhood=0, rate=0.9999, focus=None, weight=0.01, levels=0, percent=5.0, similarity=None, progress=print, stop=None): # sensible inputs if not (nclasses in self.list_dim()): l = self.list_dim() raise MvarException("nclasses({}) must one of {}".format(nclasses, l)) self.dim = isqrt(nclasses) self.nVar = data.shape[1] if self.nVar <= 0: raise MvarException("Variables ({}) must be 1 or larger".format(self.nVar)) if rate >= 1.0: raise MvarException("rate({}) must be less than 1.0".format(rate)) # training parameters self.trnData = data if neighborhood > 0: self.nbh = neighborhood else: self.nbh = self.dim if (rate >= 1.0) or (rate < 0.5): self.rate = 0.9999 else: self.rate = rate if focus is None: self.focus = int(1 / (1 - self.rate)) else: self.focus = focus self.weight = weight self.progress = progress # similarity function if similarity is None: self._bmu = bmu_cosine else: self._bmu = similarity if stop is None: self.stop = no_stop else: self.stop = stop # initialize som to random values selected from data self.som = np.zeros((self.dim, self.dim, data.shape[1])) sample = np.random.permutation(nclasses).reshape((self.dim, self.dim)) * int(data.shape[0] / nclasses) for i in range(self.dim): for j in range(self.dim): self.som[i, j] = data[sample[i, j], :] # train the SOM self._train() # expand for anomalous data if (levels > 0) and (percent > 0.0): for lev in range(levels): progress("Anomalous {}%, level {}".format(percent, lev + 1)) classes = self.som.reshape(-1, self.nVar) cls = classify_data(classes, data, similarity=self._bmu) eud = euclidean_distance(classes[cls], data) cutoff = np.percentile(eud, int(100.0 - percent)) data = data[eud > cutoff, :] # stop if we do not have a minimum amount of data if data.shape[0] < (self.dim * self.dim): break s = SOM(data, nclasses, neighborhood=neighborhood, rate=rate, weight=weight, progress=progress, 
stop=self.stop) # add anomalous som to base som self.som = np.append(self.som, s.som) # reshape the som to a simple 1D list of variable set self.som = self.som.reshape((-1, data.shape[1])) @staticmethod def list_dim(): l = [] for i in range(SOM._minDimension, SOM._maxDimension + 1): l.append(i ** 2) return l def _alignment(self, vec): """ alignment (dot-product) similarity""" dot = np.dot(self.som.reshape(-1, self.nVar), vec) return int(dot.argmax()) def _adjustall(self, vec): """ Update all neurons to the data vec """ self.som += self.weight * (vec - self.som) def _adjustneighborhood(self, vec, bmu, gamma): """ Update neurons based on neighborhood distance from the BMU """ v, h, n = self.som.shape ind = np.indices((v, h)) ind[0] -= bmu[0] ind[1] -= bmu[1] dist = np.sqrt(np.square(ind[0]) + np.square(ind[1])) mask = np.less(dist, gamma).astype(float) mask *= self.weight mask = np.multiply.outer(mask, np.ones(n, int)) self.som += mask * (vec - self.som) def _train(self): """ train the som """ self.progress("Training...", 0, None) data = self.trnData gamma = float(self.nbh) npass = 0 while gamma > 1.0: npass += 1 if npass % 1000 == 0: self.progress(" Pass {}".format(npass), int(100 - (gamma - 1) * 100 / (self.nbh - 1)), self) if self.stop(): return vec = data[int(data.shape[0] * random.random())] # choose a random data point from the sample data bmu = divmod(self._bmu(self.som.reshape(-1, vec.shape[0]), vec), self.dim) # closest to 2D BMU if gamma >= self.dim: # adjust all self._adjustall(vec) else: self._adjustneighborhood(vec, bmu, gamma) gamma *= self.rate # focus neurons for i in range(self.focus): if i % 1000 == 0: self.progress(" Focus {} of {}".format(i, self.focus), i, None) if self.stop(): return vec = data[int(data.shape[0] * random.random())] # choose a random data point from the sample data bmu = divmod(self._bmu(self.som.reshape(-1, vec.shape[0]), vec), self.dim) self._adjustneighborhood(vec, bmu, 0.5) def density(self): """ :returns: density matrix for 
the current SOM based on training data """ # classify all the training data class_data = classify_data(self.som.reshape(-1, self.trnData.shape[1]), self.trnData, self._bmu) dens = np.bincount(class_data) # TODO: used to be self.som.reshape((self.dim,self.dim)) - double check why this did not work return dens.reshape((-1,self.dim)) def _no_stop(): return False class SOMgdb: """ Apply SOM analysis to a Geosoft GDB :param gdb: database file to process :param fields: list of multivariate fields to be used in the som :param nomalize list of normalizations (NormType) requested, one for each field or a single type for all :param dim: som dimension. The SOM neural network is square, so the number of base classifications is (dim*dim) :param per: percent cutoff for anomalous classes. Anomalous classes are determined based on a percentage of the data that least-fits the base neurons. :param class_err: names of the output fields for the classification index and the Euler Distance :param ch_filter: tuple( filter_channel, filter_value ), filter to only process data that matches filter_value in the filter_channel. This is to limit analysis to a specific existing classification. :param similarity: a similarity function (classes,vector) that returns index of closest match of vector to one of passed numpy array of classes. Default is Euclidean static function bmu_euclidean(). Built-in functions are: ============= ======================================================= bmu_euclidean closest match based on simple Euclidean distance bmu_cosine closest match based on vector direction ============= ======================================================= :param progress: function called to report progress (message, percent_complete) :param stop: function called to test for a stop request, return True to stop working. 
""" def __init__(self, gdb, fields, dim=16, per=2.0, normalize=NormType.none, class_err=('Class', 'EuD'), ch_filter=('', None), similarity=None, progress=print, stop=None): # read/sample data self.gdb = gdb self.fields = list(fields) self.dim = dim self.per = per self.outClass = class_err[0] self.outDist = class_err[1] if ch_filter[0]: self.ch_filter = (ch_filter[0], gxp.rdecode(ch_filter[1])) else: self.ch_filter = ('', None) # similarity function if similarity is None: self._sim = bmu_euclidean else: self._sim = similarity self.progress = progress self.progress("Database: {}".format(gdb.file_name)) self.progress("Data: {}".format(fields)) # normalize the normalise list norm = [] norm.append(normalize) n = len(self.fields) - len(norm) for i in range(n): norm.append(norm[-1]) if stop: self.stop = stop else: self.stop = _no_stop # read data training_data = self._readdata() if training_data.shape[0] == 0: raise MvarException("No data to process.") # Normalize the data self.progress("Normalizing ...") self.normspecs = self._normalize(training_data, norm) # setup the som last = time.perf_counter() self.progress("Training the SOM ...") if self.per > 0.0: levels = 1 per = self.per else: levels = 0 per = 0.0 self.som = SOM(training_data, self.dim, progress=self.progress, levels=levels, percent=per) self.progress("Training time {:.2f} seconds".format(time.perf_counter() - last)) # classify the data self.progress("Classifying the data...") self._classify_db() som = self.som.som.reshape((-1, training_data.shape[1])) for i in range(som.shape[0]): print('{}> {}'.format(i, som[i])) def _readdata(self): self.nCh = len(self.fields) # read line at a time lines = self.gdb.list_lines(select=True) nl = len(lines) # if ch_filter, add filter to the data if self.ch_filter[0]: self.fields.append(self.ch_filter[0]) n = 0 data = [] for l in lines: npd, ch, fid = self.gdb.read_line(l, channels=self.fields, dummy=gxgdb.READ_REMOVE_DUMMYROWS) # build data array if n == 0: data = npd # .copy() 
else: data = np.vstack((data, npd)) n += 1 if self.progress: self.progress('{} + {} from line {}'.format(data.shape, npd.shape[0], l), n * 100 / nl) if self.stop(): raise MvarException("Stop requested") del npd # filter if self.ch_filter[0]: # create filter array, True for values that match the filter value filt = data[:, -1] == self.ch_filter[1] # create final data array with only filtered values, and drop the filter from the data data = data[filt, :-1] self.fields = self.fields[:-1] return data def _normalize(self, data, norm): """ Normalize data array. :param data: 2D data to normalise, shape (rows,variables) :param norm: list of normalization types, one per variable: :returns: list of normalization specs """ normspecs = [] nfields = data.shape[1] for i in range(nfields): if self.progress: self.progress("{}: {}".format(norm[i],self.fields[i]), i * 100.0 / nfields) normspecs.append(normalize(data[:,i],norm[i])) return normspecs def _apply_norms(self, data): """ Normalize data to match the SOM normalization """ nfields = data.shape[1] for i in range(nfields): normalize(data[:,i],self.normspecs[i]) def _classify_db(self): # create output channels self.gdb.delete_channel(self.outClass) self.gdb.new_channel(self.outClass, np.int32) self.gdb.delete_channel(self.outDist) self.gdb.new_channel(self.outDist, np.float64) # if filter, add filter to the data if self.ch_filter[0]: self.fields.append(self.ch_filter[0]) # put results back in the database lines = self.gdb.list_lines(select=True) nl = len(lines) n = 0 for l in lines: ln, lsymb = self.gdb.line_name_symb(l) data, ch, fid = self.gdb.read_line(l, channels=self.fields) dummy = gxu.gx_dummy(data.dtype) # mask will hold True for data to be removed from output mask = np.apply_along_axis(lambda a: dummy in a, 1, data) if self.ch_filter[0]: filt = data[:, -1] != self.ch_filter[1] mask += filt data = data[:, :-1] # remove filter channel data[mask] = 0.0 self._apply_norms(data) classes = self.som.som.reshape(-1, 
data.shape[1]) clss = classify_data(classes, data, similarity=self._sim) err = euclidean_distance(classes[clss], data) clss[mask] = gxu.gx_dummy(clss.dtype) err[mask] = gxu.gx_dummy(err.dtype) self.gdb.write_channel(lsymb, self.outClass, clss, fid=fid) self.gdb.write_channel(lsymb, self.outDist, err, fid=fid) n += 1 self.progress('Writing line {}'.format(ln), (n * 100.0) / nl) if self.stop(): raise MvarException("Stop requested") if self.ch_filter[0]: self.fields = self.fields[:-1] <file_sep>/examples/extra_tests/gdb_stress_test.py import os import sys import geosoft.gxpy.gx as gx import geosoft.gxpy.gdb as gxdb gxc = gx.GXpy() try: dir = sys.argv[1] except IndexError: print('folder path that contains one or more test databases is required as a single command line parameter') exit() max_lines_per_db = 4 for filename in os.listdir(dir): if filename.endswith('.gdb'): path = os.path.join(dir, filename) with gxdb.Geosoft_gdb.open(path) as gdb: if 'empty' in filename: gdb.delete_line('L0') print(gdb.file_name, gdb.list_channels()) n = 0 for line in gdb.list_lines(): data, ch, fid = gdb.read_line(line) print(line, data.shape) n += 1 if n >= max_lines_per_db: break<file_sep>/geosoft/gxpy/tests/test_free_gdb.py import unittest import os import numpy as np import geosoft.gxapi as gxapi import geosoft.gxpy.system as gsys import geosoft.gxpy.gdb as gxdb import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.metadata as gxmeta import geosoft.gxpy.vv as gxvv from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() print('User:', cls._gx.gid) cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(__file__), 'test_database.zip'), folder=cls._gx.temp_folder()) cls.gdb_name = os.path.join(cls.folder, files[0]) def skip(self): if self._gx.entitled: print('\n****** Skipping free-licence test for licenced ID: {} ******'.format(self._gx.gid)) return False return True def tf(f): return 
            os.path.join(os.path.dirname(__file__), f)

    def test_noprops_GDB(self):
        self.start()
        if self.skip():
            with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb:
                self.assertTrue(len(gdb.list_channels()) >= 6)
                self.assertTrue('X' in gdb.list_channels())
                self.assertTrue('dx' in gdb.list_channels(chan=gxdb.CHAN_ALL))
                self.assertTrue('vector' in gdb.list_channels(chan=gxdb.CHAN_ARRAY))
                self.assertFalse('vector' in gdb.list_channels(chan=gxdb.CHAN_NORMAL))
                self.assertEqual(gdb.channel_width('vector'), 3)
                self.assertEqual(gdb.channel_width('x'), 1)
                gdb.discard()

    def test_read(self):
        self.start()
        if self.skip():
            with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb:
                for l in gdb.list_lines():
                    npd, ch, fid = gdb.read_line(l)
                    self.assertEqual(len(ch), 8)

    def test_large_write(self):
        self.start()
        if self.skip():
            try:
                name = None
                with gxdb.Geosoft_gdb.new('new', overwrite=True) as gdb:
                    name = gdb.file_name
                    npd = np.empty((200000, 2))
                    npd[:, :] = np.nan
                    line = gdb.new_line('test')
                    gdb.write_line(line, npd, ['x', 'y'])
                    npd2, ch, fid = gdb.read_line(line)
                    self.assertEqual(len(ch), 2)
                    self.assertEqual(npd2.shape, npd.shape)
            finally:
                # always remove the temporary database files
                gxdb.delete_files(name)

    def test_va_write(self):
        self.start()
        if self.skip():
            try:
                name = None
                with gxdb.Geosoft_gdb.new('new', overwrite=True) as gdb:
                    name = gdb.file_name
                    npd = np.empty((2000000, 3))
                    npd[:, :] = np.nan
                    line = gdb.new_line('test')
                    c = gxdb.Channel.new(gdb, 'xx', array=3)
                    gdb.write_line(line, npd, ['xx[0]', 'xx[1]', 'xx[2]'])
                    c = gxdb.Channel(gdb, 'xx')
                    self.assertEqual(c.array, 3)
                    npd2, ch, fid = gdb.read_line(line)
                    self.assertEqual(len(ch), 3, npd.shape)
            finally:
                gxdb.delete_files(name)

    def test_grid(self):
        self.start()
        if self.skip():
            name = 'test_free.grd(GRD)'
            with gxgrd.Grid.new(name, properties={'nx': 1200, 'ny': 800}, overwrite=True) as grd:
                name = grd.file_name_decorated
                self.assertEqual(grd.nx, 1200)
                self.assertEqual(grd.ny, 800)
            with gxgrd.Grid.open(name) as grd:
                grd.delete_files()
                self.assertEqual(grd.nx, 1200)
                self.assertEqual(grd.ny, 800)

    def test_voxel(self):
        self.start()
        if self.skip():
            name = 'voxel.geosoft_voxel'
            gxapi.GXVOX.generate_constant_value(name,
                                                1.0, 5,
                                                0, 0, 0,
                                                1, 1, 1,
                                                300, 200, 50,
                                                gxcs.Coordinate_system().gxipj,
                                                gxmeta.Metadata().gxmeta)

            gxvox = gxapi.GXVOX.create(name)
            minx = gxapi.float_ref()
            miny = gxapi.float_ref()
            minz = gxapi.float_ref()
            maxx = gxapi.float_ref()
            maxy = gxapi.float_ref()
            maxz = gxapi.float_ref()
            gxvox.get_area(minx, miny, minz, maxx, maxy, maxz)
            self.assertEqual(minx.value, -0.5)
            self.assertEqual(maxz.value, 49.5)

            vvx = gxvv.GXvv()
            vvy = gxvv.GXvv()
            vvz = gxvv.GXvv()
            gxvox.get_location_points(vvx.gxvv, vvy.gxvv, vvz.gxvv)

            gxvoxe = gxapi.GXVOXE.create(gxvox)
            vvd = gxvv.GXvv([float(n) for n in range(500)])
            gxvoxe.vector(0., 0., 0., 1.5, 1.5, 0.5, vvd.gxvv, gxapi.VOXE_EVAL_INTERP)
            self.assertEqual(vvd.length, 500)
            self.assertEqual(vvd[0][0], 1.0)
            self.assertTrue(np.isnan(vvd[499][0]))

            gxvoxe = None
            gxvox = None
            # best-effort cleanup of the voxel files
            try:
                os.remove(name)
                os.remove(name + '.xml')
            except:
                pass

###############################################################################################

if __name__ == '__main__':
    unittest.main()
<file_sep>/geosoft/gxapi/GXARCSYS.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXARCSYS(gxapi_cy.WrapARCSYS):
    """
    GXARCSYS class.

    This library is not a class. It contains various general system
    utilities used by the Geosoft extensions for ArcGIS.
""" def __init__(self, handle=0): super(GXARCSYS, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXARCSYS <geosoft.gxapi.GXARCSYS>` :returns: A null `GXARCSYS <geosoft.gxapi.GXARCSYS>` :rtype: GXARCSYS """ return GXARCSYS() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def get_browse_loc(cls, path): """ Get the current catalog browser location in ArcGIS :param path: Path String :type path: str_ref .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Gets the "local" directory (current catalog browser location in ArcGIS if map has not been saved, otherwise MxD path). We cannot mess with the CWD in ArcGIS because there MxD settings for relative/absolute paths depends on it. """ path.value = gxapi_cy.WrapARCSYS._get_browse_loc(GXContext._get_tls_geo(), path.value.encode()) @classmethod def get_current_doc(cls, path): """ Get the current Mx Document file name :param path: Path String :type path: str_ref .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the current document is not yet saved, this will return an empty string. """ path.value = gxapi_cy.WrapARCSYS._get_current_doc(GXContext._get_tls_geo(), path.value.encode()) @classmethod def set_browse_loc(cls, path): """ Set the current catalog browser location in ArcGIS :param path: Path String :type path: str .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Will also set the current working directory (CWD) if the MxD has not been saved. 
We cannot mess with the CWD in ArcGIS because their MxD settings for relative/absolute paths depends on it. """ gxapi_cy.WrapARCSYS._set_browse_loc(GXContext._get_tls_geo(), path.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXIPJ.rst .. _GXIPJ: GXIPJ class ================================== .. autoclass:: geosoft.gxapi.GXIPJ :members: .. _IPJ_3D_FLAG: IPJ_3D_FLAG constants ----------------------------------------------------------------------- 3D Flags .. autodata:: geosoft.gxapi.IPJ_3D_FLAG_NONE :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_FLAG_NONE .. autodata:: geosoft.gxapi.IPJ_3D_FLAG_INVERTANGLES :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_FLAG_INVERTANGLES .. autodata:: geosoft.gxapi.IPJ_3D_FLAG_INVERTZ :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_FLAG_INVERTZ .. autodata:: geosoft.gxapi.IPJ_3D_FLAG_ORDER_ROTATION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_FLAG_ORDER_ROTATION .. _IPJ_3D_ROTATE: IPJ_3D_ROTATE constants ----------------------------------------------------------------------- 3D Rotation Mode .. autodata:: geosoft.gxapi.IPJ_3D_ROTATE_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_ROTATE_DEFAULT .. autodata:: geosoft.gxapi.IPJ_3D_ROTATE_XYZ :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_ROTATE_XYZ .. autodata:: geosoft.gxapi.IPJ_3D_ROTATE_XZY :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_ROTATE_XZY .. autodata:: geosoft.gxapi.IPJ_3D_ROTATE_YXZ :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_ROTATE_YXZ .. autodata:: geosoft.gxapi.IPJ_3D_ROTATE_YZX :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_ROTATE_YZX .. autodata:: geosoft.gxapi.IPJ_3D_ROTATE_ZXY :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_ROTATE_ZXY .. 
autodata:: geosoft.gxapi.IPJ_3D_ROTATE_ZYX :annotation: .. autoattribute:: geosoft.gxapi.IPJ_3D_ROTATE_ZYX .. _IPJ_CSP: IPJ_CSP constants ----------------------------------------------------------------------- Projection Setting .. autodata:: geosoft.gxapi.IPJ_CSP_SCALE :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_SCALE .. autodata:: geosoft.gxapi.IPJ_CSP_FALSEEAST :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_FALSEEAST .. autodata:: geosoft.gxapi.IPJ_CSP_FALSENORTH :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_FALSENORTH .. autodata:: geosoft.gxapi.IPJ_CSP_LATORIGIN :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_LATORIGIN .. autodata:: geosoft.gxapi.IPJ_CSP_LONORIGIN :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_LONORIGIN .. autodata:: geosoft.gxapi.IPJ_CSP_PARALLEL_1 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_PARALLEL_1 .. autodata:: geosoft.gxapi.IPJ_CSP_PARALLEL_2 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_PARALLEL_2 .. autodata:: geosoft.gxapi.IPJ_CSP_AZIMUTH :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_AZIMUTH .. autodata:: geosoft.gxapi.IPJ_CSP_ANGLE :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_ANGLE .. autodata:: geosoft.gxapi.IPJ_CSP_POINTLAT_1 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_POINTLAT_1 .. autodata:: geosoft.gxapi.IPJ_CSP_POINTLON_1 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_POINTLON_1 .. autodata:: geosoft.gxapi.IPJ_CSP_POINTLAT_2 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_POINTLAT_2 .. autodata:: geosoft.gxapi.IPJ_CSP_POINTLON_2 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_CSP_POINTLON_2 .. _IPJ_NAME: IPJ_NAME constants ----------------------------------------------------------------------- Project Name .. autodata:: geosoft.gxapi.IPJ_NAME_PCS :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_PCS .. autodata:: geosoft.gxapi.IPJ_NAME_PROJECTION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_PROJECTION .. 
autodata:: geosoft.gxapi.IPJ_NAME_METHOD :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_METHOD .. autodata:: geosoft.gxapi.IPJ_NAME_DATUM :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_DATUM .. autodata:: geosoft.gxapi.IPJ_NAME_ELLIPSOID :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_ELLIPSOID .. autodata:: geosoft.gxapi.IPJ_NAME_LDATUM :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_LDATUM .. autodata:: geosoft.gxapi.IPJ_NAME_UNIT_ABBR :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_UNIT_ABBR .. autodata:: geosoft.gxapi.IPJ_NAME_UNIT_FULL :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_UNIT_FULL .. autodata:: geosoft.gxapi.IPJ_NAME_TYPE :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_TYPE .. autodata:: geosoft.gxapi.IPJ_NAME_LLDATUM :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_LLDATUM .. autodata:: geosoft.gxapi.IPJ_NAME_METHOD_PARMS :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_METHOD_PARMS .. autodata:: geosoft.gxapi.IPJ_NAME_METHOD_LABEL :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_METHOD_LABEL .. autodata:: geosoft.gxapi.IPJ_NAME_DATUM_PARMS :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_DATUM_PARMS .. autodata:: geosoft.gxapi.IPJ_NAME_LDATUM_PARMS :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_LDATUM_PARMS .. autodata:: geosoft.gxapi.IPJ_NAME_GEOID :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_GEOID .. autodata:: geosoft.gxapi.IPJ_NAME_LDATUMDESCRIPTION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_LDATUMDESCRIPTION .. autodata:: geosoft.gxapi.IPJ_NAME_METHOD_PARMS_NATIVE :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_METHOD_PARMS_NATIVE .. autodata:: geosoft.gxapi.IPJ_NAME_ORIENTATION_PARMS :annotation: .. autoattribute:: geosoft.gxapi.IPJ_NAME_ORIENTATION_PARMS .. _IPJ_ORIENT: IPJ_ORIENT constants ----------------------------------------------------------------------- Projection Orientation .. 
autodata:: geosoft.gxapi.IPJ_ORIENT_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.IPJ_ORIENT_DEFAULT .. autodata:: geosoft.gxapi.IPJ_ORIENT_PLAN :annotation: .. autoattribute:: geosoft.gxapi.IPJ_ORIENT_PLAN .. autodata:: geosoft.gxapi.IPJ_ORIENT_SECTION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_ORIENT_SECTION .. autodata:: geosoft.gxapi.IPJ_ORIENT_SECTION_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.IPJ_ORIENT_SECTION_NORMAL .. autodata:: geosoft.gxapi.IPJ_ORIENT_DEPTH_SECTION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_ORIENT_DEPTH_SECTION .. autodata:: geosoft.gxapi.IPJ_ORIENT_3D :annotation: .. autoattribute:: geosoft.gxapi.IPJ_ORIENT_3D .. autodata:: geosoft.gxapi.IPJ_ORIENT_3D_MATRIX :annotation: .. autoattribute:: geosoft.gxapi.IPJ_ORIENT_3D_MATRIX .. autodata:: geosoft.gxapi.IPJ_ORIENT_SECTION_CROOKED :annotation: .. autoattribute:: geosoft.gxapi.IPJ_ORIENT_SECTION_CROOKED .. _IPJ_PARM_LST: IPJ_PARM_LST constants ----------------------------------------------------------------------- Projection List .. autodata:: geosoft.gxapi.IPJ_PARM_LST_COORDINATESYSTEM :annotation: .. autoattribute:: geosoft.gxapi.IPJ_PARM_LST_COORDINATESYSTEM .. autodata:: geosoft.gxapi.IPJ_PARM_LST_DATUM :annotation: .. autoattribute:: geosoft.gxapi.IPJ_PARM_LST_DATUM .. autodata:: geosoft.gxapi.IPJ_PARM_LST_PROJECTION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_PARM_LST_PROJECTION .. autodata:: geosoft.gxapi.IPJ_PARM_LST_UNITS :annotation: .. autoattribute:: geosoft.gxapi.IPJ_PARM_LST_UNITS .. autodata:: geosoft.gxapi.IPJ_PARM_LST_LOCALDATUMDESCRIPTION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_PARM_LST_LOCALDATUMDESCRIPTION .. autodata:: geosoft.gxapi.IPJ_PARM_LST_LOCALDATUMNAME :annotation: .. autoattribute:: geosoft.gxapi.IPJ_PARM_LST_LOCALDATUMNAME .. autodata:: geosoft.gxapi.IPJ_PARM_LST_UNITSDESCRIPTION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_PARM_LST_UNITSDESCRIPTION .. 
_IPJ_TYPE: IPJ_TYPE constants ----------------------------------------------------------------------- `GXIPJ <geosoft.gxapi.GXIPJ>` Types .. autodata:: geosoft.gxapi.IPJ_TYPE_PRJ :annotation: .. autoattribute:: geosoft.gxapi.IPJ_TYPE_PRJ .. autodata:: geosoft.gxapi.IPJ_TYPE_PCS :annotation: .. autoattribute:: geosoft.gxapi.IPJ_TYPE_PCS .. autodata:: geosoft.gxapi.IPJ_TYPE_GCS :annotation: .. autoattribute:: geosoft.gxapi.IPJ_TYPE_GCS .. autodata:: geosoft.gxapi.IPJ_TYPE_ANY :annotation: .. autoattribute:: geosoft.gxapi.IPJ_TYPE_ANY .. autodata:: geosoft.gxapi.IPJ_TYPE_NONE :annotation: .. autoattribute:: geosoft.gxapi.IPJ_TYPE_NONE .. autodata:: geosoft.gxapi.IPJ_TYPE_WRP :annotation: .. autoattribute:: geosoft.gxapi.IPJ_TYPE_WRP .. autodata:: geosoft.gxapi.IPJ_TYPE_TEST :annotation: .. autoattribute:: geosoft.gxapi.IPJ_TYPE_TEST .. _IPJ_UNIT: IPJ_UNIT constants ----------------------------------------------------------------------- Projection Unit Type .. autodata:: geosoft.gxapi.IPJ_UNIT_ABBREVIATION :annotation: .. autoattribute:: geosoft.gxapi.IPJ_UNIT_ABBREVIATION .. autodata:: geosoft.gxapi.IPJ_UNIT_FULLNAME :annotation: .. autoattribute:: geosoft.gxapi.IPJ_UNIT_FULLNAME .. _IPJ_WARP: IPJ_WARP constants ----------------------------------------------------------------------- Warp (Transformation) type .. autodata:: geosoft.gxapi.IPJ_WARP_MATRIX :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_MATRIX .. autodata:: geosoft.gxapi.IPJ_WARP_NONE :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_NONE .. autodata:: geosoft.gxapi.IPJ_WARP_TRANS1 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_TRANS1 .. autodata:: geosoft.gxapi.IPJ_WARP_TRANS2 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_TRANS2 .. autodata:: geosoft.gxapi.IPJ_WARP_TRANS3 :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_TRANS3 .. autodata:: geosoft.gxapi.IPJ_WARP_QUAD :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_QUAD .. 
autodata:: geosoft.gxapi.IPJ_WARP_MULTIPOINT :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_MULTIPOINT .. autodata:: geosoft.gxapi.IPJ_WARP_LOG :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_LOG .. autodata:: geosoft.gxapi.IPJ_WARP_MULTIPOINT_Y :annotation: .. autoattribute:: geosoft.gxapi.IPJ_WARP_MULTIPOINT_Y <file_sep>/examples/tutorial/3D Views/get_data_files.py import geosoft.gxpy.gx as gx import geosoft.gxpy.utility as gxu gxc = gx.GXpy() url = 'https://github.com/GeosoftInc/gxpy/raw/9.3/examples/tutorial/3D%20Views/' gxu.url_retrieve(url + 'Wittichica Creek Residual Total Field.grd') gxu.url_retrieve(url + 'Wittichica Creek Residual Total Field.grd.gi') gxu.url_retrieve(url + 'Wittichica Creek Residual Total Field.grd.xml') gxu.url_retrieve(url + 'Wittichica DEM.grd') gxu.url_retrieve(url + 'Wittichica DEM.grd.gi') gxu.url_retrieve(url + 'Wittichica DEM.grd.xml')<file_sep>/docs/GXRGRD.rst .. _GXRGRD: GXRGRD class ================================== .. autoclass:: geosoft.gxapi.GXRGRD :members: <file_sep>/docs/GXKML.rst .. _GXKML: GXKML class ================================== .. autoclass:: geosoft.gxapi.GXKML :members: .. _KML_ALT: KML_ALT constants ----------------------------------------------------------------------- KML Altitude values. .. autodata:: geosoft.gxapi.KML_ALT_CLAMPTOGROUND :annotation: .. autoattribute:: geosoft.gxapi.KML_ALT_CLAMPTOGROUND .. autodata:: geosoft.gxapi.KML_ALT_RELATIVETOGROUND :annotation: .. autoattribute:: geosoft.gxapi.KML_ALT_RELATIVETOGROUND .. autodata:: geosoft.gxapi.KML_ALT_ABSOLUTE :annotation: .. autoattribute:: geosoft.gxapi.KML_ALT_ABSOLUTE <file_sep>/docs/GXSTK.rst .. _GXSTK: GXSTK class ================================== .. autoclass:: geosoft.gxapi.GXSTK :members: .. _STK_AXIS: STK_AXIS constants ----------------------------------------------------------------------- `GXSTK <geosoft.gxapi.GXSTK>` Axis defines .. autodata:: geosoft.gxapi.STK_AXIS_X :annotation: .. 
autoattribute:: geosoft.gxapi.STK_AXIS_X .. autodata:: geosoft.gxapi.STK_AXIS_Y :annotation: .. autoattribute:: geosoft.gxapi.STK_AXIS_Y .. _STK_AXIS_POS: STK_AXIS_POS constants ----------------------------------------------------------------------- `GXSTK <geosoft.gxapi.GXSTK>` Axis defines. Use with STK_AXIS_X and STK_AXIS_Y .. autodata:: geosoft.gxapi.STK_AXIS_NONE :annotation: .. autoattribute:: geosoft.gxapi.STK_AXIS_NONE .. autodata:: geosoft.gxapi.STK_AXIS_LEFT :annotation: .. autoattribute:: geosoft.gxapi.STK_AXIS_LEFT .. autodata:: geosoft.gxapi.STK_AXIS_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.STK_AXIS_RIGHT .. autodata:: geosoft.gxapi.STK_AXIS_BOTH :annotation: .. autoattribute:: geosoft.gxapi.STK_AXIS_BOTH .. autodata:: geosoft.gxapi.STK_AXIS_BOTTOM :annotation: .. autoattribute:: geosoft.gxapi.STK_AXIS_BOTTOM .. autodata:: geosoft.gxapi.STK_AXIS_TOP :annotation: .. autoattribute:: geosoft.gxapi.STK_AXIS_TOP .. _STK_FLAG: STK_FLAG constants ----------------------------------------------------------------------- Stack flags .. autodata:: geosoft.gxapi.STK_FLAG_PROFILE :annotation: .. autoattribute:: geosoft.gxapi.STK_FLAG_PROFILE .. autodata:: geosoft.gxapi.STK_FLAG_FID :annotation: .. autoattribute:: geosoft.gxapi.STK_FLAG_FID .. autodata:: geosoft.gxapi.STK_FLAG_SYMBOL :annotation: .. autoattribute:: geosoft.gxapi.STK_FLAG_SYMBOL .. autodata:: geosoft.gxapi.STK_FLAG_XBAR :annotation: .. autoattribute:: geosoft.gxapi.STK_FLAG_XBAR .. autodata:: geosoft.gxapi.STK_FLAG_XLABEL :annotation: .. autoattribute:: geosoft.gxapi.STK_FLAG_XLABEL .. autodata:: geosoft.gxapi.STK_FLAG_XTITLE :annotation: .. autoattribute:: geosoft.gxapi.STK_FLAG_XTITLE .. autodata:: geosoft.gxapi.STK_FLAG_YBAR :annotation: .. autoattribute:: geosoft.gxapi.STK_FLAG_YBAR .. autodata:: geosoft.gxapi.STK_FLAG_YLABEL :annotation: .. autoattribute:: geosoft.gxapi.STK_FLAG_YLABEL .. autodata:: geosoft.gxapi.STK_FLAG_YTITLE :annotation: .. 
autoattribute:: geosoft.gxapi.STK_FLAG_YTITLE

.. autodata:: geosoft.gxapi.STK_FLAG_GRID1
   :annotation:
.. autoattribute:: geosoft.gxapi.STK_FLAG_GRID1

.. autodata:: geosoft.gxapi.STK_FLAG_GRID2
   :annotation:
.. autoattribute:: geosoft.gxapi.STK_FLAG_GRID2

.. _STK_GRID:

STK_GRID constants
-----------------------------------------------------------------------

Stack Grid define

.. autodata:: geosoft.gxapi.STK_GRID_PRIMARY
   :annotation:
.. autoattribute:: geosoft.gxapi.STK_GRID_PRIMARY

.. autodata:: geosoft.gxapi.STK_GRID_SECONDARY
   :annotation:
.. autoattribute:: geosoft.gxapi.STK_GRID_SECONDARY
<file_sep>/examples/tutorial/3D Views/tmi_as_3d_relief.py
import geosoft.gxpy.gx as gx
import geosoft.gxpy.view as gxview
import geosoft.gxpy.group as gxgroup
import geosoft.gxpy.agg as gxagg
import geosoft.gxpy.grid as gxgrd
import geosoft.gxpy.viewer as gxviewer

# Tutorial: display a TMI grid draped as a shaded 3D relief surface.
gxc = gx.GXpy()

grid_file = 'Wittichica Creek Residual Total Field.grd'

# create a 3D view
with gxview.View_3d.new("TMI in relief",
                        area_2d=gxgrd.Grid.open(grid_file).extent_2d(),
                        coordinate_system=gxgrd.Grid.open(grid_file).coordinate_system,
                        overwrite=True) as v:
    v3d_name = v.file_name

    # use the data grid as the relief surface
    v.set_plane_relief_surface(grid_file)

    # add the grid image to the view, with shading, 20 nT contour interval to match default contour lines
    gxgroup.Aggregate_group.new(v, gxagg.Aggregate_image.new(grid_file, shade=True, contour=20))
    gxgroup.contour(v, 'TMI_contour', grid_file)

# display the map in a Geosoft viewer
gxviewer.view_document(v3d_name, wait_for_close=False)
<file_sep>/examples/tutorial/Grids and Images/grid_print.py
import geosoft.gxpy as gxpy

# Tutorial: iterate a Surfer-format grid and print each non-dummy point.
gxc = gxpy.gx.GXpy()

with gxpy.grid.Grid.open('elevation_surfer.grd(SRF;VER=V7)') as grid:
    print('coordinate_system: ', grid.coordinate_system)
    for x, y, z, v in grid:
        if v is not None:
            print(x, y, z, v)
<file_sep>/geosoft/gxapi/GXFFT.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXFFT(gxapi_cy.WrapFFT): """ GXFFT class. This class allows for the application of predefined filters to data in an OASIS database. The system uses the Winograd algorithm to transform data in the spatial domain to the wavenumber or Fourier domain. """ def __init__(self, handle=0): super(GXFFT, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXFFT <geosoft.gxapi.GXFFT>` :returns: A null `GXFFT <geosoft.gxapi.GXFFT>` :rtype: GXFFT """ return GXFFT() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def add_white_noise(self, amp, option): """ Add white noise to the power spectrum of an FFT object. :param amp: The value added to the real part of all non-DC components of the current power spectrum :param option: :ref:`FFT_WHITE_NOISE` :type amp: float :type option: int .. versionadded:: 9.9 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._add_white_noise(amp, option) def app_dens(self, thick, dens): """ Appparent density filter :param thick: Thickness (meters) of the earth model :param dens: Background density (g/cm3) (default = 0) :type thick: float :type dens: float .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._app_dens(thick, dens) def app_susc(self, strength): """ Apparent susceptiblity filter :param strength: Total magnetic field strength :type strength: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Reduction to magnetic pole (`red_pol <geosoft.gxapi.GXFFT.red_pol>`) and downward continuation (`contin <geosoft.gxapi.GXFFT.contin>`) should be called BEFORE using `app_susc <geosoft.gxapi.GXFFT.app_susc>`. """ self._app_susc(strength) def band_pass(self, llen, hlen, pass_defined): """ Bandpass filter (using low and high wavelength cutoffs) :param llen: Low Cutoff wavelength (meters) :param hlen: High Cutoff wavelength (meter) :param pass_defined: 1= Pass the defined band (default); 0= Reject the band :type llen: float :type hlen: float :type pass_defined: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._band_pass(llen, hlen, pass_defined) def b_worth(self, clen, degree, filter_type): """ Butterworth filter :param clen: Central cutoff wavelength (meter) :param degree: Degree of the filter function (default = 8.0) :param filter_type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter :type clen: float :type degree: float :type filter_type: int .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._b_worth(clen, degree, filter_type) def rc_filter(self, clen, filter_type): """ RC filter :param clen: Central cutoff wavelength (meter) :param filter_type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter :type clen: float :type filter_type: int .. versionadded:: 8.5 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._rc_filter(clen, filter_type) def contin(self, dist): """ Upward/Downward continuation filter :param dist: Distance to continue; positive = downwards negative = upwards :type dist: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._contin(dist) def cos_roll(self, llen, hlen, degree, type): """ Cosine roll-off filter :param llen: Low wavelength start point (meters) :param hlen: High wavelength end point (meters) :param degree: Degree of the filter function (default = 2.0) :param type: Filter type: 1= Low-pass (regional) filter (default) 0= High-pass (residual) filter :type llen: float :type hlen: float :type degree: float :type type: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._cos_roll(llen, hlen, degree, type) @classmethod def create(cls, gvv, interv, trend): """ Create a New `GXFFT <geosoft.gxapi.GXFFT>` with detrend options. :param gvv: `GXVV <geosoft.gxapi.GXVV>` to transform. :param interv: Element space interval :param trend: :ref:`FFT_DETREND` :type gvv: GXVV :type interv: float :type trend: int :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object :rtype: GXFFT .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The detrending options control the removal of a trend from the data before the `GXFFT <geosoft.gxapi.GXFFT>` is applied. The default data expansion is 10% before `GXFFT <geosoft.gxapi.GXFFT>`. """ ret_val = gxapi_cy.WrapFFT._create(GXContext._get_tls_geo(), gvv, interv, trend) return GXFFT(ret_val) @classmethod def create_ex(cls, gvv, interv, trend, expansion): """ Create a New `GXFFT <geosoft.gxapi.GXFFT>` with detrend and expansion options. :param gvv: `GXVV <geosoft.gxapi.GXVV>` to transform. :param interv: Element space interval :param trend: :ref:`FFT_DETREND` :param expansion: Minimum expansion % :type gvv: GXVV :type interv: float :type trend: int :type expansion: float :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object :rtype: GXFFT .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The detrending options control the removal of a trend from the data before the `GXFFT <geosoft.gxapi.GXFFT>` is applied. The expansion options control the minimum data expansion before the `GXFFT <geosoft.gxapi.GXFFT>` is applied. """ ret_val = gxapi_cy.WrapFFT._create_ex(GXContext._get_tls_geo(), gvv, interv, trend, expansion) return GXFFT(ret_val) @classmethod def create_ref(cls, gvv, interv, trend): """ Create `GXFFT <geosoft.gxapi.GXFFT>` object with detrend options from reference (original) channel, but no `GXFFT <geosoft.gxapi.GXFFT>` process. :param gvv: `GXVV <geosoft.gxapi.GXVV>` contains channel data to perform `GXFFT <geosoft.gxapi.GXFFT>` operations upon. 
:param interv: Element space interval, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call :param trend: :ref:`FFT_DETREND` :type gvv: GXVV :type interv: float :type trend: int :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object :rtype: GXFFT .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This just creates an object. It is intended to be called immediately after with `set_vv <geosoft.gxapi.GXFFT.set_vv>`. """ ret_val = gxapi_cy.WrapFFT._create_ref(GXContext._get_tls_geo(), gvv, interv, trend) return GXFFT(ret_val) @classmethod def create_ref_ex(cls, gvv, interv, trend, expansion, d_cmult): """ Create `GXFFT <geosoft.gxapi.GXFFT>` object with detrend and expansion options from reference (original) channel, but no `GXFFT <geosoft.gxapi.GXFFT>` process. :param gvv: `GXVV <geosoft.gxapi.GXVV>` contains channel data to perform `GXFFT <geosoft.gxapi.GXFFT>` operations upon. :param interv: Element space interval, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call :param trend: :ref:`FFT_DETREND` :param expansion: Minimum expansion %, should be the same as in `create_ex <geosoft.gxapi.GXFFT.create_ex>` call :param d_cmult: DC level multiple :type gvv: GXVV :type interv: float :type trend: int :type expansion: float :type d_cmult: float :returns: `GXFFT <geosoft.gxapi.GXFFT>` Object :rtype: GXFFT .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This just creates an object. It is intended to be called immediately after with `set_vv <geosoft.gxapi.GXFFT.set_vv>`. 
""" ret_val = gxapi_cy.WrapFFT._create_ref_ex(GXContext._get_tls_geo(), gvv, interv, trend, expansion, d_cmult) return GXFFT(ret_val) def gaus(self, dev, type): """ Gaussian filter :param dev: Standard deviation cutoff of function (meters) :param type: Filter type: 1= Low-pass (residual) filter (default) 0= High-pass (regional) filter :type dev: float :type type: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._gaus(dev, type) def get_vv(self, gv_vr, gv_vi): """ Copies real and imaginary `GXVV <geosoft.gxapi.GXVV>`'s to user `GXVV <geosoft.gxapi.GXVV>`'s. :param gv_vr: Real component :param gv_vi: Imaginary component :type gv_vr: GXVV :type gv_vi: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_vv(gv_vr, gv_vi) def h_drv(self, order): """ Horizontal derivative :param order: Order of differentiation (default = 1) :type order: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._h_drv(order) def high_pass(self, wlen, fid_int): """ High bandpass filter :param wlen: Cutoff wavelength (meter) :param fid_int: Fiducial increment of the `GXFFT <geosoft.gxapi.GXFFT>`'s channel data :type wlen: float :type fid_int: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._high_pass(wlen, fid_int) def h_int(self): """ Horizontal integration .. 
versionadded:: 5.1.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._h_int() def inverse(self, gvv, gv_vm): """ Inverse the `GXFFT <geosoft.gxapi.GXFFT>` from wave number domain to space domain :param gvv: Output `GXVV <geosoft.gxapi.GXVV>` :param gv_vm: Original `GXVV <geosoft.gxapi.GXVV>` which was used to create `GXFFT <geosoft.gxapi.GXFFT>` (will be used as mask for output `GXVV <geosoft.gxapi.GXVV>`; no masking if this parameter is NULL) :type gvv: GXVV :type gv_vm: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._inverse(gvv, gv_vm) def low_pass(self, wlen): """ Low bandpass filter :param wlen: Cutoff wavelength (meters) :type wlen: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._low_pass(wlen) def red_pol(self, inc, dec, incp, dir): """ Reduction to magnetic pole :param inc: Geomagnetic inclination (degrees) :param dec: Geomagnetic declination (degrees) :param incp: Inclination (degrees) for amplitude correction (default = 20.0) :param dir: Direction (degrees) of Line from North :type inc: float :type dec: float :type incp: float :type dir: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._red_pol(inc, dec, incp, dir) def nyquist(self): """ Gets the Nyquist frequency (wavenumbers/sample unit). :returns: Nyquist frequency (wavenumbers/sample unit). :rtype: float .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._nyquist() return ret_val def samp_incr(self): """ Gets the original sample increment. :returns: Original sample increment. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._samp_incr() return ret_val def wave_incr(self): """ Get the wave number increment. :returns: Wave number increment :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._wave_incr() return ret_val def set_vv(self, gv_vr, gv_vi): """ Sets real and imaginary VVs in `GXFFT <geosoft.gxapi.GXFFT>`. :param gv_vr: Real component :param gv_vi: Imaginary component :type gv_vr: GXVV :type gv_vi: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The `GXVV <geosoft.gxapi.GXVV>` must have been obtained from the same `GXFFT <geosoft.gxapi.GXFFT>` using the `set_vv <geosoft.gxapi.GXFFT.set_vv>` method. """ self._set_vv(gv_vr, gv_vi) def spectrum(self, gvv): """ Calculates a power spectrum :param gvv: Output power spectrum `GXVV <geosoft.gxapi.GXVV>` :type gvv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._spectrum(gvv) def v_drv(self, order): """ Vertical derivative :param order: Order of differentiation (default = 1) :type order: float .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._v_drv(order) def v_int(self): """ Vertical integration .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._v_int() def write_spectrum(self, gvv, out_file): """ Writes a power spectrum to a file :param gvv: Output power spectrum `GXVV <geosoft.gxapi.GXVV>` :param out_file: File name for output spectrum :type gvv: GXVV :type out_file: str .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._write_spectrum(gvv, out_file.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXWA.rst .. _GXWA: GXWA class ================================== .. autoclass:: geosoft.gxapi.GXWA :members: .. _WA_ENCODE: WA_ENCODE constants ----------------------------------------------------------------------- `GXWA <geosoft.gxapi.GXWA>` Encode defines .. autodata:: geosoft.gxapi.WA_ENCODE_ANSI :annotation: .. autoattribute:: geosoft.gxapi.WA_ENCODE_ANSI .. autodata:: geosoft.gxapi.WA_ENCODE_RAW :annotation: .. autoattribute:: geosoft.gxapi.WA_ENCODE_RAW .. autodata:: geosoft.gxapi.WA_ENCODE_UTF8 :annotation: .. autoattribute:: geosoft.gxapi.WA_ENCODE_UTF8 .. autodata:: geosoft.gxapi.WA_ENCODE_UTF8_NOHEADER :annotation: .. autoattribute:: geosoft.gxapi.WA_ENCODE_UTF8_NOHEADER .. autodata:: geosoft.gxapi.WA_ENCODE_UTF16_NOHEADER :annotation: .. autoattribute:: geosoft.gxapi.WA_ENCODE_UTF16_NOHEADER .. 
_WA_OPEN: WA_OPEN constants ----------------------------------------------------------------------- `GXWA <geosoft.gxapi.GXWA>` Open defines .. autodata:: geosoft.gxapi.WA_NEW :annotation: .. autoattribute:: geosoft.gxapi.WA_NEW .. autodata:: geosoft.gxapi.WA_APPEND :annotation: .. autoattribute:: geosoft.gxapi.WA_APPEND <file_sep>/examples/tutorial/2D Views and Maps/grid_map.py import geosoft.gxpy.gx as gx import geosoft.gxpy.map as gxmap import geosoft.gxpy.view as gxview import geosoft.gxpy.group as gxgroup import geosoft.gxpy.agg as gxagg import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.viewer as gxviewer gxc = gx.GXpy() # create a map from the grid coordinate system and extent with gxgrd.Grid('Wittichica Creek Residual Total Field.grd') as grd: grid_file_name = grd.file_name_decorated # create a map for this grid on A4 media, scale to fit the extent with gxmap.Map.new('Wittichica residual TMI', data_area=grd.extent_2d(), media="A4", margins=(1, 3.5, 3, 1), coordinate_system=grd.coordinate_system, overwrite=True) as gmap: map_file_name = gmap.file_name # draw into the views on the map. We are reopening the map as the Aggregate class only works with a closed grid. with gxmap.Map.open(map_file_name) as gmap: # work with the data view with gxview.View.open(gmap, "data") as v: # add the grid image to the view with gxagg.Aggregate_image.new(grid_file_name) as agg: gxgroup.Aggregate_group.new(v, agg) # display the map in a Geosoft viewer gxviewer.view_document(map_file_name, wait_for_close=False) <file_sep>/geosoft/gxapi/GXTC.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXTC(gxapi_cy.WrapTC): """ GXTC class. The `GXTC <geosoft.gxapi.GXTC>` object is used in gravitational modelling to create a terrain correction grid from a topography grid. This is accomplished with a call first to `grregter <geosoft.gxapi.GXTC.grregter>`, which determines the terrain correction from an input topography grid, then to `grterain <geosoft.gxapi.GXTC.grterain>`, which calculates the actual corrections at the input positions. """ def __init__(self, handle=0): super(GXTC, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXTC <geosoft.gxapi.GXTC>` :returns: A null `GXTC <geosoft.gxapi.GXTC>` :rtype: GXTC """ return GXTC() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, img, elev_unit, dinner, douter, dens_t, dens_w, elev_w, edge, edge_elev, opt): """ Creates a Terrain Correction object :param img: Topo (DEM) grid :param elev_unit: Elevation unit in 1 metre (i.e. 
0.3048 for feet) :param dinner: Inner distance (in topo grid projection units, default in metres) :param douter: Outer distance (in topo grid projection units, default in metres) :param dens_t: Terrain density in g/cc :param dens_w: Water density in g/cc :param elev_w: Water reference elevation (in elevation unit) :param edge: 1 to calculate an edge correction (compensation), 0 otherwise :param edge_elev: Average elevation beyond max distance (in elevation unit) :param opt: :ref:`TC_OPT` :type img: GXIMG :type elev_unit: float :type dinner: float :type douter: float :type dens_t: float :type dens_w: float :type elev_w: float :type edge: int :type edge_elev: float :type opt: int :returns: `GXTC <geosoft.gxapi.GXTC>` Object :rtype: GXTC .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapTC._create(GXContext._get_tls_geo(), img, elev_unit, dinner, douter, dens_t, dens_w, elev_w, edge, edge_elev, opt) return GXTC(ret_val) @classmethod def create_ex(cls, img, elev_unit, dinner, douter, dens_t, dens_w, elev_w, edge, edge_elev, opt, survey_type): """ Creates a Terrain Correction object with surveytype :param img: Topo (DEM) grid :param elev_unit: Elevation unit in 1 metre (i.e. 
0.3048 for feet) :param dinner: Inner distance (in topo grid projection units, default in metres) :param douter: Outer distance (in topo grid projection units, default in metres) :param dens_t: Terrain density in g/cc :param dens_w: Water density in g/cc :param elev_w: Water reference elevation (in elevation unit) :param edge: 1 to calculate an edge correction (compensation), 0 otherwise :param edge_elev: Average elevation beyond max distance (in elevation unit) :param opt: :ref:`TC_OPT` :param survey_type: :ref:`TC_SURVEYTYPE` :type img: GXIMG :type elev_unit: float :type dinner: float :type douter: float :type dens_t: float :type dens_w: float :type elev_w: float :type edge: int :type edge_elev: float :type opt: int :type survey_type: int :returns: `GXTC <geosoft.gxapi.GXTC>` Object :rtype: GXTC .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapTC._create_ex(GXContext._get_tls_geo(), img, elev_unit, dinner, douter, dens_t, dens_w, elev_w, edge, edge_elev, opt, survey_type) return GXTC(ret_val) @classmethod def create_ex2(cls, img, elev_unit, dinner, douter, dens_t, dens_w, elev_w, edge, edge_elev, opt, survey_type, img2): """ Creates a Terrain Correction object with surveytype and topo surface elevation grid :param img: Topo (Rock Surface DEM) grid :param elev_unit: Elevation unit in 1 metre (i.e. 
0.3048 for feet) :param dinner: Inner distance (in topo grid projection units, default in metres) :param douter: Outer distance (in topo grid projection units, default in metres) :param dens_t: Terrain density in g/cc :param dens_w: Water density in g/cc :param elev_w: Water reference elevation (in elevation unit) :param edge: 1 to calculate an edge correction (compensation), 0 otherwise :param edge_elev: Average elevation beyond max distance (in elevation unit) :param opt: :ref:`TC_OPT` :param survey_type: :ref:`TC_SURVEYTYPE` :param img2: Topo (Elev Surface DEM) grid :type img: GXIMG :type elev_unit: float :type dinner: float :type douter: float :type dens_t: float :type dens_w: float :type elev_w: float :type edge: int :type edge_elev: float :type opt: int :type survey_type: int :type img2: GXIMG :returns: `GXTC <geosoft.gxapi.GXTC>` Object :rtype: GXTC .. versionadded:: 9.9 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapTC._create_ex2(GXContext._get_tls_geo(), img, elev_unit, dinner, douter, dens_t, dens_w, elev_w, edge, edge_elev, opt, survey_type, img2) return GXTC(ret_val) def grregter(self, im_gi, im_go): """ Create a terrain correction grid for a topo grid. :param im_gi: Input `GXIMG <geosoft.gxapi.GXIMG>` (local DEM topo grid used for station elevation) :param im_go: Image of output grid :type im_gi: GXIMG :type im_go: GXIMG .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._grregter(im_gi, im_go) def grterain(self, gv_vx, gv_vy, gv_velev, gv_vslop, gv_vtcor, im_gcor, dens_t): """ Calculate terrain corrections. 
:param gv_vx: Input X channel data (in topo grid projection units, default in metres) :param gv_vy: Input Y channel data (in topo grid projection units, default in metres) :param gv_velev: Input Elevation channel data (in elevation unit) :param gv_vslop: Input slope channel data :param gv_vtcor: Output Terrain Corrected channel data :param im_gcor: Image of input correction grid :param dens_t: Terrain density (default 2.67) :type gv_vx: GXVV :type gv_vy: GXVV :type gv_velev: GXVV :type gv_vslop: GXVV :type gv_vtcor: GXVV :type im_gcor: GXIMG :type dens_t: float .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._grterain(gv_vx, gv_vy, gv_velev, gv_vslop, gv_vtcor, im_gcor, dens_t) def grterain2(self, gv_vx, gv_vy, gv_velev, gv_vslop, gv_vwater, gv_vtcor, im_gcor, dens_t): """ Calculate terrain corrections (work for marine gravity too). :param gv_vx: Input X channel data (in topo grid projection units, default in metres) :param gv_vy: Input Y channel data (in topo grid projection units, default in metres) :param gv_velev: Input Elevation channel data (in elevation unit) :param gv_vslop: Input slope channel data :param gv_vwater: Input Water depth channel data (in metres) :param gv_vtcor: Output Terrain Corrected channel data :param im_gcor: Image of input correction grid :param dens_t: Terrain density (default 2.67) :type gv_vx: GXVV :type gv_vy: GXVV :type gv_velev: GXVV :type gv_vslop: GXVV :type gv_vwater: GXVV :type gv_vtcor: GXVV :type im_gcor: GXIMG :type dens_t: float .. 
versionadded:: 6.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._grterain2(gv_vx, gv_vy, gv_velev, gv_vslop, gv_vwater, gv_vtcor, im_gcor, dens_t) def g_gterain(self, gv_vx, p3, p4, p5, p6, p7, p8): """ Calculate GG terrain corrections :param gv_vx: Input X channel data (in topo grid projection units, default in metres) :param p3: Input Y channel data (in topo grid projection units, default in metres) :param p4: Input Elevation channel data (in elevation unit) :param p5: Output Terrain Corrected channel data :param p6: Terrain density (default 2.67) :param p7: Terrain reference level (default 0.0) :param p8: :ref:`GG_ELEMENT` :type gv_vx: GXVV :type p3: GXVV :type p4: GXVV :type p5: GXVV :type p6: float :type p7: float :type p8: int .. versionadded:: 6.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._g_gterain(gv_vx, p3, p4, p5, p6, p7, p8) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXHXYZ.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXHXYZ(gxapi_cy.WrapHXYZ): """ GXHXYZ class. High Performance Data Point Storage. This is used to put Point data on a DAP server. 
It is compressed and uses a Quad-Tree design to allow very high speed data extraction. It is also multi-threaded. """ def __init__(self, handle=0): super(GXHXYZ, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXHXYZ <geosoft.gxapi.GXHXYZ>` :returns: A null `GXHXYZ <geosoft.gxapi.GXHXYZ>` :rtype: GXHXYZ """ return GXHXYZ() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, name): """ Create a handle to an `GXHXYZ <geosoft.gxapi.GXHXYZ>` object :param name: File Name :type name: str :returns: `GXHXYZ <geosoft.gxapi.GXHXYZ>` Object :rtype: GXHXYZ .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapHXYZ._create(GXContext._get_tls_geo(), name.encode()) return GXHXYZ(ret_val) def get_meta(self, meta): """ Get the metadata of a grid. :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to save `GXHXYZ <geosoft.gxapi.GXHXYZ>`'s meta to :type meta: GXMETA .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_meta(meta) @classmethod def h_create_db(cls, db, gvv, name): """ Make an `GXHXYZ <geosoft.gxapi.GXHXYZ>` from GDB :param db: `GXDB <geosoft.gxapi.GXDB>` handle :param gvv: `GXVV <geosoft.gxapi.GXVV>` of channels to export :param name: Name of `GXHXYZ <geosoft.gxapi.GXHXYZ>` object :type db: GXDB :type gvv: GXVV :type name: str :returns: `GXHXYZ <geosoft.gxapi.GXHXYZ>` object :rtype: GXHXYZ .. 
versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapHXYZ._h_create_db(GXContext._get_tls_geo(), db, gvv, name.encode()) return GXHXYZ(ret_val) @classmethod def h_create_sql(cls, templ, x, y, z, ipj, name): """ Make an `GXHXYZ <geosoft.gxapi.GXHXYZ>` from SQL Query :param templ: Template File Name :param x: X field name :param y: Y field name :param z: Z field name :param ipj: Projection of data values :param name: Name of `GXHXYZ <geosoft.gxapi.GXHXYZ>` object :type templ: str :type x: str :type y: str :type z: str :type ipj: GXIPJ :type name: str :returns: `GXHXYZ <geosoft.gxapi.GXHXYZ>` object :rtype: GXHXYZ .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapHXYZ._h_create_sql(GXContext._get_tls_geo(), templ.encode(), x.encode(), y.encode(), z.encode(), ipj, name.encode()) return GXHXYZ(ret_val) def set_meta(self, meta): """ Set the metadata of a grid. :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to add to `GXHXYZ <geosoft.gxapi.GXHXYZ>`'s meta :type meta: GXMETA .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta(meta) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXDOCU.rst .. _GXDOCU: GXDOCU class ================================== .. autoclass:: geosoft.gxapi.GXDOCU :members: .. _DOCU_OPEN: DOCU_OPEN constants ----------------------------------------------------------------------- How to open document .. autodata:: geosoft.gxapi.DOCU_OPEN_VIEW :annotation: .. 
autoattribute:: geosoft.gxapi.DOCU_OPEN_VIEW

.. autodata:: geosoft.gxapi.DOCU_OPEN_EDIT
        :annotation:

.. autoattribute:: geosoft.gxapi.DOCU_OPEN_EDIT
<file_sep>/geosoft/gxpy/utility.py
"""
Utility functions to support Geosoft Python scripts and modules.

.. note::

    Regression tests provide usage examples:
    `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_utility.py>`_

"""

import math
import decimal
import os
import sys
import numpy as np
import uuid as uid
import json
import datetime
import time
import subprocess
import binascii
from time import gmtime, strftime
from ._jdcal.jdcal import is_leap, gcal2jd, jd2gcal
# NOTE(review): distutils is removed from the standard library in Python 3.12;
# StrictVersion (used by check_version) will need to migrate to
# packaging.version -- confirm the supported runtime range.
from distutils.version import StrictVersion
from collections import OrderedDict
from ._xmltodict import xmltodict
import urllib.request

import geosoft
import geosoft.gxapi as gxapi

__version__ = geosoft.__version__


def _t(s):
    # Route user-facing strings through the gxpy translation layer.
    return geosoft.gxpy.system.translate(s)


# cached lookup tables (populated lazily elsewhere in this module)
_dummy_map = {}
_gx2np_type = {}
_np2gx_type = {}

# Assign to valid path to override the Geosoft temporary file folder
_temp_folder_override = None

# Assign callable to override unique ID generation
_uuid_callable = None

# Global deterministic uuid counter
d_uuid_count = 1


# check valid group/parameter string
def _validate_parameter(s):
    # '.' is the group/parameter separator, so it may not appear in a name
    if '.' in s:
        return False
    return True


class UtilityException(geosoft.GXRuntimeError):
    """
    Exceptions from :mod:`geosoft.gxpy.utility`.

    .. versionadded:: 9.1
    """
    pass


def check_version(v, raise_on_fail=True):
    """
    Check the minimum API version.

    :param v:               minimum version string required (ie "9.1" or "9.2.4")
    :param raise_on_fail:   if True (the default), raises an error if the version
                            check fails; if False, a failed check returns False
    :returns:               True if version is OK; False if not (only when
                            raise_on_fail is False, otherwise an exception is raised)

    .. note::
        A valid version consists of two or three dot-separated numeric components,
        with an optional development version tag on the end.  The development
        version tag consists of the letter 'b' (for beta) followed by a number.
If the numeric components of two version numbers are equal, then a development version will always be deemed earlier (lesser) than one without. The following are valid version numbers (shown in order of used for meeting minimum requirements): * 9.1 or 9.1.0 (they are equivalent) * 9.1.1b0 (development version) * 9.1.1 * 9.2 The following are examples of invalid version numbers: * 1 * 2.7.2.2 * 1.3.a4 * 1.3pl1 * 1.3c4 The rationale for this version numbering system is explained in the `distutils <https://docs.python.org/3/distutils/>`_ documentation. .. versionadded:: 9.1 """ try: if StrictVersion(__version__) >= StrictVersion(str(v)): return True else: if raise_on_fail: raise UtilityException(_t("GX Requires API {}, only {} installed.").format(v, __version__)) return False except ValueError: raise UtilityException(_t('Invalid version string "{}", expecting something like "{}".'.format(v, __version__))) def dict_from_lst(lst, ordered=False): """ Return a dictionary from a Geosoft `geosoft.gxapi.GXLST` instance. :param lst: `geosoft.gxapi.GXLST` instance :param ordered: True to return an OrderedDict :returns: python dictionary from a Geosoft GXLST .. versionadded:: 9.1 """ key = gxapi.str_ref() val = gxapi.str_ref() if ordered: dct = OrderedDict() else: dct = {} for item in range(lst.size()): lst.gt_item(0, item, key) lst.gt_item(1, item, val) dct[key.value] = val.value return dct def geosoft_xml_from_dict(d, pretty=True): """ Return a unicode XML string of a dictionary with geosoft namespace defined. :param d: dictionary :param pretty: True to indent with line-feeds for pretty-printing The tag xmlns="http://www.geosoft.com/schema/geo" is added to the root element. .. versionadded:: 9.4 """ if len(d) > 1: d = {'geosoft': d} return xml_from_dict(d, pretty, xmlns="http://www.geosoft.com/schema/geo") def xml_from_dict(d, pretty=True, xmlns=''): """ Return a unicode XML string of a dictionary. 
def merge_dict(d, d2):
    """
    Deep-merge the second dictionary into the first.

    Unlike the built-in ``dict.update()``, nested dictionaries are merged
    key-by-key rather than replaced wholesale.

    :param d:   dictionary to update (modified in place)
    :param d2:  new items to add or replace
    :return:    the merged dictionary ``d``

    .. versionadded:: 9.2
    """

    def _merge_into(target, source):
        # Recurse only when the incoming value is a dict and the key is
        # already present in the target; otherwise assign directly.
        for key, value in source.items():
            if key in target and isinstance(value, dict):
                _merge_into(target[key], value)
            else:
                target[key] = value

    _merge_into(d, d2)
    return d
If unable to directly convert simple string, decoder will clean-up the string by removing leading/trailing white space and converting 'o' or 'O' to numeral '0'. If decoder still fails, it will attempt to decode time and geographic formatted strings to return decimal hours or decimal degrees respectively: :date: | yyyy-mm-dd | yyyy/mm/dd | date will be fractional Gregorian year :time: | hh:mm:ss.ssAM/am | hh:mm:ss.ssPM/pm | PM/pm adds 12 hours (for example 2:30pm returns 14.5) :geographic: | [+/-]deg mm ss.ss[N/S/E/W/n/s/e/w] | [+/-]deg.mm.ss.ss[N/S/E/W/n/s/e/w] | S/s and W/w return negative of decoded value | | Example: | "-90 15 18.0" = "90 15 18.0W" = "90.15.18w", returns -90.255 | "14" = "14N" = "14.00.00" = "14 00 00", returns 14.0 Note that mm and ss.ss can go over 60.0 and will be decoded as minutes or seconds as presented. For example, "20 90 0.00" will return 21.5. .. versionadded:: 9.1 """ # nothing there, or a dummy if (not s) or (s[0] == '*'): return gxapi.rDUMMY # try floating point try: return float(s) except ValueError: # date if (len(s) >= 8) and (s[4:5] in '/-'): s = s[0:10] try: smonth = s[5:7] if smonth[-1] in '/-': smonth = smonth[0] sday = s[7:] else: sday = s[8:10] j1, j2 = gcal2jd(s[:4], smonth, sday) return yearFromJulianDay2(j1, j2) except: raise ValueError # tabs are spaces, trim leading white space ss = s.replace("\t", " ") ss = ss.lstrip() ss = ss.rstrip() # replace mistyped "o", "O" ss = ss.replace("o", "0") ss = ss.replace("O", "0") # nothing there, or a dummy if (not ss) or (ss[0] == '*'): return gxapi.rDUMMY try: return float(ss) except ValueError: # look for time or geographic format - two spaces become dots sg = ss.replace(' ', '.', 2) sg = sg.replace(':', '.', 2) if ' ' in sg: # ok, this string is messed up raise ValueError sg = sg.upper() twelve = 0.0 negsuf = negpre = 1.0 suf = sg[len(sg) - 1] if (suf == 'S') or (suf == 'W'): negsuf = -1.0 if sg[0] == '-': negpre = -1.0 sg = sg[1:] else: if 'PM' in sg: twelve = 12.0 sg = 
sg.rstrip("NSEWAMP") dms = sg.split('.', 2) degrees = float(dms[0]) if len(dms) > 1: minutes = float(dms[1]) else: minutes = 0.0 if len(dms) > 2: seconds = float(dms[2]) else: seconds = 0.0 return (degrees + (minutes + seconds / 60.0) / 60.0) * negpre * negsuf + twelve def rdecode(s): """ Geosoft string (number, date, time, geographic) conversion to a number, always works. :param s: string to decode :returns: decoded number, gxapi.rDUMMY if unable to decode the string See rdecode_err(string) for more details. .. versionadded:: 9.1 """ try: return rdecode_err(s) except ValueError: return gxapi.rDUMMY def decode(s, f): """ Decode a string (s) to a numpy format defined by string (f). :param s: string to decode :param f: format string: === ================================================ b Boolean i (signed) integer u unsigned integer f floating-point S string, interpreted as 'U' unicode a string, interpreted as 'U' unicode U unicode, requires length suffix, ie 'U1', 'U14' === ================================================ :times: Times in the form hh:mm:ss.sss return a decimal hour. :dates: Dates in form yyyy-mm-dd, yyyy-m-d, yy/mm/dd or yyyy/m/d will be decoded naturally and return a decimal Gregorian year. Other date formats will raise a ValueErr. :Errors: ========== ====================================== TypeError if unable to recognize type ValueError if there is a problem with the string. ========== ====================================== .. 
def gx_dtype(dtype):
    """
    Return the GX type constant for a numpy dtype.

    :param dtype:   anything acceptable to ``np.dtype()`` (e.g. ``np.float64``, ``'f8'``)
    :returns:       GX type for the numpy dtype; for unicode string dtypes a
                    negative number whose magnitude is the UTF-8 byte length
    :raises KeyError: if the dtype is not supported

    .. versionadded:: 9.1
    """
    global _np2gx_type
    if not bool(_np2gx_type):
        # Lazily build the lookup table on first use.
        # The deprecated aliases np.float/np.int (removed in numpy >= 1.24)
        # are intentionally not used; the explicit fixed-width types below
        # produce the same final key->value mapping on all platforms.
        _np2gx_type = {
            str(np.dtype(np.float64)): gxapi.GS_DOUBLE,
            str(np.dtype(np.float32)): gxapi.GS_FLOAT,
            str(np.dtype(np.int64)): gxapi.GS_LONG64,
            str(np.dtype(np.int32)): gxapi.GS_LONG,
            str(np.dtype(np.int16)): gxapi.GS_SHORT,
            str(np.dtype(np.int8)): gxapi.GS_BYTE,
            str(np.dtype(np.byte)): gxapi.GS_BYTE,
            str(np.dtype(np.uint8)): gxapi.GS_UBYTE,
            str(np.dtype(np.uint16)): gxapi.GS_USHORT,
            str(np.dtype(np.uint32)): gxapi.GS_ULONG,
            str(np.dtype(np.uint64)): gxapi.GS_ULONG64}
    dtype = np.dtype(dtype)
    if dtype.type is np.str_:
        # x4 to allow for the largest (4-byte) UTF-8 characters
        return -int(dtype.str[2:]) * 4
    return _np2gx_type[str(dtype)]
versionadded:: 9.1 """ global _gx2np_type if not bool(_gx2np_type): _gx2np_type = { gxapi.GS_DOUBLE: np.dtype(np.float64), gxapi.GS_FLOAT: np.dtype(np.float32), gxapi.GS_LONG64: np.dtype(np.int64), gxapi.GS_LONG: np.dtype(np.int32), gxapi.GS_BYTE: np.dtype(np.byte), gxapi.GS_SHORT: np.dtype(np.int16), gxapi.GS_UBYTE: np.dtype(np.uint8), gxapi.GS_USHORT: np.dtype(np.uint16), gxapi.GS_ULONG: np.dtype(np.uint32), gxapi.GS_ULONG64: np.dtype(np.uint64), gxapi.GS_FLOAT2D: np.dtype(np.float32), gxapi.GS_DOUBLE2D: np.dtype(np.float64), gxapi.GS_FLOAT3D: np.dtype(np.float32), gxapi.GS_DOUBLE3D: np.dtype(np.float64)} if gtype < 0: return np.dtype('U{}'.format(-gtype)) return _gx2np_type[gtype] def dtype_gx_dimension(gtype): """ :returns: numpy dtype and dimension of the type, 1, 2 or 3. The dimension indicates 1D, 2D or 3D data. .. versionadded:: 9.3.1 """ if (gtype == gxapi.GS_FLOAT2D) or (gtype == gxapi.GS_DOUBLE2D): return dtype_gx(gtype), 2 elif (gtype == gxapi.GS_FLOAT3D) or (gtype == gxapi.GS_DOUBLE3D): return dtype_gx(gtype), 3 return dtype_gx(gtype), 1 def gx_dtype_dimension(dtype, dimension=1): """ :returns: GX type for a numpy dtype, with dimensions 2 and 3 .. 
def gx_dummy(dtype):
    """
    Return the Geosoft dummy value for a value or a numpy type.

    :param dtype:   a numpy dtype, a type, or a value instance (the dummy for
                    the value's type is returned)
    :returns:       GX dummy for this data ('' for string types)
    :raises KeyError: if the dtype is not supported

    .. versionadded:: 9.2
    """
    global _dummy_map
    if not bool(_dummy_map):
        # Lazily build the lookup on first use.  The deprecated numpy aliases
        # np.float/np.int/np.uint (removed in numpy >= 1.24) are not used;
        # the explicit fixed-width types cover the same final keys.
        _dummy_map = {
            np.dtype(np.float64): gxapi.rDUMMY,
            np.dtype(np.float32): gxapi.rDUMMY,
            np.dtype(np.int8): gxapi.GS_S1DM,
            np.dtype(np.int16): gxapi.GS_S2DM,
            np.dtype(np.int32): gxapi.GS_S4DM,
            np.dtype(np.int64): gxapi.GS_S8DM,
            np.dtype(np.uint8): gxapi.GS_U1DM,
            np.dtype(np.uint16): gxapi.GS_U2DM,
            np.dtype(np.uint32): gxapi.GS_U4DM,
            np.dtype(np.uint64): gxapi.GS_U8DM,
            np.dtype(np.str_): ''}

    # Accept a value instance as well as a type/dtype.
    try:
        dtype = np.dtype(dtype)
    except TypeError:
        dtype = np.dtype(type(dtype))

    try:
        return _dummy_map[dtype]
    except KeyError:
        # Sized unicode dtypes ('U1', '<U20', ...) all use '' as the dummy.
        s = str(dtype)
        if s[0] == 'U' or s[1] == 'U':
            return ''
        raise
versionadded:: 9.2 """ if isinstance(data, np.ndarray): if not ((data.dtype == np.float64) or (data.dtype == np.float32)): return data else: gxdummy = gx_dummy(data.dtype) data[data == gxdummy] = np.nan return data else: if data == gxapi.rDUMMY: return np.nan else: return data def reg_from_dict(rd, max_size=4096, json_encode=True): """ `geosoft.gxapi.GXREG` instance from a dictionary :param rd: dictionary :param max_size: maximum "key=value" string size :param json_encode: if True, non-string values in the dictionary are converted to JSON strings and stored as "_JSON:json-string". False will encode non-string values as ``str(value)`` :returns: `geosoft.gxapi.GXREG` instance .. versionadded:: 9.1 """ reg = gxapi.GXREG.create(max_size) for key, value in rd.items(): if type(value) is not str: if json_encode: value = "_JSON:{}".format(json.dumps(value)) else: value = str(value) if len(key) + len(value) >= max_size: raise UtilityException(_t("\'key=value\' longer than maximum ({}):\n{}={}") .format(max_size, key, value)) reg.set(key, value) return reg def dict_from_reg(reg, ordered=False): """ dictionary from a `geosoft.gxapi.GXREG` instance :param reg: `geosoft.gxapi.GXREG` instance :param ordered: True to return and OrderedDict :returns: python dictionary from a Geosoft GXREG .. versionadded:: 9.1 """ key = gxapi.str_ref() val = gxapi.str_ref() if ordered: dct = OrderedDict() else: dct = {} for i in range(reg.entries()): reg.get_one(i, key, val) if val.value[:6] == "_JSON:": dct[key.value] = json.loads(val.value[6:]) else: dct[key.value] = val.value return dct def save_parameters(group='_', parms=None): """ Save parameters to the Project Parameter Block. Parameter group names and member names are converted to uppercase. :param group: parameter block group name, default is '_' :param parms: dict containing named parameter settings, must be specified .. 
versionadded:: 9.1 """ if not isinstance(parms, dict): raise UtilityException(_t('parms dictionary not defined.')) if not(_validate_parameter(group)): raise UtilityException(_t('Group name \'{}\' contains invalid character \'.\''.format(group))) for k, v in parms.items(): if not (_validate_parameter(k)): raise UtilityException(_t('Parameter name \'{}\' contains invalid character \'.\''.format(k))) # remove escaped characters because set_str() puts them back in s = json.dumps(v).replace('\\\\', '\\') gxapi.GXSYS.set_string(group.upper(), k, s) def get_parameters(group='_', parms=None, default=None): """ Get parameters from the Project Parameter Block. :param group: name in the parameter block group name :param parms: if specified only these keys are searched and the value is replaced by the found parameter. Parameter keys are not case sensitive, though if parms is not provided all returned keys will be upper-case. :param default: default value for parameters not found, ignored if parms is provided as a dict, in which case the current key:value settings will be unchanged. :returns: dictionary containing group parameters .. versionchanged:: 9.2.1 Now retains case on keys passed in to parms, which allows callers to maintain case. Note that if not specifying parms, the returned keys will always be upper-case. Fixed bug handling file name construction on Windows. .. 
versionadded:: 9.1 """ sv = gxapi.str_ref() p = {} if not(_validate_parameter(group)): raise UtilityException(_t('Group name \'{}\' contains invalid character \'.\''.format(group))) group = group.upper() if parms is not None: if not isinstance(parms, dict): for k in parms: p[k] = default parms = p for k, default in parms.items(): k_upper = k.upper() if gxapi.GXSYS.exist_string(group, k_upper): gxapi.GXSYS.gt_string(group, k_upper, sv) try: p[k] = json.loads(sv.value.replace('\\', '\\\\')) except ValueError: p[k] = sv.value else: p[k] = default else: h_reg = gxapi.GXREG.create(4096) gxapi.GXSYS.get_reg(h_reg, group) k = gxapi.str_ref() for i in range(h_reg.entries()): h_reg.get_one(i, k, sv) key = k.value.split('.')[1] try: p[key] = json.loads(sv.value) except ValueError: p[key] = sv.value return p def folder_workspace(): """ Return the Geosoft project folder name. .. versionadded:: 9.1 """ path = gxapi.str_ref() gxapi.GXSYS.get_path(gxapi.SYS_PATH_LOCAL, path) return path.value.replace('\\', os.sep) def folder_user(): """ Return the Geosoft user configurations folder name. .. versionadded:: 9.1 """ path = gxapi.str_ref() gxapi.GXSYS.get_path(gxapi.SYS_PATH_GEOSOFT_USER, path) return path.value.replace('\\', os.sep) def folder_temp(use_override=True): """ Return the Geosoft temporary folder name. :param use_override: True to use the _temp_folder_overide if it is defined (used by tests) .. Note:: If creating temporary files, better to use gx method :meth:`~gx.GXpy.temp_file`, which will create the temporary file in the GX-specific folder :mod:`~gx.GXpy.temp_folder`. .. versionadded:: 9.1 """ global _temp_folder_override if use_override and _temp_folder_override: return _temp_folder_override path = gxapi.str_ref() gxapi.GXSYS.get_path(gxapi.SYS_PATH_GEOTEMP, path) path = path.value.replace('\\', os.sep) return os.path.normpath(path) def normalize_file_name(fn): """ Normalize a file name string by replacing '\' with '/'. 
def uuid():
    """
    Return a uuid as a string.

    Normally a fresh uuid1.  A callable installed in the module global
    ``_uuid_callable`` overrides generation (used by tests), and when the GX
    system reports testing mode a deterministic sequence is produced instead.

    :returns: a uuid as a string

    .. versionadded:: 9.2
    """
    global _uuid_callable
    global d_uuid_count

    if _uuid_callable:
        # test/override hook takes precedence
        return _uuid_callable()

    if gxapi.GXSYS.testing_system_mode() == 1:
        # deterministic uuid sequence for reproducible test runs
        d_uuid = str(uid.UUID(fields=(0x12345678, 0x1234, 0x5678, 0x12, 0x34,
                                      0x000000000000 + d_uuid_count)))
        d_uuid_count += 1
        return d_uuid

    # fix: original wrapped the result in a redundant double str()
    return str(uid.uuid1())
def crc32(byte_buffer, crc=0):
    """
    Return the 32-bit CRC of a byte buffer.

    :param byte_buffer: byte buffer (anything supporting the Buffer Protocol)
    :param crc:         seed crc; pass a previous result to accumulate a
                        running crc across several buffers
    :returns:           32-bit CRC as an unsigned integer

    .. versionadded:: 9.2
    """
    return binascii.crc32(byte_buffer, crc)
def str_significant(value, n, mode=0):
    """
    Return a formatted string to n significant figures.

    :param value:   value to format
    :param n:       number of significant digits
    :param mode:    0 round, 1 ceiling, -1 floor
    :returns:       string to n significant figures
    """

    if value == 0.0:
        return '0'

    # work in Decimal (via str) to avoid binary floating-point noise
    value = decimal.Decimal(str(value))
    if value < 0.0:
        # remember the sign and continue with the magnitude
        mult = decimal.Decimal(-1)
        value = value * -1
    else:
        mult = decimal.Decimal(1)

    # fixed-point rendering, then strip spaces/zeros to isolate the digits
    vstr = '{:33.16f}'.format(value).strip(' 0')
    # decimal exponent: position of the decimal point within the digit string
    power = vstr.index('.')
    vstr = vstr[:power] + vstr[power + 1:]

    # discount leading zeros (values < 1) from the exponent
    for i, c in enumerate(vstr):
        if c != '0':
            power -= i
            break
    vstr = vstr.strip('0')

    significant = len(vstr)
    if significant <= n:
        # already at or below the requested precision - return unchanged
        s = str(value * decimal.Decimal(mult))
        if s.endswith('.0'):
            return s[:-2]
        else:
            return s

    # place a decimal point after the n-th digit, then round/ceil/floor
    v = float(vstr[:n] + '.' + vstr[n:])
    if mode == 0:
        vstr = str(round(v))
    elif mode == 1:
        vstr = str(math.ceil(v))
    else:
        vstr = str(math.floor(v))

    # scale back to the original magnitude and restore the sign
    return str(decimal.Decimal(vstr) * mult * (10 ** decimal.Decimal(power - n)))
def unique_name(name, invalid=None, separator='()', maxversion=1000):
    """
    Build a unique name or file name.

    :param name:        seed name; returned unchanged if invalid(name) is False
    :param invalid:     callback invalid(name) that returns True while the name
                        is invalid (e.g. already exists).  If not provided a
                        simple os.path.isfile(name) test is used.
    :param separator:   single or two-character separator.  The unique name is
                        constructed by appending an increasing number to the
                        seed name until a valid name is found.  By default the
                        number is enclosed in parentheses (e.g. some_name(4).txt).
                        If a single separator character is given the number is
                        separated from the name by that character
                        (e.g. separator='_' might return some_name_4.txt).
    :param maxversion:  maximum number to try, default 1000.  Protects against
                        an infinite loop should the callback never succeed.
    :return:            unique name

    .. versionadded:: 9.3.1
    """

    def parts():
        # Split the current candidate into (path+base, version number, extension).
        path, file = os.path.split(name)
        base, ex = os.path.splitext(file)
        isep = base.rfind(separator[0])
        if isep == -1:
            n = 0
        else:
            current_base = base
            if len(separator) > 1:
                if base[-1] == separator[1]:
                    base = base[:-1]
            try:
                n = int(base[isep + 1:])
                base = base[:isep]
            except ValueError:
                # trailing text after the separator is not a version number
                n = 0
                base = current_base
        # fix: os.path.join restores the path separator that os.path.split
        # removed (the original 'path + base' collapsed 'dir/f' into 'dirf')
        return os.path.join(path, base), n, ex

    if invalid is None:
        invalid = os.path.isfile

    while invalid(name):
        path_name, number, ext = parts()
        number += 1
        if number >= maxversion:
            raise UtilityException(_t("Cannot determine a unique name in {} tries.").format(maxversion))
        name = path_name + separator[0] + str(number)
        if len(separator) > 1:
            name = name + separator[1]
        name = name + ext
    return name
def delete_folder(folder_name, age=None, raise_on_error=False):
    """
    Delete a folder if all files and sub-folders are accessible and deletable.

    :param folder_name:     name of the folder
    :param age:             age in seconds relative to the current date/time
    :param raise_on_error:  True to raise an error if unsuccessful, otherwise just returns False
    :return:                True if successful

    .. versionadded:: 9.3.1
    """
    if is_path_locked(folder_name, age=age):
        if raise_on_error:
            raise UtilityException(_t("Folder `{}` is locked.").format(folder_name))
        return False

    try:
        # Recurse into sub-folders first so the tree empties bottom-up.
        # fix: os.listdir returns bare names - join with folder_name so the
        # checks/removals do not resolve against the current working directory.
        for item in os.listdir(folder_name):
            full_path = os.path.join(folder_name, item)
            if os.path.isdir(full_path):
                delete_folder(full_path, age=age)
        # Remove files, honouring the minimum age when one is given.
        for item in os.listdir(folder_name):
            full_path = os.path.join(folder_name, item)
            if age and file_age(full_path) > age:
                os.remove(full_path)
        os.removedirs(folder_name)
    except OSError:
        # IOError is an alias of OSError in Python 3
        if raise_on_error:
            raise
        # fix: report failure instead of returning True after a swallowed error
        return False
    return True
versionadded:: 9.3.1 """ base, ext = os.path.splitext(j_file) if not ext: j_file = j_file + '.ipynb' data = json.loads(open(j_file).read()) toc = '' i = 1 for k in data['cells']: if k['cell_type'] == 'markdown': for sl in k['source']: sl = sl.strip() if sl and sl[0] == '#': sl = sl[start_level:] if sl[0] == '#': indent, label = sl.split(' ', 1) if len(indent) <= max_depth: if label[-1] == '\n': label = label[:-1] if numbered and len(indent) == 1: lead = str(i) + '. [' i += 1 else: lead = str(' ' * len(indent)) + '- [' toc = toc + prefix + lead + label + '](#' + label.replace(' ', '-') + ')\n' return toc def vector_normalize(v): """ Normalise (Euclidean) the last axis of a numpy array :param v: numpy vector array, any dimension :return: array normalized, 0 vectors will be np.nan .. versionadded:: 9.3.1 """ if v.ndim < 2: return np.array((1.,)) vs = v.shape v = v.reshape((-1, v.shape[-1])) mag = np.linalg.norm(v, axis=1) mag[mag == 0.] = np.nan return (v.T * np.reciprocal(mag)).T.reshape(vs) def dict_from_http_response_text(text, prune_xml_root=True, object_hook=None, object_pairs_hook=None, **kw): """ Decode http response text to a dictionary. Response may be json or xml. :param text: http response.text from requests module response objects :param prune_xml_root: True to remove the xml root, False to keep it :param object_hook: json.loads decoder hook for objects :param object_pairs_hook: json.loads decoder for object pairs (see json.loads documentation). :param kw: arguments passed to `json.loads()` :return: dictionary of content If the content is xml, the root node is removed and a dictionary is constructed from the content above the root node. If one of the hook functions is present, the dictionary is converted to json and a dictionary is reconstructed using the hooks. .. 
versionadded:: 9.4 """ try: td = json.loads(text, object_hook=object_hook, object_pairs_hook=object_pairs_hook, **kw) except Exception as ejson: try: td = dict_from_xml(text) except Exception as exml: raise UtilityException('json error: {}\nxml_error: {}\ntext:\n{}'. format(str(ejson), str(exml), text)) if prune_xml_root: # TODO: discuss with @Ryan pruning xml root wrt general http usage. td = td[list(td.keys())[0]] if object_hook or object_pairs_hook: td = json.loads(json.dumps(td), object_hook=object_hook, object_pairs_hook=object_pairs_hook, **kw) return td <file_sep>/docs/GXGRID3D.rst .. _GXGRID3D: GXGRID3D class ================================== .. autoclass:: geosoft.gxapi.GXGRID3D :members: .. _GRID3D_TYPE: GRID3D_TYPE constants ----------------------------------------------------------------------- Type of Voxset .. autodata:: geosoft.gxapi.GRID3D_DOUBLE :annotation: .. autoattribute:: geosoft.gxapi.GRID3D_DOUBLE .. autodata:: geosoft.gxapi.GRID3D_VECTOR :annotation: .. autoattribute:: geosoft.gxapi.GRID3D_VECTOR .. autodata:: geosoft.gxapi.GRID3D_THEMATIC :annotation: .. autoattribute:: geosoft.gxapi.GRID3D_THEMATIC <file_sep>/docs/GXMULTIGRID3D.rst .. _GXMULTIGRID3D: GXMULTIGRID3D class ================================== .. autoclass:: geosoft.gxapi.GXMULTIGRID3D :members: .. _DIRECTION3D: DIRECTION3D constants ----------------------------------------------------------------------- Direction in 3D .. autodata:: geosoft.gxapi.DIRECTION3D_XYZ :annotation: .. autoattribute:: geosoft.gxapi.DIRECTION3D_XYZ .. autodata:: geosoft.gxapi.DIRECTION3D_YXZ :annotation: .. autoattribute:: geosoft.gxapi.DIRECTION3D_YXZ .. autodata:: geosoft.gxapi.DIRECTION3D_XZY :annotation: .. autoattribute:: geosoft.gxapi.DIRECTION3D_XZY .. autodata:: geosoft.gxapi.DIRECTION3D_YZX :annotation: .. autoattribute:: geosoft.gxapi.DIRECTION3D_YZX .. autodata:: geosoft.gxapi.DIRECTION3D_ZXY :annotation: .. autoattribute:: geosoft.gxapi.DIRECTION3D_ZXY .. 
autodata:: geosoft.gxapi.DIRECTION3D_ZYX :annotation: .. autoattribute:: geosoft.gxapi.DIRECTION3D_ZYX .. _GOCAD_ORIENTATION: GOCAD_ORIENTATION constants ----------------------------------------------------------------------- GOCAD Orientations .. autodata:: geosoft.gxapi.GOCAD_ORIENTATIONS_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.GOCAD_ORIENTATIONS_NORMAL .. autodata:: geosoft.gxapi.GOCAD_ORIENTATIONS_INVERTED :annotation: .. autoattribute:: geosoft.gxapi.GOCAD_ORIENTATIONS_INVERTED .. autodata:: geosoft.gxapi.GOCAD_ORIENTATIONS_NORMAL_ZFIRST :annotation: .. autoattribute:: geosoft.gxapi.GOCAD_ORIENTATIONS_NORMAL_ZFIRST .. autodata:: geosoft.gxapi.GOCAD_ORIENTATIONS_INVERTED_ZFIRST :annotation: .. autoattribute:: geosoft.gxapi.GOCAD_ORIENTATIONS_INVERTED_ZFIRST .. _VECTOR_IMPORT: VECTOR_IMPORT constants ----------------------------------------------------------------------- Vector grid3d import direction .. autodata:: geosoft.gxapi.VECTOR_IMPORT_XYZ :annotation: .. autoattribute:: geosoft.gxapi.VECTOR_IMPORT_XYZ .. autodata:: geosoft.gxapi.VECTOR_IMPORT_UVW :annotation: .. autoattribute:: geosoft.gxapi.VECTOR_IMPORT_UVW .. autodata:: geosoft.gxapi.VECTOR_IMPORT_AID :annotation: .. autoattribute:: geosoft.gxapi.VECTOR_IMPORT_AID .. _FILTER3D: FILTER3D constants ----------------------------------------------------------------------- Voxel filter type .. autodata:: geosoft.gxapi.FILTER3D_FILE :annotation: .. autoattribute:: geosoft.gxapi.FILTER3D_FILE .. autodata:: geosoft.gxapi.FILTER3D_SMOOTHING :annotation: .. autoattribute:: geosoft.gxapi.FILTER3D_SMOOTHING .. autodata:: geosoft.gxapi.FILTER3D_LAPLACE :annotation: .. autoattribute:: geosoft.gxapi.FILTER3D_LAPLACE .. autodata:: geosoft.gxapi.FILTER3D_X_GRADIENT :annotation: .. autoattribute:: geosoft.gxapi.FILTER3D_X_GRADIENT .. autodata:: geosoft.gxapi.FILTER3D_Y_GRADIENT :annotation: .. autoattribute:: geosoft.gxapi.FILTER3D_Y_GRADIENT .. autodata:: geosoft.gxapi.FILTER3D_Z_GRADIENT :annotation: .. 
autoattribute:: geosoft.gxapi.FILTER3D_Z_GRADIENT .. autodata:: geosoft.gxapi.FILTER3D_TOTAL_GRADIENT :annotation: .. autoattribute:: geosoft.gxapi.FILTER3D_TOTAL_GRADIENT .. _MULTIGRID3D_DIRECTGRID_METHOD: MULTIGRID3D_DIRECTGRID_METHOD constants ----------------------------------------------------------------------- How to calculate the cell values for direct gridding. .. autodata:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_MINIMUM :annotation: .. autoattribute:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_MINIMUM .. autodata:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_MAXIMUM :annotation: .. autoattribute:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_MAXIMUM .. autodata:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_MEAN :annotation: .. autoattribute:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_MEAN .. autodata:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_ITEMS :annotation: .. autoattribute:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_ITEMS .. autodata:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_DUMMYITEMS :annotation: .. autoattribute:: geosoft.gxapi.MULTIGRID3D_DIRECTGRID_DUMMYITEMS <file_sep>/geosoft/gxapi/GXDGW.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXLST import GXLST ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDGW(gxapi_cy.WrapDGW): """ GXDGW class. Provides access to dialog boxes for user I/O. You can use this class to store to, or retrieve information from the current workspace parameter block via dialog boxes **Note:** Setting Fonts in GX dialogs. By default, "new look" GX dialogs uses the "Tahoma" font. This font can be overridden by updating the application settings. 
This can be done programmatically using the `GXSYS.global_set <geosoft.gxapi.GXSYS.global_set>` function using the following parameters: MONTAJ.GX_FONT="Font_name" This sets the default font to "Font_name". It applies to text in all components of the dialog. Additional customization of individual components can be accomplished using the following parameters: MONTAJ.GX_CAPTION_FONT="Caption_Font": Font for the field captions (labels) MONTAJ.GX_BUTTON_FONT="Button_Font" : Font for buttons, including the "Browse" button MONTAJ.GX_TITLE_FONT="Title_Font" : Font for special titles (see `set_title <geosoft.gxapi.GXDGW.set_title>`). The font used for the text in edit windows remains the default, or the value specified using MONTAJ.GX_FONT. Note that the "OK" button, and the Title, use "Bold" versions of the specified font. If the bolded version does not exist as a normal font, then the operating system may provide its own alternative which may not appear the same as you expect. Before version 6.2. there used to be a parameter, MONTAJ.GX_CHARSET, that affected characters above ASCII 127. 6.2. introduced Unicode in the core montaj engine that eliminated the need for such a setting. All strings on the GX API level are encoded in :ref:`UTF8` during runtime which makes it possible to represent all possible characters without using character sets. """ def __init__(self, handle=0): super(GXDGW, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDGW <geosoft.gxapi.GXDGW>` :returns: A null `GXDGW <geosoft.gxapi.GXDGW>` :rtype: GXDGW """ return GXDGW() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, name): """ This method creates a Dialog window from a specified resource. The Resource is loaded into memory but not displayed. 
:param name: Name of the Window Resource to use :type name: str :returns: Handle to the `GXDGW <geosoft.gxapi.GXDGW>` object. :rtype: GXDGW .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapDGW._create(GXContext._get_tls_geo(), name.encode()) return GXDGW(ret_val) def get_info_meta(self, obj, dlg_obj_type, meta, meta_obj, meta_attrib): """ Copies the Dialog information to a `GXMETA <geosoft.gxapi.GXMETA>` attribute. :param obj: Dialog Object :param dlg_obj_type: :ref:`DGW_OBJECT` :param meta_obj: Object :param meta_attrib: Attribute :type obj: int :type dlg_obj_type: int :type meta: GXMETA :type meta_obj: int :type meta_attrib: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._get_info_meta(obj, dlg_obj_type, meta, meta_obj, meta_attrib) def get_info_sys(self, id, info, group, field): """ This method uses the information in a Dialog box to set a `GXSYS <geosoft.gxapi.GXSYS>` variable. :param id: Dialog Object :param info: :ref:`DGW_OBJECT` :param group: Group name of the Variable :param field: Variable name :type id: int :type info: int :type group: str :type field: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._get_info_sys(id, info, group.encode(), field.encode()) def get_list(self, id): """ This method retrieves the list (`GXLST <geosoft.gxapi.GXLST>`) object associated with a Dialog object. :param id: Dialog Object :type id: int :returns: The List Object :rtype: GXLST .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_list(id) return GXLST(ret_val) def gt_info(self, id, info, buff): """ This method fills the specified string with the text from the text object specified. :param id: Handle to the TEXT Object :param info: :ref:`DGW_OBJECT` :param buff: Where to place the String :type id: int :type info: int :type buff: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ buff.value = self._gt_info(id, info, buff.value.encode()) def run_dialogue(self): """ This method runs the Dialog window. :returns: The Exit Code of the Dialog window. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._run_dialogue() return ret_val def set_info(self, id, info, buff): """ This method sets the string of a text object. If the string is too long it will be truncated. :param id: Handle to the TEXT Object :param info: :ref:`DGW_OBJECT` :param buff: String to set the Text To :type id: int :type info: int :type buff: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_info(id, info, buff.encode()) def set_info_meta(self, obj, dlg_obj_type, meta, meta_obj, meta_attrib): """ This sets a text object to the text found in a `GXMETA <geosoft.gxapi.GXMETA>` attribute. 
:param obj: Dialog Object :param dlg_obj_type: :ref:`DGW_OBJECT` :param meta_obj: Object :param meta_attrib: Attribute :type obj: int :type dlg_obj_type: int :type meta: GXMETA :type meta_obj: int :type meta_attrib: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_info_meta(obj, dlg_obj_type, meta, meta_obj, meta_attrib) def set_info_sys(self, id, info, group, field): """ This sets a text object to the text found in a system parameter variable. If the variable has not been set, the text is not set. :param id: Dialog Object :param info: :ref:`DGW_OBJECT` :param group: Group name of the Variable :param field: Variable name :type id: int :type info: int :type group: str :type field: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_info_sys(id, info, group.encode(), field.encode()) def set_title(self, title): """ Changes the title of the dialog. :param title: Title to set :type title: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** A "Special", additional title can be added to a dialog by passing a title to `set_title <geosoft.gxapi.GXDGW.set_title>` with the following syntax: ``"Window Title\\nAdditional Title"`` In the title argument, a line break character ``'\\n'`` is used to separate the parts. The window title free_appears as the title in the upper bar of the dialog. 
The additional title free_appears below this, in the main body of the dialog, and is separated from the rest of the fields by a horizontal line. It is printed in the bold version of the default font (or of the special font specified using the MONTAJ.GX_TITLE_FONT parameter noted above in "Setting Fonts in GX dialogs." """ self._set_title(title.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXMAPL.rst .. _GXMAPL: GXMAPL class ================================== .. autoclass:: geosoft.gxapi.GXMAPL :members: <file_sep>/requirements.txt numpy>=1.11 jdcal <file_sep>/docs/geosoft.gxapi.rst Geosoft GX API ============== The complete Geosoft GX API is exposed to python developers through the :doc:`geosoft.gxapi </geosoft.gxapi.classes>` package. This includes all low-level classes and function calls that expose almost all Geosoft functionality to a developer. The GXContext class ------------------- Before calling any other API function from a stand-alone script (a script that is not run as an extension from Geosoft Desktop), a GX Context must be created and held. This can be done by creating an instance of :class:`geosoft.gxapi.GXContext` or an instance of :class:`geosoft.gxpy.gx.GXpy`, which handles the details of ``GXContext`` for you. We recommend using :class:`geosoft.gxpy.gx.GXpy` unless you have chosen to work only with the low-level :mod:`geosoft.gxapi`. Note that the low-level module :mod:`geosoft.gxapi`, although more complex, is more consistent across versions. Creating a GX context requires any version of **Geosoft Desktop** to be installed on the target system, from which the library dll's are located and loaded. **Geosoft Desktop** can be downloaded from `Geosoft Downloads <https://my.geosoft.com/downloads>`_. 
It is possible to redirect the location of dlls used by setting the **GX_GEOSOFT_BIN_PATH** environment variable to reference the location of the Geosoft binary files. Refer to the `GX Developer Guide <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/overview>`_ for more information. .. autoclass:: geosoft.gxapi.GXContext :members: GXAPI Instance Resource Garbage Collection ------------------------------------------ The GXAPI Python garbage collection is not in reverse-order of creation, which can lead to resources that depend on prior resources being destroyed out of order. For example, the following will assert on exit: .. code:: vox = gxapi.GXVOX.create('v1.geosoft_voxel') voxe = gxapi.GXVOXE.create(vox) This is because Python garbage collection releases the **vox** instance before the **voxe** instance. To avoid this problem, when creating a class instance that depends on a prior class instance, explicitly free the dependent instance as follows: .. code:: vox = gxapi.GXVOX.create('v1.geosoft_voxel') voxe = gxapi.GXVOXE.create(vox) # voxe depends on vox # ... do some work with voxe ... # release the voxe resource to ensure it is released before vox del voxe Helper classes to pass immutable values by reference ---------------------------------------------------- Each of the classes below can be used to pass these immutable types by reference to the GX API. Instances of the objects have a :code:`value` property that holds the reference to the immutable object. .. autoclass:: geosoft.gxapi.str_ref() .. autoclass:: geosoft.gxapi.bool_ref() .. autoclass:: geosoft.gxapi.int_ref() .. autoclass:: geosoft.gxapi.float_ref() Default instances will be initialized with dummy values for :code:`float_ref` and :code:`int_ref`, an empty string for :code:`str_ref` and :code:`False` for :code:`bool_ref`. One can also set the value during initialization or by assigning to the :code:`value` property. Example usage: ..
code-block:: python import geosoft.gxapi as gxapi ctx = gxapi.GXContext.create("sample", "1.0") _3dn = gxapi.GX3DN.create(ctx) # the GX3DN get_point_of_view() method requires float_ref class to return values distance = gxapi.float_ref() # value property will be initially be gxapi.rDUMMY rot1 = gxapi.float_ref(1.01) # value property will be equal to 1.01 rot2 = gxapi.float_ref(2.0) # value property will be equal to 2.0 rot2.value = 4 # value propertyis changed to 4.0 # the values in the objects will be changed to the current point of view _3dn.get_point_of_view(distance, rot1, rot2) print(distance.value) # value property will now be 8.0 print(rot1.value) # value property will now be 0.0 print(rot2.value) # value property will now be 0.0 Exceptions ---------- .. autoexception:: geosoft.gxapi.GXCancel .. autoexception:: geosoft.gxapi.GXExit .. autoexception:: geosoft.gxapi.GXAPIError .. autoexception:: geosoft.gxapi.GXError <file_sep>/docs/GXMETA.rst .. _GXMETA: GXMETA class ================================== .. autoclass:: geosoft.gxapi.GXMETA :members: .. _H_META_INVALID_TOKEN: H_META_INVALID_TOKEN constants ----------------------------------------------------------------------- `GXMETA <geosoft.gxapi.GXMETA>` Invalid Token .. autodata:: geosoft.gxapi.H_META_INVALID_TOKEN :annotation: .. autoattribute:: geosoft.gxapi.H_META_INVALID_TOKEN .. _META_CORE_ATTRIB: META_CORE_ATTRIB constants ----------------------------------------------------------------------- `GXMETA <geosoft.gxapi.GXMETA>` Core Attributes .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Class_Description :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Class_Description .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Class_Application :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Class_Application .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Class_ReferenceURL :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Class_ReferenceURL .. 
autodata:: geosoft.gxapi.META_CORE_ATTRIB_Class_Type :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Class_Type .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_Description :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_Description .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_ReferenceURL :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_ReferenceURL .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_FixedSize :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_FixedSize .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_ByteOrder :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_ByteOrder .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_MinValue :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_MinValue .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_MaxValue :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_MaxValue .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_MaxSize :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_MaxSize .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_ObjectClass :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_ObjectClass .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_hCreatS_Func :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_hCreatS_Func .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_sSerial_Func :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_sSerial_Func .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Type_Enum_Value :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Type_Enum_Value .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Attrib_Visible :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Attrib_Visible .. autodata:: geosoft.gxapi.META_CORE_ATTRIB_Attrib_Editable :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Attrib_Editable .. 
autodata:: geosoft.gxapi.META_CORE_ATTRIB_Attrib_FlatName :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_ATTRIB_Attrib_FlatName .. _META_CORE_CLASS: META_CORE_CLASS constants ----------------------------------------------------------------------- Meta Core Class Objects .. autodata:: geosoft.gxapi.META_CORE_CLASS_Base :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_Base .. autodata:: geosoft.gxapi.META_CORE_CLASS_Predefined :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_Predefined .. autodata:: geosoft.gxapi.META_CORE_CLASS_Attributes :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_Attributes .. autodata:: geosoft.gxapi.META_CORE_CLASS_ClassAttributes :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_ClassAttributes .. autodata:: geosoft.gxapi.META_CORE_CLASS_TypeAttributes :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_TypeAttributes .. autodata:: geosoft.gxapi.META_CORE_CLASS_ObjectAttributes :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_ObjectAttributes .. autodata:: geosoft.gxapi.META_CORE_CLASS_EnumAttributes :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_EnumAttributes .. autodata:: geosoft.gxapi.META_CORE_CLASS_AttributeAttributes :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_AttributeAttributes .. autodata:: geosoft.gxapi.META_CORE_CLASS_ItemAttributes :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_ItemAttributes .. autodata:: geosoft.gxapi.META_CORE_CLASS_Types :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_Types .. autodata:: geosoft.gxapi.META_CORE_CLASS_Enums :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_Enums .. autodata:: geosoft.gxapi.META_CORE_CLASS_Enum_Bool :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_Enum_Bool .. autodata:: geosoft.gxapi.META_CORE_CLASS_Enum_ClassType :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_CLASS_Enum_ClassType .. 
_META_CORE_TYPE: META_CORE_TYPE constants ----------------------------------------------------------------------- `GXMETA <geosoft.gxapi.GXMETA>` Core Data Types .. autodata:: geosoft.gxapi.META_CORE_TYPE_Bytes :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_Bytes .. autodata:: geosoft.gxapi.META_CORE_TYPE_Bool :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_Bool .. autodata:: geosoft.gxapi.META_CORE_TYPE_I1 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_I1 .. autodata:: geosoft.gxapi.META_CORE_TYPE_U1 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_U1 .. autodata:: geosoft.gxapi.META_CORE_TYPE_I2 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_I2 .. autodata:: geosoft.gxapi.META_CORE_TYPE_U2 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_U2 .. autodata:: geosoft.gxapi.META_CORE_TYPE_I4 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_I4 .. autodata:: geosoft.gxapi.META_CORE_TYPE_U4 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_U4 .. autodata:: geosoft.gxapi.META_CORE_TYPE_I8 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_I8 .. autodata:: geosoft.gxapi.META_CORE_TYPE_U8 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_U8 .. autodata:: geosoft.gxapi.META_CORE_TYPE_R4 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_R4 .. autodata:: geosoft.gxapi.META_CORE_TYPE_R8 :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_R8 .. autodata:: geosoft.gxapi.META_CORE_TYPE_String :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_String .. autodata:: geosoft.gxapi.META_CORE_TYPE_Object :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_Object .. autodata:: geosoft.gxapi.META_CORE_TYPE_Enum :annotation: .. autoattribute:: geosoft.gxapi.META_CORE_TYPE_Enum .. autodata:: geosoft.gxapi.META_CORE_TYPE_ClassType :annotation: .. 
autoattribute:: geosoft.gxapi.META_CORE_TYPE_ClassType <file_sep>/geosoft/gxapi/GXEUL3.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXEUL3(gxapi_cy.WrapEUL3): """ GXEUL3 class. This is a specialized class which performs 3D Euler deconvolution for potential field interpretation. """ def __init__(self, handle=0): super(GXEUL3, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXEUL3 <geosoft.gxapi.GXEUL3>` :returns: A null `GXEUL3 <geosoft.gxapi.GXEUL3>` :rtype: GXEUL3 """ return GXEUL3() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def destr(self): """ Destroys a `GXEUL3 <geosoft.gxapi.GXEUL3>` object. .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._destr() @classmethod def creat(cls, imgt, imgtx, imgty, imgtz, wnd_siz, gi, tolrnc, max_dis, obs_flg, obs_hght, obs_elev): """ Creates an `GXEUL3 <geosoft.gxapi.GXEUL3>` object. :param imgt: Image of grid T :param imgtx: Image of grid Tx :param imgty: Image of grid Ty :param imgtz: Image of grid Tz :param wnd_siz: Window size (maximum 20) :param gi: Geometric index, from 0.0 to 3.0 :param tolrnc: Max tolerance to allow (percentage) :param max_dis: Max dist. 
acceptable (0 for infinite) :param obs_flg: ObsFlg Height (0) or Elevation (1) :param obs_hght: Height of observation plane :param obs_elev: Elevation of observation plane :type imgt: GXIMG :type imgtx: GXIMG :type imgty: GXIMG :type imgtz: GXIMG :type wnd_siz: int :type gi: float :type tolrnc: float :type max_dis: float :type obs_flg: int :type obs_hght: float :type obs_elev: float :returns: `GXEUL3 <geosoft.gxapi.GXEUL3>` Object :rtype: GXEUL3 .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapEUL3._creat(GXContext._get_tls_geo(), imgt, imgtx, imgty, imgtz, wnd_siz, gi, tolrnc, max_dis, obs_flg, obs_hght, obs_elev) return GXEUL3(ret_val) def get_result(self, vv_r, pi_res_field): """ Get a result field `GXVV <geosoft.gxapi.GXVV>` from `GXEUL3 <geosoft.gxapi.GXEUL3>` object :param vv_r: `GXVV <geosoft.gxapi.GXVV>` to store the result :param pi_res_field: :ref:`EUL3_RESULT` :type vv_r: GXVV :type pi_res_field: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_result(vv_r, pi_res_field) def write(self, out_fil): """ Write the results of `GXEUL3 <geosoft.gxapi.GXEUL3>` object to output file. :param out_fil: Output File Name :type out_fil: str .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._write(out_fil.encode()) @classmethod def ex_euler_derive(cls, vv_dist, pr_dx, vv_mag, length, vv_gx, vv_gz, max_sol): """ Calculates gradients :param vv_dist: Input distance :param pr_dx: Sample Interval :param vv_mag: Input mag :param length: SampleCount :param vv_gx: Horizontal Gradient out :param vv_gz: Vertical Gradient out :param max_sol: Output array size limit :type vv_dist: GXVV :type pr_dx: float :type vv_mag: GXVV :type length: int :type vv_gx: GXVV :type vv_gz: GXVV :type max_sol: int :returns: 0 for OK, -1 for Error :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapEUL3._ex_euler_derive(GXContext._get_tls_geo(), vv_dist, pr_dx, vv_mag, length, vv_gx, vv_gz, max_sol) return ret_val @classmethod def ex_euler_calc(cls, typ, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20): """ Does the exeuler depth calculations :param typ: Solution type flag (0 for contacts, 1 for dykes) :param p2: Structural index value (used only when generating dykes) :param p3: Window length :param p4: Field strength in nT :param p5: Inclination :param p6: Declination :param p7: Profile azimuth wrt north :param p8: Minimum depth for returned solutions :param p9: Maximum depth for returned solutions :param p10: Percentage error allowed before rejection :param p11: Number of points in profile :param p12: Array of point distances along profile :param p13: Array of observed values :param p14: Array of horizontal derivative values. Can be NULL for calculated :param p15: Array of vertical derivative values. 
        Can be NULL for calculated
        :param p16: Length of solutions arrays passed in
        :param p17: The profile distance for each solution
        :param p18: The depth for each solution
        :param p19: The dip for each solution
        :param p20: The susceptibility for each solution
        :type typ: int
        :type p2: float
        :type p3: int
        :type p4: float
        :type p5: float
        :type p6: float
        :type p7: float
        :type p8: float
        :type p9: float
        :type p10: float
        :type p11: int
        :type p12: GXVV
        :type p13: GXVV
        :type p14: GXVV
        :type p15: GXVV
        :type p16: int
        :type p17: GXVV
        :type p18: GXVV
        :type p19: GXVV
        :type p20: GXVV

        :returns: >0 for OK, -1 for Error
        :rtype: int

        .. versionadded:: 9.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = gxapi_cy.WrapEUL3._ex_euler_calc(GXContext._get_tls_geo(), typ, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13, p14, p15, p16, p17, p18, p19, p20)
        return ret_val



### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer<file_sep>/gxapi_perf_test.py
# Micro-benchmark comparing the old gxapi wrappers (geosoft.gxapi_old) against the
# current ones (geosoft.gxapi), for both the public wrapper calls and the raw
# underscore-prefixed calls on the Cython extension objects.
import time
import geosoft.gxapi as gxa
import geosoft.gxapi_old.gxapi as gxo
import geosoft.gxapi.gxapi_cy as gxc


def simple_set_get_doubles_baremetal(dn):
    # Times 1M iterations of raw (underscore) get/set of three doubles on a GX3DN.
    print("simple_set_get_doubles_baremetal start")
    tstart = time.perf_counter()
    for i in range(0, 1000000):
        xv, yv, zv = dn._get_scale(0, 0, 0)
        dn._set_scale(2.0, 3.0, 4.0)
    elapsed = time.perf_counter() - tstart
    print("simple_set_get_doubles_baremetal end: {}".format(elapsed))


def simple_set_get_doubles(dn, x, y, z):
    # Times 1M iterations of the public wrapper get/set using *_ref out-parameters.
    print("simple_set_get_doubles start")
    tstart = time.perf_counter()
    for i in range(0, 1000000):
        dn.get_scale(x, y, z)
        dn.set_scale(2.0, 3.0, 4.0)
    elapsed = time.perf_counter() - tstart
    print("simple_set_get_doubles end: {}".format(elapsed))


def simple_set_get_string_baremetal(dn):
    # Times 1M iterations of raw (underscore) string get/set on a GX3DN.
    print("simple_set_get_string_baremetal start")
    tstart = time.perf_counter()
    for i in range(0, 1000000):
        s = dn._get_axis_font("".encode())
        dn._set_axis_font("NewFont".encode())
    elapsed = time.perf_counter() - tstart
    print("simple_set_get_string_baremetal end: {}".format(elapsed))


def simple_set_get_string(dn, s):
    # Times 1M iterations of the public wrapper string get/set using a str_ref.
    print("simple_set_get_string start")
    tstart = time.perf_counter()
    for i in range(0, 1000000):
        dn.get_axis_font(s)
        dn.set_axis_font("NewFont")
    elapsed = time.perf_counter() - tstart
    print("simple_set_get_string end: {}".format(elapsed))


# One context and one GX3DN object per API flavour, plus out-parameter refs.
ctx = gxa.GXContext.create("", "")
ctxo = gxo.GXContext.create("", "")
dn = gxa.GX3DN.create()
dno = gxo.GX3DN.create()
x = gxa.float_ref()
y = gxa.float_ref()
z = gxa.float_ref()
s = gxa.str_ref()
xo = gxo.float_ref()
yo = gxo.float_ref()
zo = gxo.float_ref()
so = gxo.str_ref()

print("OldAPI")
simple_set_get_doubles(dno, xo, yo, zo)
simple_set_get_string(dno, so)

print("NewAPI")
simple_set_get_doubles_baremetal(dn)
simple_set_get_doubles(dn, x, y, z)
simple_set_get_string_baremetal(dn)
simple_set_get_string(dn, s)

# Release objects before the contexts are torn down.
del dn
del dno
del ctxo
del ctx<file_sep>/examples/extra_tests/show_leak_warnings.py
# This sample stand-alone Python script shows a minimal use of the Pythonic gxpy module to
# create a Geosoft context and say hello to the user.
# This example can be run stand-alone or as a Oasis montaj extension.
import geosoft.gxpy as gxpy # gxpy methods
import numpy as np
import gc

# running as an extension from Oasis montaj will execute rungx()
def rungx():
    raise Exception("This is not an extension. Please use a python interpreter.")

# running as stand-alone program
if __name__ == "__main__":

    gxpy.utility.check_version('9.2')

    # Stand-alone programs must create a GX context before calling Geosoft methods.
    # NOTE(review): the objects named *leaked* below are deliberately never closed —
    # presumably to exercise the leak-warning machinery this example is named for.
    gxc_leaked_1 = gxpy.gx.GXpy(log=print)
    gxc_manual_close = gxpy.gx.GXpy()
    with gxpy.gx.GXpy() as gxc:
        with gxpy.grid.Grid.new(properties={'dtype': np.int16,
                                            'nx': 100, 'ny': 50,
                                            'x0': 4, 'y0': 8,
                                            'dx': 0.1, 'dy': 0.2,
                                            'rot': 5,
                                            'coordinate_system': gxpy.coordinate_system.Coordinate_system('NAD27 / UTM zone 18N')}) as grd:
            print("Hello {}".format(grd.coordinate_system))
        grd_leaked = gxpy.grid.Grid.new(properties={'dtype': np.int16,
                                                    'nx': 100, 'ny': 50,
                                                    'x0': 4, 'y0': 8,
                                                    'dx': 0.1, 'dy': 0.2,
                                                    'rot': 5,
                                                    'coordinate_system': gxpy.coordinate_system.Coordinate_system('NAD27 / UTM zone 18N')})

        # The context has a member 'gid' which contains the user's Geosoft ID.
        # Say hello to the user
        print("Hello {}".format(gxc.gid))
        #gxc_manual_close._close()

    del gxc_leaked_1
    gc.collect()
    print("Done.")
<file_sep>/geosoft/gxapi/GXAGG.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
from .GXST import GXST


### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXAGG(gxapi_cy.WrapAGG):
    """
    GXAGG class.

    The `GXAGG <geosoft.gxapi.GXAGG>` class is used to handle image display on maps.
    An aggregate contains one or more image layers (LAY) with
    each layer representing a grid or image file. The `GXAGG <geosoft.gxapi.GXAGG>`
    will combine all the layers to form one image
    """

    def __init__(self, handle=0):
        super(GXAGG, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXAGG <geosoft.gxapi.GXAGG>`

        :returns: A null `GXAGG <geosoft.gxapi.GXAGG>`
        :rtype: GXAGG
        """
        return GXAGG()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype: bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    def set_model(self, model):
        """
        Sets the Color Model

        :param model: :ref:`AGG_MODEL`
        :type model: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_model(model)

    def change_brightness(self, brt):
        """
        Change the brightness.

        :param brt: -1.0 - black; 0.0 no change; 1.0 white
        :type brt: float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 0.0 brightness does nothing.
        -1.0 to 0.0 makes colors darker, -1.0 is black
        0.0 to 1.0 makes colors lighter, 1.0 is white
        """
        self._change_brightness(brt)

    @classmethod
    def create(cls):
        """
        Create an aggregate

        :returns: `GXAGG <geosoft.gxapi.GXAGG>` object
        :rtype: GXAGG

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapAGG._create(GXContext._get_tls_geo())
        return GXAGG(ret_val)

    @classmethod
    def create_map(cls, map, name):
        """
        Create `GXAGG <geosoft.gxapi.GXAGG>` from Map with Group name.

        :param map: `GXMAP <geosoft.gxapi.GXMAP>` on which to place the view
        :param name: `GXAGG <geosoft.gxapi.GXAGG>` group name
        :type map: GXMAP
        :type name: str

        :returns: `GXAGG <geosoft.gxapi.GXAGG>` object
        :rtype: GXAGG

        .. versionadded:: 5.0.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The Agg Group name must include the View name with a
        backslash separating the view name and group name; e.g. "Data\\AGG_test"
        (when used as a string, the double slash represents as single \\).
        """
        ret_val = gxapi_cy.WrapAGG._create_map(GXContext._get_tls_geo(), map, name.encode())
        return GXAGG(ret_val)

    def get_layer_itr(self, layer, itr):
        """
        Get the `GXITR <geosoft.gxapi.GXITR>` of a layer

        :param layer: Layer number
        :type layer: int
        :type itr: GXITR

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Layers are numbered from 0, consecutively, in the order they are
        placed in the aggregate.
        An error will occur if the layer does not exist.
        Caller must create/destroy `GXITR <geosoft.gxapi.GXITR>`.
        """
        self._get_layer_itr(layer, itr)

    def get_layer_st(self, layer):
        """
        Get a `GXST <geosoft.gxapi.GXST>` filled with layer statistics

        :param layer: Layer number
        :type layer: int

        :returns: `GXST <geosoft.gxapi.GXST>` object
        :rtype: GXST

        .. versionadded:: 2021.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Layers are numbered from 0, consecutively, in the order they are
        placed in the aggregate.
        An error will occur if the layer does not exist.
        Caller must create/destroy `GXITR <geosoft.gxapi.GXITR>`.
        """
        ret_val = self._get_layer_st(layer)
        return GXST(ret_val)

    def list_img(self, gvv):
        """
        Lists file names of all the IMGs inside of the `GXAGG <geosoft.gxapi.GXAGG>`.

        :param gvv: `GXVV <geosoft.gxapi.GXVV>` of type -`STR_FILE <geosoft.gxapi.STR_FILE>`
        :type gvv: GXVV

        :returns: The number of IMGs.
        :rtype: int

        .. versionadded:: 5.0.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The returned `GXVV <geosoft.gxapi.GXVV>` contains the file names.
        """
        ret_val = self._list_img(gvv)
        return ret_val

    def num_layers(self):
        """
        Get the number of layers in an aggregate.

        :returns: The number of layers in an aggregate.
        :rtype: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._num_layers()
        return ret_val

    def layer_img(self, name, zone, color, cont):
        """
        Add an image as a layer in an aggregate.

        :param name: Grid name
        :param zone: :ref:`AGG_LAYER_ZONE` transform to use if color table has none defined.
        :param color: Color table name, "" for default
                      This can be a .TBL .ZON .`GXITR <geosoft.gxapi.GXITR>` or .`GXAGG <geosoft.gxapi.GXAGG>` file
                      .TBL is the default
        :param cont: Color contour interval or `rDUMMY <geosoft.gxapi.rDUMMY>` for default
        :type name: str
        :type zone: int
        :type color: str
        :type cont: float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        .. seealso:: `layer_shade_img <geosoft.gxapi.GXAGG.layer_shade_img>`
        """
        self._layer_img(name.encode(), zone, color.encode(), cont)

    def layer_img_ex(self, name, zone, color, min, max, cont):
        """
        Add an image as a layer in an aggregate.

        :param name: Grid name
        :param zone: :ref:`AGG_LAYER_ZONE` transform to use if color table has none defined.
        :param color: Color table name, "" for default
                      This can be a .TBL .ZON .`GXITR <geosoft.gxapi.GXITR>` or .`GXAGG <geosoft.gxapi.GXAGG>` file
                      .TBL is the default
        :param min: Minimum value or `rDUMMY <geosoft.gxapi.rDUMMY>` for default
        :param max: Maximum value or `rDUMMY <geosoft.gxapi.rDUMMY>` for default
        :param cont: Color contour interval or `rDUMMY <geosoft.gxapi.rDUMMY>` for default
        :type name: str
        :type zone: int
        :type color: str
        :type min: float
        :type max: float
        :type cont: float

        .. versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        .. seealso:: `layer_shade_img <geosoft.gxapi.GXAGG.layer_shade_img>`
        """
        self._layer_img_ex(name.encode(), zone, color.encode(), min, max, cont)

    def layer_shade_img(self, name, color, inc, dec, scl):
        """
        Add a shaded image as a layer in an aggregate.

        :param name: Grid name
        :param color: Color table name, "" for default
        :param inc: Inclination
        :param dec: Declination
        :param scl: Scale (`rDUMMY <geosoft.gxapi.rDUMMY>` for default, returns value used)
        :type name: str
        :type color: str
        :type inc: float
        :type dec: float
        :type scl: float_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A new grid file will be created to hold the shaded image
        data. This file will have the same name as the original
        grid but with "_s" added to the root name. It will always
        be located in the workspace directory regardless of the
        location of the original source image. If the file already
        exists, it will replaced.
        """
        scl.value = self._layer_shade_img(name.encode(), color.encode(), inc, dec, scl.value)

    def get_brightness(self):
        """
        Get the brightness setting of the `GXAGG <geosoft.gxapi.GXAGG>`

        :rtype: float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Brightness can range from -1.0 (black) to 1.0 (white).
        This brightness control is relative to the normal color
        when the `GXAGG <geosoft.gxapi.GXAGG>` is created.

        `GXAGG <geosoft.gxapi.GXAGG>` brightness depends on the brightness of the `GXITR <geosoft.gxapi.GXITR>` of each layer.
        Calling dGetBright_AGG will poll all layers, and if all have the same
        brightness, this is returned. If any of the layers have a different
        brightness, the current brightness of each layer is changed to be
        the reference brightness (0.0) and the brightness value of 0.0 is returned.

        .. seealso:: `change_brightness <geosoft.gxapi.GXAGG.change_brightness>`, `get_brightness <geosoft.gxapi.GXAGG.get_brightness>`, `change_brightness <geosoft.gxapi.GXAGG.change_brightness>`
        """
        ret_val = self._get_brightness()
        return ret_val

    def set_layer_itr(self, layer, itr):
        """
        Set the `GXITR <geosoft.gxapi.GXITR>` of a layer

        :param layer: Layer number
        :type layer: int
        :type itr: GXITR

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Layers are numbered from 0, consecutively, in the order they are
        placed in the aggregate.
        An error will occur if the layer does not exist.
        Caller must create/destroy `GXITR <geosoft.gxapi.GXITR>`.
        """
        self._set_layer_itr(layer, itr)

    def set_render_method(self, method):
        """
        Sets the Rendering Method

        :param method: :ref:`AGG_RENDER`
        :type method: int

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_render_method(method)



### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer<file_sep>/geosoft/gxapi/GXCSYMB.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref


### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXCSYMB(gxapi_cy.WrapCSYMB):
    """
    GXCSYMB class.

    This class is used for generating and modifying colored symbol objects.
    Symbol fills are assigned colors based on their Z values and a zone,
    Aggregate or `GXITR <geosoft.gxapi.GXITR>` file which defines what colors
    are associated with different ranges of Z values. The position of a symbol
    is defined by its X,Y coordinates.
    """

    def __init__(self, handle=0):
        super(GXCSYMB, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXCSYMB <geosoft.gxapi.GXCSYMB>`

        :returns: A null `GXCSYMB <geosoft.gxapi.GXCSYMB>`
        :rtype: GXCSYMB
        """
        return GXCSYMB()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype: bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    def set_angle(self, angle):
        """
        Set the symbol angle.

        :param angle: Symbol angle
        :type angle: float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_angle(angle)

    def set_base(self, base):
        """
        Set base value to subtract from Z values.

        :param base: Symbol Base
        :type base: float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_base(base)

    def set_dynamic_col(self, att):
        """
        Associate symbol edge or fill colors with Z data and color transform.

        :param att: :ref:`CSYMB_COLOR`
        :type att: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Use this method after a call to `set_static_col <geosoft.gxapi.GXCSYMB.set_static_col>`. This method
        reestablishes the symbol color association with their Z data
        values and color transform.
        """
        self._set_dynamic_col(att)

    def set_fixed(self, fixed):
        """
        Set symbol sizing to fixed (or proportionate)

        :param fixed: TRUE = Fixed symbol sizing
                      FALSE = Proportionate sizing
        :type fixed: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_fixed(fixed)

    def set_number(self, number):
        """
        Set the symbol number.

        :param number: Symbol number (0x1-0x1ffff)
        :type number: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The lower 16 bits of the number is interpreted as UTF-16 with a valid Unicode character
        code point. GFN fonts will produce valid symbols depending on the font for 0x01-0x7f and the degree,
        plus-minus and diameter symbol (latin small letter o with stroke) for 0xB0, 0xB1 and 0xF8 respectively.

        It is possible to check if a character is valid using `GXUNC.is_valid_utf16_char <geosoft.gxapi.GXUNC.is_valid_utf16_char>`. The high 16-bits are reserved
        for future use.

        Also see: `GXUNC.valid_symbol <geosoft.gxapi.GXUNC.valid_symbol>` and `GXUNC.validate_symbols <geosoft.gxapi.GXUNC.validate_symbols>`
        """
        self._set_number(number)

    def set_scale(self, scale):
        """
        Set the symbol scale.

        :param scale: Symbol scale (> 0.0)
        :type scale: float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_scale(scale)

    def add_data(self, vv_x, vv_y, vv_z):
        """
        Add x,y,z data to a color symbol object.

        :param vv_x: `GXVV <geosoft.gxapi.GXVV>` for X data
        :param vv_y: `GXVV <geosoft.gxapi.GXVV>` for Y data
        :param vv_z: `GXVV <geosoft.gxapi.GXVV>` for Z data
        :type vv_x: GXVV
        :type vv_y: GXVV
        :type vv_z: GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._add_data(vv_x, vv_y, vv_z)

    @classmethod
    def create(cls, itr):
        """
        Create a `GXCSYMB <geosoft.gxapi.GXCSYMB>`.

        :param itr: ZON, `GXAGG <geosoft.gxapi.GXAGG>`, or `GXITR <geosoft.gxapi.GXITR>` file name
        :type itr: str

        :returns: `GXCSYMB <geosoft.gxapi.GXCSYMB>` handle
        :rtype: GXCSYMB

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapCSYMB._create(GXContext._get_tls_geo(), itr.encode())
        return GXCSYMB(ret_val)

    def get_itr(self, itr):
        """
        Get the `GXITR <geosoft.gxapi.GXITR>` of the `GXCSYMB <geosoft.gxapi.GXCSYMB>`

        :param itr: `GXITR <geosoft.gxapi.GXITR>` object
        :type itr: GXITR

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_itr(itr)

    def set_font(self, font, geo_font, weight, italic):
        """
        Set the symbol font name.

        :param font: Font name
        :param geo_font: Geosoft font? (TRUE or FALSE)
        :param weight: :ref:`MVIEW_FONT_WEIGHT`
        :param italic: Italics? (TRUE or FALSE)
        :type font: str
        :type geo_font: int
        :type weight: int
        :type italic: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_font(font.encode(), geo_font, weight, italic)

    def set_static_col(self, col, att):
        """
        Set a static color for the symbol edge or fill.

        :param col: Color value
        :param att: :ref:`CSYMB_COLOR`
        :type col: int
        :type att: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Use this method to set a STATIC color for symbol edge or fill.
        By default, both edge and fill colors vary according to their
        Z data values and a color transform.
        """
        self._set_static_col(col, att)

    def get_stat(self, st):
        """
        Get the `GXST <geosoft.gxapi.GXST>` of the `GXCSYMB <geosoft.gxapi.GXCSYMB>`

        :param st: `GXST <geosoft.gxapi.GXST>` object
        :type st: GXST

        .. versionadded:: 2021.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_stat(st)



### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer<file_sep>/geosoft/gxapi/GXLST.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref


### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXLST(gxapi_cy.WrapLST):
    """
    GXLST class.

    The `GXLST <geosoft.gxapi.GXLST>` class is used to create and retrieve lists,
    and to perform specific actions on lists, including retrieving list items,
    sorting lists and adding or removing list items.
    """

    def __init__(self, handle=0):
        super(GXLST, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXLST <geosoft.gxapi.GXLST>`

        :returns: A null `GXLST <geosoft.gxapi.GXLST>`
        :rtype: GXLST
        """
        return GXLST()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype: bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    def add_item(self, name, val):
        """
        Adds an item to the end of the list.
:param name: Name of the Item :param val: Value of the Item :type name: str :type val: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_item(name.encode(), val.encode()) def add_symb_item(self, name, symb): """ Adds a channel/line/blob name and symbol to a list. :param name: Name of the channel, line or blob symbol :param symb: Symbol handle :type name: str :type symb: int .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A number of `GXDB <geosoft.gxapi.GXDB>` functions return LSTs with the channel or line name in the "Name" part of a `GXLST <geosoft.gxapi.GXLST>`, and the handle (DB_SYMB) in the value part. This function lets you quickly add a new item without the need of coverting the handle into a string value. """ self._add_symb_item(name.encode(), symb) def add_unique_item(self, name, val): """ Adds a unique item to the end of the list. :param name: Name of the Item :param val: Value of the Item :type name: str :type val: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Existing items that match the name are first removed. """ self._add_unique_item(name.encode(), val.encode()) def append(self, lst2): """ Add the items in one list to another list. :param lst2: List to append to the above `GXLST <geosoft.gxapi.GXLST>`. :type lst2: GXLST .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Item names and values are added using "`add_unique_item <geosoft.gxapi.GXLST.add_unique_item>`", so that existing items with the same name are replaced, and if items are duplicated in the appended `GXLST <geosoft.gxapi.GXLST>`, the last one will be the one to remain after the process is complete. """ self._append(lst2) @classmethod def assay_channel(cls): """ Create a `GXLST <geosoft.gxapi.GXLST>` of assay channel mask strings from file. :returns: `GXLST <geosoft.gxapi.GXLST>` Object :rtype: GXLST .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Searches the local directory, then user\\etc, then \\etc to see if the file "assaylist.csv" exists. The file contains strings of those channel names which are to be interpreted as assay channels for geochemical processes. Items can be on the same line, separated by commas, or on separate lines (and combinations of both). If this function is used in combination with the lFindItemMask_LST function, then you can use mask-strings such as "``*ppm``" The following is a sample file: ``*ppm, *(ppm), *PPM, *(PPM), FeCl, MnO2`` ``"Fe %"`` ``FeO`` If the file is not found, or if no items are parsed, the list is returned with zero size. See the "assaylist.csv" file in the oasismontaj\\etc directory for more details. .. seealso:: `find_item_mask <geosoft.gxapi.GXLST.find_item_mask>` """ ret_val = gxapi_cy.WrapLST._assay_channel(GXContext._get_tls_geo()) return GXLST(ret_val) def clear(self): """ Clear a list object. .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clear() def convert_from_csv_string(self, buff): """ Load a `GXLST <geosoft.gxapi.GXLST>` with items from a string. :param buff: Comma separated items :type buff: str .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Items in the input buffer must be separated with commas. Both the Name and Value in the list are set to the item. """ self._convert_from_csv_string(buff.encode()) def copy(self, source): """ Copy one `GXLST <geosoft.gxapi.GXLST>` object to another. :param source: Source List to Copy from :type source: GXLST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(source) @classmethod def create(cls, width): """ creates a user controllable list. The list is empty when created. :param width: Width of the list to make. This number should be large enough for both the item name and the item value. Must be > 2 and <= 4096. :type width: int :returns: Handle to the List Object. :rtype: GXLST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapLST._create(GXContext._get_tls_geo(), width) return GXLST(ret_val) @classmethod def create_s(cls, bf): """ Create `GXLST <geosoft.gxapi.GXLST>` from serialized source. :type bf: GXBF :returns: `GXLST <geosoft.gxapi.GXLST>` object :rtype: GXLST .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapLST._create_s(GXContext._get_tls_geo(), bf) return GXLST(ret_val) def del_item(self, item): """ Removes an item from the list. All items below it are shifted up one. :param item: Item Number to Delete :type item: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._del_item(item) def find_items(self, type, lst2, vv): """ Searches a `GXLST <geosoft.gxapi.GXLST>` for items in a second `GXLST <geosoft.gxapi.GXLST>`, returns indices of those found. :param type: :ref:`LST_ITEM` data to do the search on :param lst2: Items to search for :param vv: `GS_LONG <geosoft.gxapi.GS_LONG>` `GXVV <geosoft.gxapi.GXVV>` of returned indices into the first `GXLST <geosoft.gxapi.GXLST>`. :type type: int :type lst2: GXLST :type vv: GXVV .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is a much more efficient way of determining if items in one `GXLST <geosoft.gxapi.GXLST>` are found in a second, than by calling `find_item <geosoft.gxapi.GXLST.find_item>` repeatedly in a loop. The returned INT `GXVV <geosoft.gxapi.GXVV>` contains the same number of items as the "search items" `GXLST <geosoft.gxapi.GXLST>`, and contains -1 for items where the value is not found, and the index of items that are found. Comparisons are case-tolerant. """ self._find_items(type, lst2, vv) def gt_item(self, type, item, buff): """ This places the specified item into the buffer provided. :param type: :ref:`LST_ITEM` data to retrieve :param item: Item Number to Get :param buff: Buffer to Place Item Into :type type: int :type item: int :type buff: str_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If item number is not in the list, the buffer will be "". """ buff.value = self._gt_item(type, item, buff.value.encode()) def gt_symb_item(self, item, name, symb): """ Returns a channel/line/blob name and symbol from a list. :param item: Item number to get :param name: Buffer to Place Symbol name into :param symb: Symbol handle :type item: int :type name: str_ref :type symb: int_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A number of `GXDB <geosoft.gxapi.GXDB>` functions return LSTs with the channel or line name in the "Name" part of a `GXLST <geosoft.gxapi.GXLST>`, and the handle (DB_SYMB) in the value part. This function lets you quickly retrieve both the name and symbol handle for a given item, which needing to convert between types. """ name.value, symb.value = self._gt_symb_item(item, name.value.encode(), symb.value) def convert_to_csv_string(self, buff): """ Load a string with names from a `GXLST <geosoft.gxapi.GXLST>`. :param buff: Buffer to add items to :type buff: str_ref .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The list name values are put into a string, items separated by commas. """ buff.value = self._convert_to_csv_string(buff.value.encode()) def find_item(self, type, name): """ Searches the list for a specified item. :param type: :ref:`LST_ITEM` data to do the search on :param name: String to Search For :type type: int :type name: str :returns: x - Item Number -1 - Not Found :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Comparisons are case-tolerant. """ ret_val = self._find_item(type, name.encode()) return ret_val def find_item_mask(self, type, name): """ Searches the list for a specified item, list contains masks. :param type: :ref:`LST_ITEM` data to search :param name: String to try `GXLST <geosoft.gxapi.GXLST>` mask items on Search For :type type: int :type name: str :returns: x - Item Number -1 - Not Found :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Comparsions are case-intolerant (unlike `find_item <geosoft.gxapi.GXLST.find_item>`). This means items in the list such as "``*(ppm)``" will be found if the input search string is "Ni (ppm)" or "Ni(ppm)", but not if it is "Ni (PPM)", so you should include both "``*ppm*``" and "``*PPM*``". It is NOT the input string that should be the mask, but the `GXLST <geosoft.gxapi.GXLST>` items themselves This function was designed originally for geochemical processes in order to identify if a given channel name indicates that the channel should be given the "Assay" class. .. seealso:: `assay_channel <geosoft.gxapi.GXLST.assay_channel>` """ ret_val = self._find_item_mask(type, name.encode()) return ret_val def get_int(self, type, item): """ Get an integer item. :param type: :ref:`LST_ITEM` data to retrieve :param item: Item Number to Get :type type: int :type item: int :returns: Integer, `iDUMMY <geosoft.gxapi.iDUMMY>` if conversion fails or string is empty. :rtype: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_int(type, item) return ret_val def insert_item(self, item, name, val): """ Adds an item at a given location in the list. 
:param item: Item index :param name: Name of the Item :param val: Value of the Item :type item: int :type name: str :type val: str .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Index must be 0 >= index >= list size. Items above the list index are shifted up one index value. """ self._insert_item(item, name.encode(), val.encode()) def size(self): """ Get the number of items in the list. :returns: x - Number of items in the list. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._size() return ret_val def load_csv(self, csv, name_field, value_field): """ Load a list with data from a CSV file :param csv: The CSV file :param name_field: Column label for the item name :param value_field: Column label for the item value :type csv: str :type name_field: str :type value_field: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Both the Item and Value fields must be specified. The CSV file must be comma delimited, and have a header line with the field names. Leading and trailing spaces are removed in the names and values. """ self._load_csv(csv.encode(), name_field.encode(), value_field.encode()) def load_file(self, file): """ Set up a list from a list file. :param file: Name of the file :type file: str .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A list file is an ASCII file that contains list entries. Each line for the file contains a list item name and an optional list item value. The name and value must be delimited by a space, tab or comma. 
If the item name or value contains spaces, tabs or commas, it must be contained in quotes. Blank lines
Otherwise we look for the file in the local then the GEOSOFT\\etc directory. If the file does not exist it will be created in the GEOSOFT\\etc directory. """ self._save_file(file.encode()) def select_csv_string_items(self, buff, ls_to): """ Load a `GXLST <geosoft.gxapi.GXLST>` with items from a second `GXLST <geosoft.gxapi.GXLST>` found in a CSV string. :param buff: Comma separated item names :param ls_to: `GXLST <geosoft.gxapi.GXLST>` to add selected items to :type buff: str :type ls_to: GXLST .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Items in the input string must be separated with commas. Parsing uses the sCommaTokens_GS function. Both the name and value of the input `GXLST <geosoft.gxapi.GXLST>` items whose name matches an item in the input string are copied to the output `GXLST <geosoft.gxapi.GXLST>`. Items are copied in the same order they appear in the input string. Items in the string not found in the input `GXLST <geosoft.gxapi.GXLST>` are ignored, and no error is registered. Item matches are case-tolerant. """ self._select_csv_string_items(buff.encode(), ls_to) def serial(self, bf): """ Serialize `GXLST <geosoft.gxapi.GXLST>` to a `GXBF <geosoft.gxapi.GXBF>`. :type bf: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._serial(bf) def set_item(self, type, item, buff): """ Place an item at a specified point in the `GXLST <geosoft.gxapi.GXLST>`. :param type: :ref:`LST_ITEM` data to insert :param item: Item Number to Set :param buff: Item to Set :type type: int :type item: int :type buff: str .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The existing item at the given index will be replaced. 
""" self._set_item(type, item, buff.encode()) def sort(self, type, ord): """ Sorts a list. :param type: :ref:`LST_ITEM` data to sort on :param ord: 0 - Ascending, 1 - Decending :type type: int :type ord: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._sort(type, ord) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXAGG.rst .. _GXAGG: GXAGG class ================================== .. autoclass:: geosoft.gxapi.GXAGG :members: .. _AGG_LAYER_ZONE: AGG_LAYER_ZONE constants ----------------------------------------------------------------------- Aggregate Layer Zone defines .. autodata:: geosoft.gxapi.AGG_LAYER_ZONE_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.AGG_LAYER_ZONE_DEFAULT .. autodata:: geosoft.gxapi.AGG_LAYER_ZONE_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.AGG_LAYER_ZONE_LINEAR .. autodata:: geosoft.gxapi.AGG_LAYER_ZONE_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.AGG_LAYER_ZONE_NORMAL .. autodata:: geosoft.gxapi.AGG_LAYER_ZONE_EQUALAREA :annotation: .. autoattribute:: geosoft.gxapi.AGG_LAYER_ZONE_EQUALAREA .. autodata:: geosoft.gxapi.AGG_LAYER_ZONE_SHADE :annotation: .. autoattribute:: geosoft.gxapi.AGG_LAYER_ZONE_SHADE .. autodata:: geosoft.gxapi.AGG_LAYER_ZONE_LOGLINEAR :annotation: .. autoattribute:: geosoft.gxapi.AGG_LAYER_ZONE_LOGLINEAR .. autodata:: geosoft.gxapi.AGG_LAYER_ZONE_LAST :annotation: .. autoattribute:: geosoft.gxapi.AGG_LAYER_ZONE_LAST .. _AGG_MODEL: AGG_MODEL constants ----------------------------------------------------------------------- Aggregation color model defines .. autodata:: geosoft.gxapi.AGG_MODEL_HSV :annotation: .. autoattribute:: geosoft.gxapi.AGG_MODEL_HSV .. 
autodata:: geosoft.gxapi.AGG_MODEL_RGB :annotation: .. autoattribute:: geosoft.gxapi.AGG_MODEL_RGB .. autodata:: geosoft.gxapi.AGG_MODEL_CMY :annotation: .. autoattribute:: geosoft.gxapi.AGG_MODEL_CMY .. _AGG_RENDER: AGG_RENDER constants ----------------------------------------------------------------------- Aggregation rendering modes .. autodata:: geosoft.gxapi.AGG_RENDER_ADD :annotation: .. autoattribute:: geosoft.gxapi.AGG_RENDER_ADD .. autodata:: geosoft.gxapi.AGG_RENDER_BLEND :annotation: .. autoattribute:: geosoft.gxapi.AGG_RENDER_BLEND .. autodata:: geosoft.gxapi.AGG_RENDER_BLEND_ALL :annotation: .. autoattribute:: geosoft.gxapi.AGG_RENDER_BLEND_ALL .. autodata:: geosoft.gxapi.AGG_RENDER_FADE :annotation: .. autoattribute:: geosoft.gxapi.AGG_RENDER_FADE <file_sep>/docs/GXFLT.rst .. _GXFLT: GXFLT class ================================== .. autoclass:: geosoft.gxapi.GXFLT :members: <file_sep>/docs/GXDATALINKD.rst .. _GXDATALINKD: GXDATALINKD class ================================== .. 
autoclass:: geosoft.gxapi.GXDATALINKD :members: <file_sep>/examples/tutorial/Grids and Images/grid_statistics_iterator.py import geosoft.gxapi as gxapi import geosoft.gxpy.gx as gx import geosoft.gxpy.grid as gxgrid import geosoft.gxpy.utility as gxu # this example requires version 9.2.1, which adds iteration support gxu.check_version('9.2.1') # create context gxc = gx.GXpy() # create a gxapi.GXST instance to accumulate statistics stats = gxapi.GXST.create() # add each data to stats point-by-point (slow, better to use numpy or vector approach) number_of_dummies = 0 with gxgrid.Grid.open('elevation_surfer.grd(SRF;VER=V7)') as grid: for x, y, z, v in grid: if v is None: number_of_dummies += 1 else: stats.data(v) total_points = grid.nx * grid.ny # print statistical properties print('minimum: ', stats.get_info(gxapi.ST_MIN)) print('maximum: ', stats.get_info(gxapi.ST_MAX)) print('mean: ', stats.get_info(gxapi.ST_MEAN)) print('standard deviation:', stats.get_info(gxapi.ST_STDDEV)) print('number of dummies: ', number_of_dummies) print('number of valid data points: ', total_points - number_of_dummies)<file_sep>/geosoft/gxapi/GXMAPL.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMAPL(gxapi_cy.WrapMAPL): """ GXMAPL class. The `GXMAPL <geosoft.gxapi.GXMAPL>` class is the interface with the MAPPLOT program, which reads a MAPPLOT control file and plots graphical entities to a map. 
The `GXMAPL <geosoft.gxapi.GXMAPL>` object is created for a given control file, then passed to the MAPPLOT program, along with the target `GXMAP <geosoft.gxapi.GXMAP>` object on which to do the drawing """ def __init__(self, handle=0): super(GXMAPL, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMAPL <geosoft.gxapi.GXMAPL>` :returns: A null `GXMAPL <geosoft.gxapi.GXMAPL>` :rtype: GXMAPL """ return GXMAPL() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, name, ref_name, line): """ Create a `GXMAPL <geosoft.gxapi.GXMAPL>`. :param name: `GXMAPL <geosoft.gxapi.GXMAPL>` file name :param ref_name: Map base reference name :param line: Start line number in file (0 is first) :type name: str :type ref_name: str :type line: int :returns: `GXMAPL <geosoft.gxapi.GXMAPL>`, aborts if creation fails :rtype: GXMAPL .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The default map groups will use the reference name with "_Data" and "_Base" added. If no reference name is specified, the name "`GXMAPL <geosoft.gxapi.GXMAPL>`" is used """ ret_val = gxapi_cy.WrapMAPL._create(GXContext._get_tls_geo(), name.encode(), ref_name.encode(), line) return GXMAPL(ret_val) @classmethod def create_reg(cls, name, ref_name, line, reg): """ Create a `GXMAPL <geosoft.gxapi.GXMAPL>` with `GXREG <geosoft.gxapi.GXREG>`. :param name: `GXMAPL <geosoft.gxapi.GXMAPL>` file name :param ref_name: Map base reference name :param line: Start line number in file (0 is first) :type name: str :type ref_name: str :type line: int :type reg: GXREG :returns: `GXMAPL <geosoft.gxapi.GXMAPL>`, aborts if creation fails :rtype: GXMAPL .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The default map groups will use the reference name with "_Data" and "_Base" added. If no reference name is specified, the name "`GXMAPL <geosoft.gxapi.GXMAPL>`" is used """ ret_val = gxapi_cy.WrapMAPL._create_reg(GXContext._get_tls_geo(), name.encode(), ref_name.encode(), line, reg) return GXMAPL(ret_val) def process(self, map): """ Process a `GXMAPL <geosoft.gxapi.GXMAPL>` :type map: GXMAP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._process(map) def replace_string(self, var, repl): """ Adds a replacement string to a mapplot control file. :param var: Variable :param repl: Replacement :type var: str :type repl: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._replace_string(var.encode(), repl.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXLMSG.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXLMSG(gxapi_cy.WrapLMSG): """ GXLMSG class. Message class methods. 
""" def __init__(self, handle=0): super(GXLMSG, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXLMSG <geosoft.gxapi.GXLMSG>` :returns: A null `GXLMSG <geosoft.gxapi.GXLMSG>` :rtype: GXLMSG """ return GXLMSG() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def goto_line(cls, line): """ Sends a 'go to line' message :param line: Line name :type line: str .. versionadded:: 2022.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapLMSG._goto_line(GXContext._get_tls_geo(), line.encode()) @classmethod def goto_point(cls, x, y, z, ipj): """ Sends a move cursor message :param x: X location :param y: Y location :param z: Z location :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` (if (`GXIPJ <geosoft.gxapi.GXIPJ>`)0, default coordinate system) :type x: float :type y: float :type z: float :type ipj: GXIPJ .. versionadded:: 5.0.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapLMSG._goto_point(GXContext._get_tls_geo(), x, y, z, ipj) @classmethod def view_area(cls, x0, y0, x1, y1, ipj): """ Sends a view area message :param x0: X0 location :param y0: Y0 location :param x1: X1 location :param y1: Y1 location :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` (if (`GXIPJ <geosoft.gxapi.GXIPJ>`)0, default coordinate system) :type x0: float :type y0: float :type x1: float :type y1: float :type ipj: GXIPJ .. 
versionadded:: 5.0.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapLMSG._view_area(GXContext._get_tls_geo(), x0, y0, x1, y1, ipj) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXPGEXP.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPGEXP(gxapi_cy.WrapPGEXP): """ GXPGEXP class. The `GXPGEXP <geosoft.gxapi.GXPGEXP>` class is similar to the `GXEXP <geosoft.gxapi.GXEXP>` class, but is used to apply math expressions to pagers (`GXPG <geosoft.gxapi.GXPG>` objects). It works only on PGs of the same dimensions. """ def __init__(self, handle=0): super(GXPGEXP, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPGEXP <geosoft.gxapi.GXPGEXP>` :returns: A null `GXPGEXP <geosoft.gxapi.GXPGEXP>` :rtype: GXPGEXP """ return GXPGEXP() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def add_pager(self, pg, var): """ This method adds an pager to the `GXPGEXP <geosoft.gxapi.GXPGEXP>` object with a variable name. :param pg: Pager to add :param var: Variable name :type pg: GXPG :type var: str .. 
versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._add_pager(pg, var.encode()) @classmethod def create(cls): """ This method creates an `GXPGEXP <geosoft.gxapi.GXPGEXP>` object. :returns: `GXPGEXP <geosoft.gxapi.GXPGEXP>` Object :rtype: GXPGEXP .. versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapPGEXP._create(GXContext._get_tls_geo()) return GXPGEXP(ret_val) def do_formula(self, formula, unused): """ This method runs a formula on the pagers. :param formula: Formula :param unused: Legacy parameter, no longer used. :type formula: str :type unused: int .. versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._do_formula(formula.encode(), unused) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/test_vox_display.py import unittest import os import geosoft.gxpy.system as gsys import geosoft.gxpy.vox as gxvox import geosoft.gxpy.vox_display as gxvoxd import geosoft.gxpy.map as gxmap import geosoft.gxpy.view as gxview import geosoft.gxpy.group as gxgroup import geosoft.gxpy.surface as gxsurf import geosoft.gxpy.viewer as gxviewer from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'testvoxset.zip'), folder=cls._gx.temp_folder()) cls.vox_file = os.path.join(cls.folder, 'test.geosoft_voxel') cls.vectorvox_file = os.path.join(cls.folder, 'mvi.geosoft_vectorvoxel') def 
test_voxd(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: vox.unit_of_measure = 'maki' with gxvoxd.VoxDisplay.solid(vox) as voxd: self.assertFalse(voxd.is_thematic) self.assertTrue(voxd.opacity, 1.0) voxd.opacity = 0.2 self.assertTrue(voxd.opacity, 0.2) self.assertEqual(voxd.color_map.unit_of_measure, 'maki') self.assertEqual(voxd.shell_limits, (None, None)) voxd.shell_limits = (None, 0.00002) self.assertEqual(voxd.shell_limits, (None, 0.00002)) voxd.shell_limits = (0.00002, None) self.assertEqual(voxd.shell_limits, (0.00002, None)) voxd.shell_limits = (0.00002, 0.00003) self.assertEqual(voxd.shell_limits, (0.00002, 0.00003)) def test_figure_map(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: vox.unit_of_measure = 'SI Susc' with gxvoxd.VoxDisplay.solid(vox) as voxd: voxd.shell_limits = (0.0001, None) fig_map = voxd.figure_map(title="My Test Vox").file_name self.crc_map(fig_map) # gxviewer.view_document(fig_map) def test_figure_map_smooth(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: vox.unit_of_measure = 'SI Susc' with gxvoxd.VoxDisplay.solid(vox) as voxd: voxd.shell_limits = (0.0001, None) self.assertEqual(voxd.render_mode, gxvoxd.RENDER_FILL) voxd.render_mode = gxvoxd.RENDER_SMOOTH self.assertEqual(voxd.render_mode, gxvoxd.RENDER_SMOOTH) fig_map = voxd.figure_map(title="My Test Vox").file_name # gxviewer.view_document(fig_map) self.crc_map(fig_map) def test_figure_map_zone(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: vox.unit_of_measure = 'SI Susc' with gxvoxd.VoxDisplay.solid(vox, color_map='grey.tbl', zone=gxvoxd.ZONE_LINEAR) as voxd: voxd.shell_limits = (0.0001, None) fig_map = voxd.figure_map(title="My Test Vox in grey").file_name self.crc_map(fig_map) # gxviewer.view_document(fig_map) def test_figure_map_vectorvoxel(self): self.start() with gxvox.Vox.open(self.vectorvox_file) as vox: vox.unit_of_measure = 'SI Susc' with gxvoxd.VoxDisplay.solid(vox) as voxd: voxd.shell_limits = (0.0001, None) 
fig_map = voxd.figure_map(title="My Test VectorVox").file_name # gxviewer.view_document(fig_map) self.crc_map(fig_map) def test_figure_map_vectorvoxel_vector(self): self.start() with gxvox.Vox.open(self.vectorvox_file) as vox: vox.unit_of_measure = 'SI Susc' with gxvoxd.VoxDisplay.vector(vox) as voxd: voxd.shell_limits = (0.01, None) voxd.vector_cone_specs = (3, None, None, 100) fig_map = voxd.figure_map(title="My Test VectorVox").file_name # gxviewer.view_document(fig_map) self.crc_map(fig_map) def test_open(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: vox.unit_of_measure = 'maki' with gxvoxd.VoxDisplay.solid(vox) as voxd: with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name group_name = gxgroup.VoxDisplayGroup.new(v3d, voxd).name with gxview.View_3d.open(v3d_file) as v3d: self.assertEqual(len(v3d.group_list_voxel), 1) self.assertTrue(group_name in v3d.group_list_voxel) with gxgroup.VoxDisplayGroup.open(v3d, group_name) as gvd: self.assertEqual(gvd.name, group_name) self.assertEqual(gvd.voxd.name, group_name) self.assertEqual(gvd.voxd.vox.name, group_name) self.assertEqual(gvd.unit_of_measure, 'maki') with gxvox.Vox.open(self.vectorvox_file) as vox: vox.unit_of_measure = 'vecmaki' with gxvoxd.VoxDisplay.solid(vox) as voxd: with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name group_name = gxgroup.VoxDisplayGroup.new(v3d, voxd).name with gxview.View_3d.open(v3d_file) as v3d: self.assertEqual(len(v3d.group_list_voxel), 1) self.assertTrue(group_name in v3d.group_list_voxel) with gxgroup.VoxDisplayGroup.open(v3d, group_name) as gvd: self.assertEqual(gvd.name, group_name) self.assertEqual(gvd.voxd.name, group_name) self.assertEqual(gvd.voxd.vox.name, group_name) self.assertEqual(gvd.unit_of_measure, 'vecmaki') with gxvox.Vox.open(self.vectorvox_file) as vox: vox.unit_of_measure = 'vecmaki' with gxvoxd.VoxDisplay.vector(vox) as voxd: with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name group_name = gxgroup.VoxDisplayGroup.new(v3d, 
voxd).name with gxview.View_3d.open(v3d_file) as v3d: self.assertEqual(len(v3d.group_list_vectorvoxel), 1) self.assertTrue(group_name in v3d.group_list_vectorvoxel) with gxgroup.VoxDisplayGroup.open(v3d, group_name) as gvd: self.assertEqual(gvd.name, group_name) self.assertEqual(gvd.voxd.name, group_name) self.assertEqual(gvd.unit_of_measure, 'vecmaki') # @unittest.skip('Fails in 9.5') def test_isosurface(self): self.start() with gxvox.Vox.open(self.vectorvox_file) as vox: fn = gxsurf.SurfaceDataset.vox_surface(vox, (0.005, 0.01, 0.02, 0.04, 0.08, 0.16, 0.32, 0.64, 1.28), temp=True).file_name with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxgroup.surface_group_from_file(v3d, fn) self.crc_map(v3d_file) if __name__ == '__main__': unittest.main() <file_sep>/docs/GXTRND.rst .. _GXTRND: GXTRND class ================================== .. autoclass:: geosoft.gxapi.GXTRND :members: .. _TRND_NODE: TRND_NODE constants ----------------------------------------------------------------------- Node to find .. autodata:: geosoft.gxapi.TRND_MIN :annotation: .. autoattribute:: geosoft.gxapi.TRND_MIN .. autodata:: geosoft.gxapi.TRND_MAX :annotation: .. 
autoattribute:: geosoft.gxapi.TRND_MAX <file_sep>/geosoft/gxpy/tests/test_spatialdata.py import unittest import os import geosoft.gxpy.spatialdata as gxspd import geosoft.gxpy.coordinate_system as gxcs from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() @classmethod def tearDownClass(cls): cls.tearDownGXPYTest() def test_properties(self): self.start() with gxspd.SpatialData() as spd: self.assertEqual(spd.file_name, None) self.assertEqual(spd.name, '_geometry_') self.assertTrue(isinstance(spd.metadata, dict)) self.assertEqual(spd.unit_of_measure, '') self.assertEqual(spd.extent, None) self.assertEqual(spd.extent_xy, (None, None, None, None)) self.assertEqual(spd.extent_minimum_xyz, (None, None, None)) self.assertEqual(spd.extent_maximum_xyz, (None, None, None)) self.assertEqual(spd.coordinate_system, None) with gxspd.SpatialData(file_name='maki.spd') as spd: self.assertEqual(spd.file_name, 'maki.spd') self.assertEqual(spd.name, 'maki') with gxspd.SpatialData(name='maki') as spd: self.assertEqual(spd.name, 'maki') self.assertEqual(spd.file_name, None) with gxspd.SpatialData('maki', file_name='bogus.spd') as spd: self.assertEqual(spd.file_name, 'bogus.spd') self.assertEqual(spd.name, 'maki') self.assertRaises(gxspd.SpatialException, gxspd.SpatialData, mode=gxspd.MODE_READ) self.assertRaises(gxspd.SpatialException, gxspd.SpatialData, mode=gxspd.MODE_READWRITE) gxspd.SpatialData().close() def test_setters(self): self.start() with gxspd.SpatialData() as spd: spd.unit_of_measure = 'ohms' self.assertEqual(spd.unit_of_measure, 'ohms') spd.coordinate_system = 'NAD83 / UTM zone 19N' self.assertEqual(str(spd.coordinate_system), 'NAD83 / UTM zone 19N') def test_modes(self): self.start() fn = 'maki.spd' gxspd.delete_files(fn) self.assertFalse(os.path.exists(fn)) self.assertFalse(os.path.exists(fn + '.xml')) gxspd.SpatialData(fn, mode=gxspd.MODE_NEW) self.assertRaises(gxspd.SpatialException, gxspd.SpatialData, 'maki.spd', 
mode=gxspd.MODE_READ) self.assertRaises(gxspd.SpatialException, gxspd.SpatialData, 'maki.spd', mode=gxspd.MODE_READWRITE) try: with open(fn, 'w') as f: f.write('maki') with gxspd.SpatialData(file_name=fn, mode=gxspd.MODE_READ) as spd: self.assertEqual(spd.name, 'maki') self.assertEqual(spd.file_name, fn) spd.metadata = {'name': 'some_name'} self.assertEqual(spd.metadata['name'], 'some_name') self.assertTrue(os.path.exists(fn)) self.assertFalse(os.path.exists(fn + '.xml')) finally: gxspd.delete_files(fn) try: with open(fn, 'w') as f: f.write('maki') with gxspd.SpatialData(file_name=fn, mode=gxspd.MODE_READWRITE) as spd: self.assertEqual(spd.name, 'maki') spd.metadata = {'name': 'some_name'} self.assertTrue(os.path.exists(fn)) self.assertTrue(os.path.exists(fn + '.xml')) with gxspd.SpatialData(file_name=fn, mode=gxspd.MODE_READ) as spd: self.assertEqual(spd.metadata['name'], 'some_name') finally: gxspd.delete_files(fn) self.assertFalse(os.path.exists(fn)) self.assertFalse(os.path.exists(fn + '.xml')) try: with open(fn, 'w') as f: f.write('maki') self.assertRaises(gxspd.SpatialException, gxspd.SpatialData, 'maki', 'maki.spd', mode=gxspd.MODE_NEW) with gxspd.SpatialData(file_name=fn, mode=gxspd.MODE_NEW, overwrite=True) as spd: self.assertEqual(spd.name, 'maki') spd.metadata = {'name': 'some_name'} self.assertTrue(os.path.exists(fn)) self.assertTrue(os.path.exists(fn + '.xml')) with gxspd.SpatialData(file_name=fn, mode=gxspd.MODE_READ) as spd: self.assertEqual(spd.metadata['name'], 'some_name') finally: gxspd.delete_files(fn) self.assertFalse(os.path.exists(fn)) self.assertFalse(os.path.exists(fn + '.xml')) ############################################################################################### if __name__ == '__main__': unittest.main() <file_sep>/geosoft/gxapi/GXGRID3D.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref

### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXGRID3D(gxapi_cy.WrapGRID3D):
    """
    GXGRID3D class.

    High Performance 3D Grid.

    Thin generated wrapper over the Cython ``WrapGRID3D`` implementation:
    every method delegates to the corresponding underscore-prefixed native
    call.
    """

    def __init__(self, handle=0):
        super(GXGRID3D, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXGRID3D <geosoft.gxapi.GXGRID3D>`

        :returns: A null `GXGRID3D <geosoft.gxapi.GXGRID3D>`
        :rtype:   GXGRID3D
        """
        return GXGRID3D()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0

    # Miscellaneous

    def get_type(self):
        """
        Get the type of this GRID3D

        :rtype: int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_type()
        return ret_val

    def is_thematic(self):
        """
        Does this grid3d contain thematic data

        :rtype: bool

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_thematic()
        return ret_val

    def is_double(self):
        """
        Does this grid3d contain floating point data

        :rtype: bool

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_double()
        return ret_val

    def is_vector(self):
        """
        Does this grid3d contain vector data

        :rtype: bool

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_vector()
        return ret_val

    # NOTE(review): in get_tpat/set_tpat the parameter is named ``ipj`` but
    # holds a GXTPAT, not a projection — this looks like a generator artifact;
    # renaming would break keyword callers, so it is left unchanged.
    def get_tpat(self, ipj):
        """
        Get the TPAT from the thematic grid3d.

        :param ipj: `GXTPAT <geosoft.gxapi.GXTPAT>` object
        :type  ipj: GXTPAT

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_tpat(ipj)

    def set_tpat(self, ipj):
        """
        Set the TPAT of a thematic grid3d.

        :param ipj: `GXTPAT <geosoft.gxapi.GXTPAT>` object
        :type  ipj: GXTPAT

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_tpat(ipj)

    # NOTE(review): 'min'/'max' parameter names below shadow Python builtins;
    # kept as-is for API compatibility with the code generator.
    def get_double_stats(self, num_valid, num_dummies, min, max, mean, stddev):
        """
        Get Double statistics.

        All arguments are reference objects whose ``.value`` is overwritten
        with the result.

        :param num_valid:   Number of valid values
        :param num_dummies: Number of invalid values
        :param min:         Min value
        :param max:         Maximum value
        :param mean:        Mean value
        :param stddev:      Standard Deviation
        :type num_valid:    int_ref
        :type num_dummies:  int_ref
        :type min:          float_ref
        :type max:          float_ref
        :type mean:         float_ref
        :type stddev:       float_ref

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        num_valid.value, num_dummies.value, min.value, max.value, mean.value, stddev.value = self._get_double_stats(num_valid.value, num_dummies.value, min.value, max.value, mean.value, stddev.value)

    def get_thematic_stats(self, num_valid, num_dummies, min, max, mean, stddev):
        """
        Get Thematic Data statistics.

        All arguments are reference objects whose ``.value`` is overwritten
        with the result.

        :param num_valid:   Number of valid values
        :param num_dummies: Number of invalid values
        :param min:         Min value
        :param max:         Maximum value
        :param mean:        Mean value
        :param stddev:      Standard Deviation
        :type num_valid:    int_ref
        :type num_dummies:  int_ref
        :type min:          int_ref
        :type max:          int_ref
        :type mean:         int_ref
        :type stddev:       int_ref

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        num_valid.value, num_dummies.value, min.value, max.value, mean.value, stddev.value = self._get_thematic_stats(num_valid.value, num_dummies.value, min.value, max.value, mean.value, stddev.value)

    def get_vector_stats(self, num_valid, num_dummies, min_x, min_y, min_z, max_x, max_y, max_z, mean_x, mean_y, mean_z, stddev_x, stddev_y, stddev_z):
        """
        Get Vector Data statistics.

        All arguments are reference objects whose ``.value`` is overwritten
        with the per-component result.

        :param num_valid:   Number of valid values
        :param num_dummies: Number of invalid values
        :param min_x:       Min X value
        :param min_y:       Min Y value
        :param min_z:       Min Z value
        :param max_x:       Maximum X value
        :param max_y:       Maximum Y value
        :param max_z:       Maximum Z value
        :param mean_x:      Mean X value
        :param mean_y:      Mean Y value
        :param mean_z:      Mean Z value
        :param stddev_x:    Standard X Deviation
        :param stddev_y:    Standard Y Deviation
        :param stddev_z:    Standard Z Deviation
        :type num_valid:    int_ref
        :type num_dummies:  int_ref
        :type min_x:        float_ref
        :type min_y:        float_ref
        :type min_z:        float_ref
        :type max_x:        float_ref
        :type max_y:        float_ref
        :type max_z:        float_ref
        :type mean_x:       float_ref
        :type mean_y:       float_ref
        :type mean_z:       float_ref
        :type stddev_x:     float_ref
        :type stddev_y:     float_ref
        :type stddev_z:     float_ref

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        num_valid.value, num_dummies.value, min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value, mean_x.value, mean_y.value, mean_z.value, stddev_x.value, stddev_y.value, stddev_z.value = self._get_vector_stats(num_valid.value, num_dummies.value, min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value, mean_x.value, mean_y.value, mean_z.value, stddev_x.value, stddev_y.value, stddev_z.value)

    def fill_double(self, value):
        """
        Fill the grid3d with a single double value.

        :param value: Fill Value
        :type  value: float

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._fill_double(value)

    def fill_thematic(self, value):
        """
        Fill the grid3d with a single thematic value.

        :param value: Fill Value
        :type  value: int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._fill_thematic(value)

    def fill_vector(self, value_x, value_y, value_z):
        """
        Fill the grid3d with a single vector value.

        :param value_x: Fill Value X
        :param value_y: Fill Value Y
        :param value_z: Fill Value Z
        :type  value_x: float
        :type  value_y: float
        :type  value_z: float

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._fill_vector(value_x, value_y, value_z)

    def get_elements_in_block_x(self):
        """
        Get the number of cells in the block in the X direction

        :rtype: int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_elements_in_block_x()
        return ret_val

    def get_elements_in_block_y(self):
        """
        Get the number of cells in the block in the Y direction

        :rtype: int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_elements_in_block_y()
        return ret_val

    def get_elements_in_block_z(self):
        """
        Get the number of cells in the block in the Z direction

        :rtype: int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_elements_in_block_z()
        return ret_val

    def read_x(self, x, y, z, VV):
        """
        Read data from a GRID3D in the x direction (MOST EFFICIENT)

        :param x:  X location
        :param y:  Y location
        :param z:  Z location
        :param VV: VV Containing Data
        :type x:   int
        :type y:   int
        :type z:   int
        :type VV:  GXVV
        :rtype:    int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._read_x(x, y, z, VV)
        return ret_val

    def write_x(self, x, y, z, VV):
        """
        Write data to a GRID3D in the X direction (MOST EFFICIENT)

        :param x:  X location
        :param y:  Y location
        :param z:  Z location
        :param VV: VV Containing Data
        :type x:   int
        :type y:   int
        :type z:   int
        :type VV:  GXVV
        :rtype:    int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._write_x(x, y, z, VV)
        return ret_val

    def read_y(self, x, y, z, VV):
        """
        Read data from a GRID3D in the Y direction

        :param x:  X location
        :param y:  Y location
        :param z:  Z location
        :param VV: VV Containing Data
        :type x:   int
        :type y:   int
        :type z:   int
        :type VV:  GXVV
        :rtype:    int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._read_y(x, y, z, VV)
        return ret_val

    def write_y(self, x, y, z, VV):
        """
        Write data to a GRID3D in the Y direction

        :param x:  X location
        :param y:  Y location
        :param z:  Z location
        :param VV: VV Containing Data
        :type x:   int
        :type y:   int
        :type z:   int
        :type VV:  GXVV
        :rtype:    int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._write_y(x, y, z, VV)
        return ret_val

    def read_z(self, x, y, z, VV):
        """
        Read data from a GRID3D in the Z direction

        :param x:  X location
        :param y:  Y location
        :param z:  Z location
        :param VV: VV Containing Data
        :type x:   int
        :type y:   int
        :type z:   int
        :type VV:  GXVV
        :rtype:    int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._read_z(x, y, z, VV)
        return ret_val

    def write_z(self, x, y, z, VV):
        """
        Write data to a GRID3D in the Z direction

        :param x:  X location
        :param y:  Y location
        :param z:  Z location
        :param VV: VV Containing Data
        :type x:   int
        :type y:   int
        :type z:   int
        :type VV:  GXVV
        :rtype:    int

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._write_z(x, y, z, VV)
        return ret_val

### endblock ClassImplementation

### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend

### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/docs/GXST.rst
.. _GXST:

GXST class
==================================

.. autoclass:: geosoft.gxapi.GXST
    :members:

.. _ST_INFO:

ST_INFO constants
-----------------------------------------------------------------------

Information to retrieve

.. autodata:: geosoft.gxapi.ST_ITEMS
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_ITEMS

.. autodata:: geosoft.gxapi.ST_NPOS
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_NPOS

.. autodata:: geosoft.gxapi.ST_NZERO
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_NZERO

.. autodata:: geosoft.gxapi.ST_TOTAL
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_TOTAL

.. autodata:: geosoft.gxapi.ST_DUMMIES
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_DUMMIES

.. autodata:: geosoft.gxapi.ST_MIN
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_MIN

.. autodata:: geosoft.gxapi.ST_MAX
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_MAX

.. autodata:: geosoft.gxapi.ST_RANGE
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_RANGE

.. autodata:: geosoft.gxapi.ST_MEAN
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_MEAN

.. autodata:: geosoft.gxapi.ST_MEDIAN
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_MEDIAN

.. autodata:: geosoft.gxapi.ST_MODE
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_MODE

.. autodata:: geosoft.gxapi.ST_SIMPLE_MODE
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_SIMPLE_MODE

.. autodata:: geosoft.gxapi.ST_GEOMEAN
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_GEOMEAN

.. autodata:: geosoft.gxapi.ST_VARIANCE
    :annotation:

..
autoattribute:: geosoft.gxapi.ST_VARIANCE

.. autodata:: geosoft.gxapi.ST_STDDEV
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_STDDEV

.. autodata:: geosoft.gxapi.ST_STDERR
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_STDERR

.. autodata:: geosoft.gxapi.ST_SKEW
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_SKEW

.. autodata:: geosoft.gxapi.ST_KURTOSIS
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_KURTOSIS

.. autodata:: geosoft.gxapi.ST_BASE
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_BASE

.. autodata:: geosoft.gxapi.ST_SUM
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_SUM

.. autodata:: geosoft.gxapi.ST_SUM2
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_SUM2

.. autodata:: geosoft.gxapi.ST_SUM3
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_SUM3

.. autodata:: geosoft.gxapi.ST_SUM4
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_SUM4

.. autodata:: geosoft.gxapi.ST_MINPOS
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_MINPOS

.. autodata:: geosoft.gxapi.ST_HIST_MAXCOUNT
    :annotation:

.. autoattribute:: geosoft.gxapi.ST_HIST_MAXCOUNT
<file_sep>/docs/GXINTERNET.rst
.. _GXINTERNET:

GXINTERNET class
==================================

.. autoclass:: geosoft.gxapi.GXINTERNET
    :members:
<file_sep>/docs/GXVOXE.rst
.. _GXVOXE:

GXVOXE class
==================================

.. autoclass:: geosoft.gxapi.GXVOXE
    :members:

.. _VOXE_EVAL:

VOXE_EVAL constants
-----------------------------------------------------------------------

Voxel Evaluation modes

.. autodata:: geosoft.gxapi.VOXE_EVAL_NEAR
    :annotation:

.. autoattribute:: geosoft.gxapi.VOXE_EVAL_NEAR

.. autodata:: geosoft.gxapi.VOXE_EVAL_INTERP
    :annotation:

.. autoattribute:: geosoft.gxapi.VOXE_EVAL_INTERP

.. autodata:: geosoft.gxapi.VOXE_EVAL_BEST
    :annotation:

.. autoattribute:: geosoft.gxapi.VOXE_EVAL_BEST
<file_sep>/geosoft/gxapi/GXTEST.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref

### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXTEST(gxapi_cy.WrapTEST):
    """
    GXTEST class.

    Used to place special testing methods
    """

    def __init__(self, handle=0):
        super(GXTEST, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXTEST <geosoft.gxapi.GXTEST>`

        :returns: A null `GXTEST <geosoft.gxapi.GXTEST>`
        :rtype:   GXTEST
        """
        return GXTEST()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0

    # Miscellaneous

    @classmethod
    def enable_disable_arc_engine_license(cls, enable):
        """
        Forcefully disable ArcEngine license availability for testing purposes

        :param enable: Enable/disable?
        :type  enable: bool

        .. versionadded:: 6.4.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapTEST._enable_disable_arc_engine_license(GXContext._get_tls_geo(), enable)

    @classmethod
    def arc_engine_license(cls):
        """
        Test availability of an ArcEngine license on this system

        :returns: 0 - Not available, 1 - Available
        :rtype:   int

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapTEST._arc_engine_license(GXContext._get_tls_geo())
        return ret_val

    @classmethod
    def test_mode(cls):
        """
        Checks to see if we are running inside testing system

        :rtype: bool

        .. versionadded:: 6.4.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapTEST._test_mode(GXContext._get_tls_geo())
        return ret_val

    @classmethod
    def wrapper_test(cls, funcs, log):
        """
        Test to make sure all wrappers are valid linking

        :param funcs: List of functions to test
        :param log:   Output log file
        :type funcs:  str
        :type log:    str

        .. versionadded:: 6.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapTEST._wrapper_test(GXContext._get_tls_geo(), funcs.encode(), log.encode())

    @classmethod
    def core_class(cls, cl, log):
        """
        Generic Class Test Wrapper

        :param cl:  Name of class to test
        :param log: Output log file
        :type cl:   str
        :type log:  str

        .. versionadded:: 9.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapTEST._core_class(GXContext._get_tls_geo(), cl.encode(), log.encode())

### endblock ClassImplementation

### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend

### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/examples/tutorial/Hello World/hello_world.py
import geosoft.gxapi as gxapi
import geosoft.gxpy as gxpy


def rungx():
    # Entry point when run as a GX inside a Geosoft desktop application:
    # show the greeting in a dialog, addressed to the current user id.
    gxapi.GXSYS.display_message("GX Python", "Hello {}".format(gxpy.gx.gx().gid))


if __name__ == "__main__":
    # Stand-alone execution: print the greeting to the console instead.
    print('Hello {}'.format(gxpy.gx.gx().gid))
<file_sep>/geosoft/gxapi/GXPGU.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from .
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPGU(gxapi_cy.WrapPGU): """ GXPGU class. A collection of methods applied to `GXPG <geosoft.gxapi.GXPG>` objects, including fills, trending and 2-D `GXFFT <geosoft.gxapi.GXFFT>` operations. """ def __init__(self, handle=0): super(GXPGU, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPGU <geosoft.gxapi.GXPGU>` :returns: A null `GXPGU <geosoft.gxapi.GXPGU>` :rtype: GXPGU """ return GXPGU() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # General @classmethod def bool_mask(cls, pg, ref_fil): """ Apply reference file boolean mask to pager :param pg: Pager obj :param ref_fil: sRefFil - reference file for boolean mask flag. :type pg: GXPG :type ref_fil: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapPGU._bool_mask(GXContext._get_tls_geo(), pg, ref_fil.encode()) @classmethod def expand(cls, pg_i, pg_o, ex_pcnt, ex_shp, ex_x, ex_y): """ Expand a pager by filling the dummies for expanded edges :param pg_i: Original pager obj :param pg_o: Expanded pager obj :param ex_pcnt: % expansion :param ex_shp: Option 0 - rectangular, 1 - square :param ex_x: X dimension to expand to (0 for expansion to FFT2D legal dimension) :param ex_y: Y dimension to expand to (0 for expansion to FFT2D legal dimension) :type pg_i: GXPG :type pg_o: GXPG :type ex_pcnt: float :type ex_shp: int :type ex_x: int :type ex_y: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** 3D pagers are expanded in X,Y direction the number of slices(Z) is unchanged . """ gxapi_cy.WrapPGU._expand(GXContext._get_tls_geo(), pg_i, pg_o, ex_pcnt, ex_shp, ex_x, ex_y) @classmethod def fill(cls, pg, fl_roll_wt, fl_roll_base, fl_roll_dist, fl_mxf, fl_mxp, fl_amp_lmt, fl_edge_lmt, fl_edge_wid, fl_npass, ref_fil): """ Replace all dummies in a pager by predict values. :param pg: Pager obj :param fl_roll_wt: Roll off weighting option: 1 - linear, 2 - square :param fl_roll_base: dRollBase - the value to roll off to, `GS_R8DM <geosoft.gxapi.GS_R8DM>` for roll off to mean value line by line. :param fl_roll_dist: lRollDist - (at unit of cell dist.) for roll-off. 0 for no roll of, -1 for the default: 2 times of min. dummy edge dim. :param fl_mxf: lMxf - max. filter length. -1 for no max. entropy. 0 for the default of MIN(minimum dummy edge dim, 32). :param fl_mxp: lMxp - max. pred. sample 0 for the default of 2*lMxf. :param fl_amp_lmt: dAmpLmt - limit (abs. value) amplitudes to this level. Amplitudes are limited starting at half this value. <=0.0 for no amp limit. :param fl_edge_lmt: dEdgeLmt - limit edge (abs. value) amplitudes to this level. <0.0 for no edge limit. :param fl_edge_wid: lEdgeWidth - within this dist. (at unit of cell size) for amp. limited. -1 for no edge limit. 0 for the default of minimum dummy edge dim. :param fl_npass: iNPass - number of time to pass smooth filter :param ref_fil: sRefFil - reference file for smooth filter flag. :type pg: GXPG :type fl_roll_wt: int :type fl_roll_base: float :type fl_roll_dist: int :type fl_mxf: int :type fl_mxp: int :type fl_amp_lmt: float :type fl_edge_lmt: float :type fl_edge_wid: int :type fl_npass: int :type ref_fil: str .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapPGU._fill(GXContext._get_tls_geo(), pg, fl_roll_wt, fl_roll_base, fl_roll_dist, fl_mxf, fl_mxp, fl_amp_lmt, fl_edge_lmt, fl_edge_wid, fl_npass, ref_fil.encode()) @classmethod def fill_value(cls, pg, value): """ Set all values in a pager to a single value. :param pg: Pager obj :param value: Value to set in pager :type pg: GXPG :type value: float .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapPGU._fill_value(GXContext._get_tls_geo(), pg, value) @classmethod def filt_sym(cls, pg, npass, usefile, file, size, vv): """ Apply 5x5, 7x7 or 9X9 symmetric convolution filter to a `GXPG <geosoft.gxapi.GXPG>`. :param pg: Pager obj :param npass: Number of time to pass smooth filter :param usefile: Flag to use filter file :param file: File for filter values :param size: Size of filter window, 5/7/9 :param vv: Array of 6/10/15 filter coefficients :type pg: GXPG :type npass: int :type usefile: int :type file: str :type size: int :type vv: GXVV .. versionadded:: 5.1.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapPGU._filt_sym(GXContext._get_tls_geo(), pg, npass, usefile, file.encode(), size, vv) @classmethod def filt_sym5(cls, pg, npass, usefile, file, vv): """ Apply 5x5 symmetric convolution filter to a `GXPG <geosoft.gxapi.GXPG>`. :param pg: Pager obj :param npass: Number of time to pass smooth filter :param usefile: Flag to use filter file :param file: File for filter values :param vv: Array of 6 filter coefficients at position 00, 10, 11, 20, 21, 22. 
Symmetric filters look like : 22 21 20 21 22 21 11 10 11 21 20 10 00 10 20 21 11 10 11 21 22 21 20 21 22 :type pg: GXPG :type npass: int :type usefile: int :type file: str :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapPGU._filt_sym5(GXContext._get_tls_geo(), pg, npass, usefile, file.encode(), vv) @classmethod def grid_peak(cls, grid, nlmt, vv_x, vv_y, vv_z): """ Pick grid peaks. :param grid: Grid file name :param nlmt: :ref:`BLAKEY_TEST` :param vv_x: X of found peaks :param vv_y: Y of found peaks :param vv_z: Z values of found peaks :type grid: str :type nlmt: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Blakey test limit defines how grid peaks are to be found. For example, with the `BLAKEY_TEST_ONESIDE <geosoft.gxapi.BLAKEY_TEST_ONESIDE>`, a grid point will be picked if its grid value is greater than the value of one or more of its four neighouring points. """ gxapi_cy.WrapPGU._grid_peak(GXContext._get_tls_geo(), grid.encode(), nlmt, vv_x, vv_y, vv_z) @classmethod def dw_gridding_dat(cls, pg, dat, reg): """ `dw_gridding_dat <geosoft.gxapi.GXPGU.dw_gridding_dat>` Inverse-distance weighting gridding method, `GXDAT <geosoft.gxapi.GXDAT>` version. :param pg: Input grid :param dat: `GXDAT <geosoft.gxapi.GXDAT>` source :param reg: Parameters (see above) :type pg: GXPG :type dat: GXDAT :type reg: GXREG .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See the notes for `dw_gridding_db <geosoft.gxapi.GXPGU.dw_gridding_db>`. 
""" gxapi_cy.WrapPGU._dw_gridding_dat(GXContext._get_tls_geo(), pg, dat, reg) @classmethod def dw_gridding_dat_3d(cls, pg, dat, reg): """ `dw_gridding_dat_3d <geosoft.gxapi.GXPGU.dw_gridding_dat_3d>` Inverse-distance weighting gridding method, `GXDAT <geosoft.gxapi.GXDAT>` version, 3D. :param pg: Input 3D `GXPG <geosoft.gxapi.GXPG>` :param dat: `GXDAT <geosoft.gxapi.GXDAT>` source :param reg: Parameters (see above) :type pg: GXPG :type dat: GXDAT :type reg: GXREG .. versionadded:: 8.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See the notes for `dw_gridding_db_3d <geosoft.gxapi.GXPGU.dw_gridding_db_3d>`. """ gxapi_cy.WrapPGU._dw_gridding_dat_3d(GXContext._get_tls_geo(), pg, dat, reg) @classmethod def dw_gridding_db(cls, pg, db, x, y, z, reg): """ `dw_gridding_db <geosoft.gxapi.GXPGU.dw_gridding_db>` Inverse-distance weighting gridding method, `GXDB <geosoft.gxapi.GXDB>` version. :param pg: Input grid :param db: Database :param x: X Channel [READONLY] :param y: Y Channel [READONLY] :param z: Data Channel [READONLY] :param reg: Parameters (see above) :type pg: GXPG :type db: GXDB :type x: int :type y: int :type z: int :type reg: GXREG .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Grid cells take on the averaged values within a search radius, weighted inversely by distance. Weighting can be controlled using the power and slope properties; weighting = 1 / (distance^wtpower + 1/slope) where distance is in units of grid cells (X dimenstion). Default is 0.0, If the blanking distance is set, all cells whose center point is not within the blanking distance of at least one data point are set to dummy. 
`GXREG <geosoft.gxapi.GXREG>` Parameters: X0, Y0, DX, DY: Grid origin, and cell sizes (required) WT_POWER (default=2), WT_SLOPE (default=1) Weighting function parameters SEARCH_RADIUS: Distance weighting limit (default = 4 * SQRT(DX*DY)) BLANKING_DISTANCE: Dummy values farther from data than this distance. (default = 4 * SQRT(DX*DY)) LOG: Apply log transform to input data before gridding (0:No (default), 1:Yes)? LOG_BASE: One of `VV_LOG_BASE_10 <geosoft.gxapi.VV_LOG_BASE_10>` (default) or `VV_LOG_BASE_E <geosoft.gxapi.VV_LOG_BASE_E>` LOG_NEGATIVE: One of `VV_LOG_NEGATIVE_NO <geosoft.gxapi.VV_LOG_NEGATIVE_NO>` (default) or `VV_LOG_NEGATIVE_YES <geosoft.gxapi.VV_LOG_NEGATIVE_YES>` """ gxapi_cy.WrapPGU._dw_gridding_db(GXContext._get_tls_geo(), pg, db, x, y, z, reg) @classmethod def dw_gridding_db_3d(cls, pg, db, x, y, z, data, reg): """ `dw_gridding_db_3d <geosoft.gxapi.GXPGU.dw_gridding_db_3d>` Inverse-distance weighting gridding method, `GXDB <geosoft.gxapi.GXDB>` version, 3D. :param pg: Input 3D `GXPG <geosoft.gxapi.GXPG>` :param db: Database :param x: X Channel [READONLY] :param y: Y Channel [READONLY] :param z: Z Channel [READONLY] :param data: Data Channel [READONLY] :param reg: Parameters (see above) :type pg: GXPG :type db: GXDB :type x: int :type y: int :type z: int :type data: int :type reg: GXREG .. versionadded:: 8.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** 3D cells take on the averaged values within a search radius, weighted inversely by distance. Weighting can be controlled using the power and slope properties; weighting = 1 / (distance^wtpower + 1/slope) where distance is in units of grid cells (X dimenstion). Default is 0.0, If the blanking distance is set, all cells whose center point is not within the blanking distance of at least one data point are set to dummy. 
`GXREG <geosoft.gxapi.GXREG>` Parameters: X0, Y0, Z0, DX, DY, DZ: Grid origin, and cell sizes (required) WT_POWER (default=2), WT_SLOPE (default=1) Weighting function parameters SEARCH_RADIUS: Distance weighting limit (default = 4 * CUBE_ROOT(DX*DY*DZ)) BLANKING_DISTANCE: Dummy values farther from data than this distance. (default = 4 * CUBE_ROOT(DX*DY*DZ)) LOG: Apply log transform to input data before gridding (0:No (default), 1:Yes)? LOG_BASE: One of `VV_LOG_BASE_10 <geosoft.gxapi.VV_LOG_BASE_10>` (default) or `VV_LOG_BASE_E <geosoft.gxapi.VV_LOG_BASE_E>` LOG_NEGATIVE: One of `VV_LOG_NEGATIVE_NO <geosoft.gxapi.VV_LOG_NEGATIVE_NO>` (default) or `VV_LOG_NEGATIVE_YES <geosoft.gxapi.VV_LOG_NEGATIVE_YES>` """ gxapi_cy.WrapPGU._dw_gridding_db_3d(GXContext._get_tls_geo(), pg, db, x, y, z, data, reg) @classmethod def dw_gridding_vv(cls, pg, vv_x, vv_y, vv_z, reg): """ `dw_gridding_vv <geosoft.gxapi.GXPGU.dw_gridding_vv>` Inverse-distance weighting gridding method, `GXVV <geosoft.gxapi.GXVV>` version. :param pg: Input grid :param vv_x: X locations :param vv_y: Y locations :param vv_z: Data values to grid :param reg: Parameters (see above) :type pg: GXPG :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type reg: GXREG .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See the notes for `dw_gridding_db <geosoft.gxapi.GXPGU.dw_gridding_db>`. """ gxapi_cy.WrapPGU._dw_gridding_vv(GXContext._get_tls_geo(), pg, vv_x, vv_y, vv_z, reg) @classmethod def numeric_to_thematic(cls, pg_i, vv, pg_o): """ `numeric_to_thematic <geosoft.gxapi.GXPGU.numeric_to_thematic>` Set index values in a pager based on a numeric pager with translation `GXVV <geosoft.gxapi.GXVV>`. 
Returns Nothing :param pg_i: Input numeric `GXPG <geosoft.gxapi.GXPG>` :param vv: Translation `GXVV <geosoft.gxapi.GXVV>` (see notes above) :param pg_o: Output thematic `GXPG <geosoft.gxapi.GXPG>` :type pg_i: GXPG :type vv: GXVV :type pg_o: GXPG .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The values in the input data `GXVV <geosoft.gxapi.GXVV>` represent the center-of-range values of unique properties with indices 0 to N-1, where N is the number of items in the input `GXVV <geosoft.gxapi.GXVV>`. This `GXVV <geosoft.gxapi.GXVV>` is sorted from smallest to largest, and each value in in the input numeric `GXPG <geosoft.gxapi.GXPG>` is tested to see into which range it goes. The closest range value for each item is used, so the half-way point is the dividing point. The top and bottom-most range widths are determined by the "inside half-width" to the nearest range. The INDEX of the closest range is then inserted into the output `GXPG <geosoft.gxapi.GXPG>`, so it can be used in a thematic voxel (for instance). """ gxapi_cy.WrapPGU._numeric_to_thematic(GXContext._get_tls_geo(), pg_i, vv, pg_o) @classmethod def peakedness(cls, grid, pkness, vv_x, vv_y, vv_z): """ Find all peaks in peakedneess grid pager :param grid: Grid file name :param pkness: Cutoff limit for finding peaks :param vv_x: X of found peaks :param vv_y: Y of found peaks :param vv_z: Z values of found peaks :type grid: str :type pkness: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapPGU._peakedness(GXContext._get_tls_geo(), grid.encode(), pkness, vv_x, vv_y, vv_z) @classmethod def peakedness_grid(cls, grdi, grdo, radius, percent_lesser): """ Create peakedneess grid from input grid. 
:param grdi: Input grid file name :param grdo: Output grid (peakedness) file name :param radius: Radius :param percent_lesser: Percent Lesser value (see notes) :type grdi: str :type grdo: str :type radius: int :type percent_lesser: float .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This function creates a peakedneess grid from input grid. Radius, is the maximum radius at which the value of the parent pixel is compared to the value of surrounding pixels. ``percent_lesser``, is used to indicate the percentage of pixels at each radii smaller than or equal to Radius that must have value lower than the parent pixel in order to call that radius true or equal to 1. Description: For each pixel in the grid a series of radii are evaluated from 1 to Radius. If the percentage of pixels for a given radius is less than ``percent_lesser`` the parent pixel receives an additional 1. For examples if the Radius is set to 5 and the ``percent_lesser`` is set to 70%. And radius 1 = 90%, radius 2 = 85%, radius 3 = 75%, radius 4 = 70% and radius 5 = 65% then the parent pixel would receive 1+1+1+1+0 = 4. Use: This function is useful in isolating the anomaly peaks in data that has a large value range for anomalies. For example the 1 mV anomaly could quite possibly have the same representation as the 100 mV anomaly using this function. """ gxapi_cy.WrapPGU._peakedness_grid(GXContext._get_tls_geo(), grdi.encode(), grdo.encode(), radius, percent_lesser) @classmethod def ref_file(cls, pg, ref_fil): """ Create a reference file (boolean mask flag) from pager. :param pg: `GXPG <geosoft.gxapi.GXPG>` object :param ref_fil: Reference file name :type pg: GXPG :type ref_fil: str .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** A reference file is a binary file with the following format: The first 8 bytes are the pager dimensions NX and NY as longs. The remaining bits, one bit per pager cell - (NX * NY)/8 bytes are zero where the pager is dummy, and 1 where the pager is defined. The reference file is used in various operations where it is necessary to mask some output to the original defined cells. """ gxapi_cy.WrapPGU._ref_file(GXContext._get_tls_geo(), pg, ref_fil.encode()) @classmethod def save_file(cls, pg, xo, yo, dx, dy, rot, tr, ipj, file): """ Writes a `GXPG <geosoft.gxapi.GXPG>` to an image file. :param pg: Input `GXPG <geosoft.gxapi.GXPG>` object :param xo: X origin :param yo: Y origin :param dx: DX :param dy: DY :param rot: Rotation angle :param tr: Trend information or NULL :param ipj: Projection or NULL :param file: Output file name :type pg: GXPG :type xo: float :type yo: float :type dx: float :type dy: float :type rot: float :type tr: GXTR :type ipj: GXIPJ :type file: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The trend object and projection are optional. """ gxapi_cy.WrapPGU._save_file(GXContext._get_tls_geo(), pg, xo, yo, dx, dy, rot, tr, ipj, file.encode()) @classmethod def thematic_to_numeric(cls, pg_i, vv, pg_o): """ Set numeric values in a pager based on an index pager with translation `GXVV <geosoft.gxapi.GXVV>`. Returns Nothing :param pg_i: Input Index `GXPG <geosoft.gxapi.GXPG>` :param vv: Translation `GXVV <geosoft.gxapi.GXVV>` :param pg_o: Output Data `GXPG <geosoft.gxapi.GXPG>` :type pg_i: GXPG :type vv: GXVV :type pg_o: GXPG .. 
versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The items in the input data `GXVV <geosoft.gxapi.GXVV>` are inserted into the output `GXPG <geosoft.gxapi.GXPG>` using the indices in the index `GXPG <geosoft.gxapi.GXPG>`. This function is useful when converting a thematic voxel, which is type `GS_LONG <geosoft.gxapi.GS_LONG>` and contains indices into its own internal `GXTPAT <geosoft.gxapi.GXTPAT>` object, and you provide a numeric mapping `GXVV <geosoft.gxapi.GXVV>`, calculated using SetupTranslateToNumericVV_TPAT. """ gxapi_cy.WrapPGU._thematic_to_numeric(GXContext._get_tls_geo(), pg_i, vv, pg_o) @classmethod def trend(cls, pg_i, pg_o, tr, tr_opt, tr_pt_bs, xo, yo, dx, dy): """ Trend remove or replace back in pager :param pg_i: Original pager obj :param pg_o: Trended pager obj :param tr: Trend obj :param tr_opt: Option 0 - calculate, 1 - given in `GXTR <geosoft.gxapi.GXTR>`, 2 - replace back from `GXTR <geosoft.gxapi.GXTR>` :param tr_pt_bs: Trend base on: 0 - all points, 1 - edge points :param xo: Trend origin rXo, :param yo: Trend origin rYo, :param dx: Increment in X direction rDx, :param dy: Increment in Y direction rDy :type pg_i: GXPG :type pg_o: GXPG :type tr: GXTR :type tr_opt: int :type tr_pt_bs: int :type xo: float :type yo: float :type dx: float :type dy: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapPGU._trend(GXContext._get_tls_geo(), pg_i, pg_o, tr, tr_opt, tr_pt_bs, xo, yo, dx, dy) # Math Operations @classmethod def add_scalar(cls, pg, scalar): """ Add a scalar value to a pager :param pg: Pager :param scalar: Scalar Value :type pg: GXPG :type scalar: float .. 
versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Only available for FLOAT or DOUBLE pagers """ gxapi_cy.WrapPGU._add_scalar(GXContext._get_tls_geo(), pg, scalar) @classmethod def multiply_scalar(cls, pg, scalar): """ Multiply a scalar value and a pager :param pg: Pager :param scalar: Scalar Value :type pg: GXPG :type scalar: float .. versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Only available for FLOAT or DOUBLE pagers """ gxapi_cy.WrapPGU._multiply_scalar(GXContext._get_tls_geo(), pg, scalar) # Matrix Operation @classmethod def correlation_matrix(cls, pg_u, pg_o): """ Find the correlations between columns in a matrix :param pg_u: Input matrix :param pg_o: Returned correlation matrix :type pg_u: GXPG :type pg_o: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The input matrix is M rows by N columns. The returned matrix is a symmetric N by N matrix whose elements are the normalized dot products of the columns of the input matrix with themselves. The elements take on values from 0 (orthogonal) to 1 (parallel). """ gxapi_cy.WrapPGU._correlation_matrix(GXContext._get_tls_geo(), pg_u, pg_o) @classmethod def correlation_matrix2(cls, pg_u, corr, pg_o): """ Same as `correlation_matrix <geosoft.gxapi.GXPGU.correlation_matrix>`, but select correlation type. :param pg_u: Input matrix :param corr: :ref:`PGU_CORR` :param pg_o: Returned correlation matrix :type pg_u: GXPG :type corr: int :type pg_o: GXPG .. 
versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapPGU._correlation_matrix2(GXContext._get_tls_geo(), pg_u, corr, pg_o) @classmethod def invert_matrix(cls, pg_i, pg_o): """ Inverts a square matrix using LU decomp. and back-substitution :param pg_i: Input matrix :param pg_o: Output inverted matrix (can be same as input). :type pg_i: GXPG :type pg_o: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This is an "in-place" operation, and set up so that the input and output pagers may be the same handle. (If they are different, the input pager remains unchanged). Pagers and VVs must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. """ gxapi_cy.WrapPGU._invert_matrix(GXContext._get_tls_geo(), pg_i, pg_o) @classmethod def jacobi(cls, pg_i, vv_d, pg_eigen): """ Find eigenvalues, eigenvectors of a real symmetric matrix. :param pg_i: Input Pager :param vv_d: Eigenvalues (returned) :param pg_eigen: Eigenvectors (returned) :type pg_i: GXPG :type vv_d: GXVV :type pg_eigen: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The number of rows must equal the number of columns. Eienvalues, vectors are sorted in descending order. """ gxapi_cy.WrapPGU._jacobi(GXContext._get_tls_geo(), pg_i, vv_d, pg_eigen) @classmethod def lu_back_sub(cls, pg_a, vv_i, vv_b, vv_sol): """ Solve a linear system using LU decomposition and back-substitution. :param pg_a: LU decomposition of A :param vv_i: Permutation vector (type INT) :param vv_b: Right hand side vector B (input) :param vv_sol: Solution vector (output) :type pg_a: GXPG :type vv_i: GXVV :type vv_b: GXVV :type vv_sol: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Solves the system Ax = b for a given b, using the LU decomposition of the matrix a The LU decomposition and the permutation vector are obtained from `lu_back_sub <geosoft.gxapi.GXPGU.lu_back_sub>`. Pagers and VVs must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` except for the permutation vector, which should be INT """ gxapi_cy.WrapPGU._lu_back_sub(GXContext._get_tls_geo(), pg_a, vv_i, vv_b, vv_sol) @classmethod def lu_decomp(cls, pg_i, pg_o, vv_perm): """ Perform an LU decomposition on a square pager. :param pg_i: Input :param pg_o: LU decomposition (may be same pager as input) :param vv_perm: Permutation vector (type INT) :type pg_i: GXPG :type pg_o: GXPG :type vv_perm: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The L and U matrix are both contained in the returned pager; The "L" matrix is composed of the sub-diagonal elements of the output pager, as well as "1" values on the diagonal. The "U" matrix is composed of the diagonal elements (sub-diagonal elements set to 0). This is an "in-place" operation, and set up so that the input and output pagers may be the same handle. (If they are different, the input pager remains unchanged). The LU decomposition, and the permutation vector are used for `lu_back_sub <geosoft.gxapi.GXPGU.lu_back_sub>`. Pagers must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` and the permutation vector type INT """ gxapi_cy.WrapPGU._lu_decomp(GXContext._get_tls_geo(), pg_i, pg_o, vv_perm) @classmethod def matrix_mult(cls, pg_u, transpose_u, pg_v, transpose, pg_uv): """ Multiply two pagers as if they were matrices. 
:param pg_u: Matrix U :param transpose_u: TRUE (1) if U should be transposed before multiplication :param pg_v: Matrix V :param transpose: TRUE (1) if V should be transposed before multiplication :param pg_uv: Returned matrix U*V :type pg_u: GXPG :type transpose_u: int :type pg_v: GXPG :type transpose: int :type pg_uv: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The matrices must be correctly dimensioned, taking into account whether transposition should occur before multiplication. The input matrices are not altered on output (even if transposition is requested). Assertions if: Matrices are not expected sizes Dummies are treated as 0 values. """ gxapi_cy.WrapPGU._matrix_mult(GXContext._get_tls_geo(), pg_u, transpose_u, pg_v, transpose, pg_uv) @classmethod def matrix_vector_mult(cls, pg_u, vv_x, vv_o): """ Multiply a `GXVV <geosoft.gxapi.GXVV>` by a pager like a matrix*vector multiply. :param pg_u: Matrix U :param vv_x: Vector x :param vv_o: Returned vector U*x :type pg_u: GXPG :type vv_x: GXVV :type vv_o: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The matrix is input as an M rows (data) by N columns (variables) `GXPG <geosoft.gxapi.GXPG>`. The vector must be of length N. The output `GXVV <geosoft.gxapi.GXVV>` is set to length M. The `GXPG <geosoft.gxapi.GXPG>` and VVs must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. Terminates if: Matrices, `GXVV <geosoft.gxapi.GXVV>` are not expected sizes (taken from U) PGs are not `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. Dummies are treated as 0 values. 
""" gxapi_cy.WrapPGU._matrix_vector_mult(GXContext._get_tls_geo(), pg_u, vv_x, vv_o) @classmethod def sv_decompose(cls, pg_a, pg_u, vv_w, pg_v): """ Do a singular value decomposition on a matrix stored as a `GXPG <geosoft.gxapi.GXPG>` :param pg_a: Input A matrix, M data (rows), N variables (columns) :param pg_u: The returned U Matrix :param vv_w: Returned weights (W) :param pg_v: Returned V matrix :type pg_a: GXPG :type pg_u: GXPG :type vv_w: GXVV :type pg_v: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The matrix is input as an N rows (data) by M columns (variables) `GXPG <geosoft.gxapi.GXPG>`. On return, the matrix is decomposed to A = U * W * Vt. If M<N, then an error will be registered. In this case, augment the "A" `GXPG <geosoft.gxapi.GXPG>` with rows of zero values. The input matrices must be A[M,N], U[M.N] and V[N,N]. The length of the W `GXVV <geosoft.gxapi.GXVV>` is set by sSVD_PGU to N. The Pagers must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. Terminates if: U is not M by N. (Taken from size of A) V is not N by N. (Taken from #columns in A). PGs, VV are not `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` """ gxapi_cy.WrapPGU._sv_decompose(GXContext._get_tls_geo(), pg_a, pg_u, vv_w, pg_v) @classmethod def sv_recompose(cls, pg_u, vv_w, pg_v, min_w, pg_a): """ Reconstitute the original matrix from an SVD. :param pg_u: U matrix :param vv_w: Weights (W) :param pg_v: V matrix :param min_w: Minimum weight to use (Dummy for all) :param pg_a: A matrix (returned) :type pg_u: GXPG :type vv_w: GXVV :type pg_v: GXPG :type min_w: float :type pg_a: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The matrix is input as an N rows (data) by M columns (variables) `GXPG <geosoft.gxapi.GXPG>`. 
On return, the matrix is decomposed to A = U * W * Vt. If M<N, then an error will be registered. In this case, augment the "A" `GXPG <geosoft.gxapi.GXPG>` with rows of zero values. The input matrices must be A[M,N], U[M.N] and V[N,N]. The length of the W `GXVV <geosoft.gxapi.GXVV>` is set by sSVDecompose_PGU to N. The Pagers must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. Terminates if: U is not M by N. (Taken from size of A) V is not N by N. (Taken from #columns in A). PGs, VV are not `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. Dummies are treated as 0 values. """ gxapi_cy.WrapPGU._sv_recompose(GXContext._get_tls_geo(), pg_u, vv_w, pg_v, min_w, pg_a) # Principal Component Analysis @classmethod def pc_communality(cls, pg_i, vv_c): """ Determines principal component communalities. :param pg_i: Input pager of the principal components :param vv_c: Returned communality values :type pg_i: GXPG :type vv_c: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Calculate communalities (sums of the squares of the column values in each row) Pagers and VVs must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. """ gxapi_cy.WrapPGU._pc_communality(GXContext._get_tls_geo(), pg_i, vv_c) @classmethod def pc_loadings(cls, pg_x, pg_loadings): """ Compute the principal component loadings from the standardized data. :param pg_x: Standardized data matrix (M by N) :param pg_loadings: Principal component loadings (N by N) :type pg_x: GXPG :type pg_loadings: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Works on columns of the `GXPG <geosoft.gxapi.GXPG>`. Calculates the correlation matrix from the columns of the standardized data, then computes the eigen values and eigenvectors of the correlation matrix. 
The loadings are the eigenvectors, ordered by descending eigenvalues, scaled by the square root of the eigenvalues. The returned pager must be sized the same as the input pager. Correlations are performed using "`PGU_CORR_SIMPLE <geosoft.gxapi.PGU_CORR_SIMPLE>`", so if you want Pearson correlations, or wish to use a modified correlation matrix, use `pc_loadings2 <geosoft.gxapi.GXPGU.pc_loadings2>` and input the correlation matrix directly. """ gxapi_cy.WrapPGU._pc_loadings(GXContext._get_tls_geo(), pg_x, pg_loadings) @classmethod def pc_loadings2(cls, pg_c, pg_loadings): """ Same as PCLoading_PGU, but input correlation matrix. :param pg_c: Correllation matrix (N by N) :param pg_loadings: Principal component loadings (N by N) :type pg_c: GXPG :type pg_loadings: GXPG .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `pc_loadings <geosoft.gxapi.GXPGU.pc_loadings>`. """ gxapi_cy.WrapPGU._pc_loadings2(GXContext._get_tls_geo(), pg_c, pg_loadings) @classmethod def pc_scores(cls, pg_x, pg_loadings, pg_scores): """ Compute the principal component scores from the standardized data. :param pg_x: Standardized data matrix (M by N) :param pg_loadings: Principal component loadings (input) (N by L, L<=N) :param pg_scores: Principal component scores (returned) (M by L, L<=N) :type pg_x: GXPG :type pg_loadings: GXPG :type pg_scores: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** t -1 Forms the product X Ap (Ap Ap), where X is the standardized data matrix, and Ap is the matrix of principal component loadings (see `pc_loadings <geosoft.gxapi.GXPGU.pc_loadings>`). The loadings must be input, and can be calculated by calling `pc_loadings <geosoft.gxapi.GXPGU.pc_loadings>`. Pagers and VVs must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. 
""" gxapi_cy.WrapPGU._pc_scores(GXContext._get_tls_geo(), pg_x, pg_loadings, pg_scores) @classmethod def pc_standardize(cls, pg, vv_m, vv_s, dir): """ Remove/Replace mean and standard deviation :param pg: Matrix to standardize :param vv_m: Means :param vv_s: Standard deviations :param dir: :ref:`PGU_DIRECTION` :type pg: GXPG :type vv_m: GXVV :type vv_s: GXVV :type dir: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Works on columns of the `GXPG <geosoft.gxapi.GXPG>`. """ gxapi_cy.WrapPGU._pc_standardize(GXContext._get_tls_geo(), pg, vv_m, vv_s, dir) @classmethod def pc_standardize2(cls, pg, vv_mask, vv_m, vv_s, dir): """ Remove/Replace mean and standard deviation, subset values. :param pg: Matrix to standardize :param vv_mask: Mask `GXVV <geosoft.gxapi.GXVV>` for data selection (forward only) :param vv_m: Means :param vv_s: Standard deviations :param dir: Forward or reverse :type pg: GXPG :type vv_mask: GXVV :type vv_m: GXVV :type vv_s: GXVV :type dir: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Like `pc_standardize <geosoft.gxapi.GXPGU.pc_standardize>`, except that not all the values are included in the calculation of the means and standard deviations. The inclusion is controlled by a mask `GXVV <geosoft.gxapi.GXVV>`, The rows where the mask is dummy are not included in the calculation, but ALL the values are standardized. """ gxapi_cy.WrapPGU._pc_standardize2(GXContext._get_tls_geo(), pg, vv_mask, vv_m, vv_s, dir) @classmethod def pc_transform(cls, pg, vv_d, vv_f, vv_t, dir): """ Transform/De-transform data. 
:param pg: Matrix to transform :param vv_d: Detection limits for the columns :param vv_f: Maximum values for the columns :param vv_t: :ref:`PGU_TRANS` :param dir: :ref:`PGU_DIRECTION` :type pg: GXPG :type vv_d: GXVV :type vv_f: GXVV :type vv_t: GXVV :type dir: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Works on columns of the `GXPG <geosoft.gxapi.GXPG>`. Forward direction: Applies the selected transform to the data. Backward direction: Applies the inverse transform to the data. The detection limits are input with a `GXVV <geosoft.gxapi.GXVV>`. In the forward transform, data values less than the detection limit are set to the limit. The factor limits are input with a `GXVV <geosoft.gxapi.GXVV>`. In the forward transform, data values greater than the maximum values are set to the maximum. """ gxapi_cy.WrapPGU._pc_transform(GXContext._get_tls_geo(), pg, vv_d, vv_f, vv_t, dir) @classmethod def pc_varimax(cls, pg_i, pg_o): """ Perform the Kaiser Varimax transformation on pr. comp. loadings :param pg_i: Principal component loadings (input) (N by M, M<=N) :param pg_o: Rotated principal component loadings (returned) (N by L, L<=M) :type pg_i: GXPG :type pg_o: GXPG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Rotates the principal components using the Kaiser's varimax scheme to move move each factor axis to positions so that projections from each variable on the factor axes are either near the extremities or near the origin. Pagers must be type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. 
""" gxapi_cy.WrapPGU._pc_varimax(GXContext._get_tls_geo(), pg_i, pg_o) # Specialized Operations @classmethod def maximum_terrain_steepness(cls, pg, annular_size): """ Compute the Maximum Steepness of a topography Pager :param pg: Topography Pager :param annular_size: Annular Size :type pg: GXPG :type annular_size: int :returns: Maximum Terrain Steepness Computation. :rtype: float .. versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Calculates forward-looking slopes SX and SY in the X and Y directions using pager locations (ix, iy), (ix+size, iy), (ix, iy+isize) and returns SX*SX + SY*SY. The values in the last "size" rows and columns are not processed. The wrapper was created for testing and development purposes. """ ret_val = gxapi_cy.WrapPGU._maximum_terrain_steepness(GXContext._get_tls_geo(), pg, annular_size) return ret_val # Obsolete @classmethod def direct_gridding_db(cls, pg, xo, yo, dx, dy, rot, db, x, y, z, method): """ Direct-gridding method, `GXDB <geosoft.gxapi.GXDB>` version. :param pg: Input grid :param xo: X origin of grid :param yo: Y origin of grid :param dx: X cell size :param dy: Y cell size :param rot: Rotation angle (degrees CCW). :param db: Database :param x: X Channel [READONLY] :param y: Y Channel [READONLY] :param z: Data Channel [READONLY] :param method: :ref:`PGU_DIRECTGRID` :type pg: GXPG :type xo: float :type yo: float :type dx: float :type dy: float :type rot: float :type db: GXDB :type x: int :type y: int :type z: int :type method: int .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Grid cells take on the specified statistic of the values inside the cell area. Grid cells containing no data values are set to dummy. 
""" gxapi_cy.WrapPGU._direct_gridding_db(GXContext._get_tls_geo(), pg, xo, yo, dx, dy, rot, db, x, y, z, method) @classmethod def direct_gridding_db2(cls, pg, xo, yo, xMax, yMax, dx, dy, rot, db, x, y, z, method): """ Direct-gridding method, `GXDB <geosoft.gxapi.GXDB>` version. :param pg: Input grid :param xo: X origin of grid :param yo: Y origin of grid :param xMax: X extent of grid :param yMax: Y extent of grid :param dx: X cell size :param dy: Y cell size :param rot: Rotation angle (degrees CCW). :param db: Database :param x: X Channel [READONLY] :param y: Y Channel [READONLY] :param z: Data Channel [READONLY] :param method: :ref:`PGU_DIRECTGRID` :type pg: GXPG :type xo: float :type yo: float :type xMax: float :type yMax: float :type dx: float :type dy: float :type rot: float :type db: GXDB :type x: int :type y: int :type z: int :type method: int .. versionadded:: 2023.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Grid cells take on the specified statistic of the values inside the cell area. Grid cells containing no data values are set to dummy. """ gxapi_cy.WrapPGU._direct_gridding_db2(GXContext._get_tls_geo(), pg, xo, yo, xMax, yMax, dx, dy, rot, db, x, y, z, method) @classmethod def direct_gridding_db_3d(cls, pg, xo, yo, zo, dx, dy, dz, rot, db, x, y, z, data, method): """ Direct-gridding method, `GXDB <geosoft.gxapi.GXDB>` version, 3D. :param pg: Input 3D `GXPG <geosoft.gxapi.GXPG>` :param xo: X origin of 3D grid :param yo: Y origin of 3D grid :param zo: Z origin of 3D grid :param dx: X cell size :param dy: Y cell size :param dz: Z cell size :param rot: Rotation angle (degrees CCW, vertical axis only). 
:param db: Database :param x: X Channel [READONLY] :param y: Y Channel [READONLY] :param z: Z Channel [READONLY] :param data: Data Channel [READONLY] :param method: :ref:`PGU_DIRECTGRID` :type pg: GXPG :type xo: float :type yo: float :type zo: float :type dx: float :type dy: float :type dz: float :type rot: float :type db: GXDB :type x: int :type y: int :type z: int :type data: int :type method: int .. versionadded:: 8.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** 3D grid cells take on the specified statistic of the values inside the cell area. Grid cells containing no data values are set to dummy. """ gxapi_cy.WrapPGU._direct_gridding_db_3d(GXContext._get_tls_geo(), pg, xo, yo, zo, dx, dy, dz, rot, db, x, y, z, data, method) @classmethod def direct_gridding_vv(cls, pg, xo, yo, dx, dy, rot, v_vx, v_vy, v_vz, method): """ Direct-gridding method, `GXVV <geosoft.gxapi.GXVV>` version. :param pg: Input grid :param xo: X origin of grid :param yo: Y origin of grid :param dx: X cell size :param dy: Y cell size :param rot: Rotation angle (degrees CCW). :param v_vx: X locations of values :param v_vy: Y locations of values :param v_vz: Z values to grid :param method: :ref:`PGU_DIRECTGRID` :type pg: GXPG :type xo: float :type yo: float :type dx: float :type dy: float :type rot: float :type v_vx: GXVV :type v_vy: GXVV :type v_vz: GXVV :type method: int .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Grid cells take on the specified statistic of the values inside the cell area. Grid cells containing no data values are set to dummy. 
""" gxapi_cy.WrapPGU._direct_gridding_vv(GXContext._get_tls_geo(), pg, xo, yo, dx, dy, rot, v_vx, v_vy, v_vz, method) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/geosoft_research/self_organizing_maps/README.md # Self-Organizing Maps Python script with a Qt UX for classifying multi-variate data in a Geosoft GDB using a Self-Organing Map Neural Network (AI). ## Reference [Documentation (installation, references)](https://geosoftgxdev.atlassian.net/wiki/spaces/GGR/pages/490635382/Self-Organizing+Maps) <file_sep>/geosoft/gxapi/GXITR.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXREG import GXREG ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXITR(gxapi_cy.WrapITR): """ GXITR class. The `GXITR <geosoft.gxapi.GXITR>` class provides access to `GXITR <geosoft.gxapi.GXITR>` files. An `GXITR <geosoft.gxapi.GXITR>` file maps ranges of values to specific colors. The `GXITR <geosoft.gxapi.GXITR>` object is typically used in conjunction with `GXMVIEW <geosoft.gxapi.GXMVIEW>` objects (see `GXMVIEW <geosoft.gxapi.GXMVIEW>` and `GXMVU <geosoft.gxapi.GXMVU>`). **Note:** Histogram ranges and color zone ranges Histogram bins are defined with inclusive minima and exclusive maxima; for instance if Min = 0 and Inc = 1, then the second bin would include all values z such that 0 <= z < 1 (the first bin has all values < 0). Color zones used in displaying grids (`GXITR <geosoft.gxapi.GXITR>`, ZON etc...) 
are the opposite, with exclusive minima and inclusive maxima. For instance, if a zone is defined from 0 to 1, then it would contain all values of z such that 0 < z <= 1. These definitions mean that it is impossible to perfectly assign `GXITR <geosoft.gxapi.GXITR>` colors to individual bars of a histogram. The best work-around when the data values are integers is to define the color zones using 0.5 values between the integers. A general work-around is to make the number of histogram bins much larger than the number of color zones. The `ITR_NULL <geosoft.gxapi.ITR_NULL>` is used to hold a NULL handle to an `GXITR <geosoft.gxapi.GXITR>` class. """ def __init__(self, handle=0): super(GXITR, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXITR <geosoft.gxapi.GXITR>` :returns: A null `GXITR <geosoft.gxapi.GXITR>` :rtype: GXITR """ return GXITR() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def set_name(self, name): """ Set the name of the `GXITR <geosoft.gxapi.GXITR>`. :param name: Name to set :type name: str .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_name(name.encode()) def get_name(self, name): """ Get the name of the `GXITR <geosoft.gxapi.GXITR>`. :param name: Name returned :type name: str_ref .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._get_name(name.value.encode()) def change_brightness(self, brt): """ Change the brightness. :param brt: -1.0 - black; 0.0 no change; 1.0 white :type brt: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 0.0 brightness does nothing. -1.0 to 0.0 makes colors darker, -1.0 is black 0.0 to 1.0 makes colors lighter, 1.0 is white """ self._change_brightness(brt) def color_vv(self, vv_d, vv_c): """ Get color transform of a `GXVV <geosoft.gxapi.GXVV>`. :param vv_d: Input `GXVV <geosoft.gxapi.GXVV>` of values (none-string) :param vv_c: Output `GXVV <geosoft.gxapi.GXVV>` of colors (type INT) :type vv_d: GXVV :type vv_c: GXVV .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the input value is a dummy, then the output color is 0 (no color). """ self._color_vv(vv_d, vv_c) def copy(self, it_rs): """ Copies ITRs :param it_rs: `GXITR <geosoft.gxapi.GXITR>` Source :type it_rs: GXITR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(it_rs) @classmethod def create(cls): """ Create an `GXITR <geosoft.gxapi.GXITR>` object :returns: `GXITR <geosoft.gxapi.GXITR>` object :rtype: GXITR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapITR._create(GXContext._get_tls_geo()) return GXITR(ret_val) @classmethod def create_file(cls, file): """ Create an `GXITR <geosoft.gxapi.GXITR>` object from an itr, tbl, zon, lut file. :param file: File name, type determined from extension :type file: str :returns: `GXITR <geosoft.gxapi.GXITR>` object :rtype: GXITR .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapITR._create_file(GXContext._get_tls_geo(), file.encode()) return GXITR(ret_val) @classmethod def create_img(cls, img, tbl, zone, contour): """ Create an `GXITR <geosoft.gxapi.GXITR>` for an image. :param tbl: Color table name, NULL for default :param zone: :ref:`ITR_ZONE` :param contour: Color contour interval or `rDUMMY <geosoft.gxapi.rDUMMY>` :type img: GXIMG :type tbl: str :type zone: int :type contour: float :returns: `GXITR <geosoft.gxapi.GXITR>` object :rtype: GXITR .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `ITR_ZONE_DEFAULT <geosoft.gxapi.ITR_ZONE_DEFAULT>` model will ask the `GXIMG <geosoft.gxapi.GXIMG>` to provide a model if it can. If a shaded relief model is selected, a shaded image will be created and a shaded image file will be created with the same name as the original grid but with the suffux "_s" added to the name part of the grid. """ ret_val = gxapi_cy.WrapITR._create_img(GXContext._get_tls_geo(), img, tbl.encode(), zone, contour) return GXITR(ret_val) @classmethod def create_map(cls, map, name): """ Create `GXITR <geosoft.gxapi.GXITR>` from Map with `GXAGG <geosoft.gxapi.GXAGG>` Group name. :param map: `GXMAP <geosoft.gxapi.GXMAP>` on which to place the view :param name: `GXAGG <geosoft.gxapi.GXAGG>` Group name :type map: GXMAP :type name: str :returns: `GXITR <geosoft.gxapi.GXITR>` object :rtype: GXITR .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapITR._create_map(GXContext._get_tls_geo(), map, name.encode()) return GXITR(ret_val) @classmethod def create_s(cls, bf): """ Create an `GXITR <geosoft.gxapi.GXITR>` object from a `GXBF <geosoft.gxapi.GXBF>` :param bf: `GXBF <geosoft.gxapi.GXBF>` to serialize from :type bf: GXBF :returns: `GXITR <geosoft.gxapi.GXITR>` object :rtype: GXITR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapITR._create_s(GXContext._get_tls_geo(), bf) return GXITR(ret_val) def equal_area(self, st, contour): """ Calculate an equal area transform. :param st: Stat object with a histogram :param contour: Color contour interval or dummy for none :type st: GXST :type contour: float .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the number of individual populated zones calculated using an equal-area ITR is less than the number of colours in the ITR, then an error message is displayed to that effect. If you want to avoid this error message in your work-flow, call EqualAreaOrLinear_ITR, which will apply a linear transform with the input number of colours, covering the whole range, should the equal-area transform have a problem, and no error message will appear, even if there is no valid data at all. """ self._equal_area(st, contour) def equal_area_or_linear(self, st, contour): """ Calculate an equal area transform. :param st: Stat object with a histogram :param contour: Color contour interval or dummy for none :type st: GXST :type contour: float .. 
versionadded:: 2022.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the number of individual populated zones calculated using an equal-area ITR is less than the number of colours in the ITR, then EqualArea_ITR displays an error message is displayed to that effect. If you want to avoid this error message in your work-flow, call this function, EqualAreaOrLinear_ITR, which will apply a linear transform with the input number of colours, covering the whole range, should the equal-area transform have a problem, and no error message will appear, even if there is no valid data at all. """ self._equal_area_or_linear(st, contour) def get_data_limits(self, min, max): """ Get `GXITR <geosoft.gxapi.GXITR>` max/min data limits. :param min: Data minimum value (or `rDUMMY <geosoft.gxapi.rDUMMY>` if not set) :param max: Data maximum value (or `rDUMMY <geosoft.gxapi.rDUMMY>` if not set) :type min: float_ref :type max: float_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** In some ITRs, especially those defined and embedded inside grid (`GXIMG <geosoft.gxapi.GXIMG>`) objects, the actual data minimum and maximum values are stored. This function retrieves those values. This is NOT true of all `GXITR <geosoft.gxapi.GXITR>` objects, and in those cases dummy values will be returned. """ min.value, max.value = self._get_data_limits(min.value, max.value) def get_reg(self): """ Get the `GXITR <geosoft.gxapi.GXITR>`'s `GXREG <geosoft.gxapi.GXREG>` :returns: `GXREG <geosoft.gxapi.GXREG>` object :rtype: GXREG .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_reg() return GXREG(ret_val) def get_zone_color(self, zone, color): """ Get the color in a zone of the `GXITR <geosoft.gxapi.GXITR>` :param zone: Number of the zone to set. :param color: :ref:`MVIEW_COLOR` :type zone: int :type color: int_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Valid indices are 0 to N-1, where N is the size of the `GXITR <geosoft.gxapi.GXITR>`. """ color.value = self._get_zone_color(zone, color.value) def get_zone_base_color(self, zone, color): """ Get the base color in a zone of the `GXITR <geosoft.gxapi.GXITR>` :param zone: Number of the zone to set. :param color: :ref:`MVIEW_COLOR` :type zone: int :type color: int_ref .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Valid indices are 0 to N-1, where N is the size of the `GXITR <geosoft.gxapi.GXITR>`. The base colour is the colour in the ITR before any brightness is applied. """ color.value = self._get_zone_base_color(zone, color.value) def color_value(self, val): """ Transform single data value to color :param val: Data value :type val: float :returns: :ref:`MVIEW_COLOR` :rtype: int .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._color_value(val) return ret_val def get_size(self): """ Get the number of zones in an `GXITR <geosoft.gxapi.GXITR>` :returns: The number of zones. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_size() return ret_val def get_zone_model_type(self): """ Get the `GXITR <geosoft.gxapi.GXITR>` zone model (e.g. Linear, LogLin, Equal Area). :returns: :ref:`ITR_ZONE_MODEL` :rtype: int .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function may be used to determine if a color transform is included in an `GXITR <geosoft.gxapi.GXITR>`. """ ret_val = self._get_zone_model_type() return ret_val def get_zone_model(self, model, parameters): """ Get the `GXITR <geosoft.gxapi.GXITR>` zone model (e.g. Linear, LogLin, Equal Area) and the accompanying values (if defined) :param model: :ref:`ITR_ZONE_MODEL` :param parameters: `GXVV <geosoft.gxapi.GXVV>` object (REAL): see notes for values returned :type model: int_ref :type parameters: GXVV .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The following parameters are returned for the following values returned for :ref:`ITR_ZONE_MODEL`: ITR_ZONE_MODEL_LINEAR: [0]: Contour, [1]: Min, [2]: Max ITR_ZONE_MODEL_LOGLIN: [0]: Contour, [1]: Min, [2]: Max ITR_ZONE_MODEL_NORMAL: [0]: Contour, [1]: Mean, [2]: Standard Deviation, [3]: Exponent ITR_ZONE_MODEL_EQUAL: [0]: Contour All other types do not return any values (the VV will be empty). """ model.value = self._get_zone_model(model.value, parameters) def set_zone_model(self, model, parameters): """ Set the `GXITR <geosoft.gxapi.GXITR>` zone model (e.g. Linear, LogLin, Equal Area) and the accompanying values (if defined) :param model: :ref:`ITR_ZONE_MODEL` :param parameters: `GXVV <geosoft.gxapi.GXVV>` object (REAL): see notes for values returned :type model: int :type parameters: GXVV .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The following parameters are required for the following values of :ref:`ITR_ZONE_MODEL`: ITR_ZONE_MODEL_LINEAR: [0]: Contour, [1]: Min, [2]: Max ITR_ZONE_MODEL_LOGLIN: [0]: Contour, [1]: Min, [2]: Max ITR_ZONE_MODEL_NORMAL: [0]: Contour, [1]: Mean, [2]: Standard Deviation, [3]: Exponent ITR_ZONE_MODEL_EQUAL: [0]: Contour All other types do not require any values (the VV will be empty). NOTE: This will not change the actual ranges, so care is needed when using this function that the input specification accurately reflects the actual ranges. """ self._set_zone_model(model, parameters) def linear(self, min, max, contour): """ Calculate a linear transform. :param min: Minimum :param max: Maximum :param contour: Color contour interval or dummy for none :type min: float :type max: float :type contour: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._linear(min, max, contour) def load_a(self, file): """ Load to an ASCII file, ZON, TBL or ER-Mapper LUT :param file: File name :type file: str .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._load_a(file.encode()) def log_linear(self, min, max, contour): """ Calculate a log transform. :param min: Minimum ( > 0) :param max: Maximum ( > minimum) :param contour: Color contour interval or dummy for none :type min: float :type max: float :type contour: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The function name is a misnomer. This is a pure log transform. 
""" self._log_linear(min, max, contour) def normal(self, std_dev, mean, exp, contour): """ Calculate a normal distribution transform. :param std_dev: Standard deviation :param mean: Mean :param exp: Expansion, normally 1.0 :param contour: Color contour interval or dummy for none :type std_dev: float :type mean: float :type exp: float :type contour: float .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._normal(std_dev, mean, exp, contour) def power_zone(self, pow): """ Modified `GXITR <geosoft.gxapi.GXITR>` zone values to 10 (or e) raized to the power of the values :param pow: :ref:`ITR_POWER` :type pow: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._power_zone(pow) def get_brightness(self): """ Get the brightness setting of the `GXITR <geosoft.gxapi.GXITR>` :returns: The brightness setting of the `GXITR <geosoft.gxapi.GXITR>` :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Brightness can range from -1.0 (black) to 1.0 (white). This brightness control is relative to the normal color when the `GXITR <geosoft.gxapi.GXITR>` is created. .. seealso:: `change_brightness <geosoft.gxapi.GXITR.change_brightness>`, `get_contrast <geosoft.gxapi.GXITR.get_contrast>`, `set_bright_contrast <geosoft.gxapi.GXITR.set_bright_contrast>` """ ret_val = self._get_brightness() return ret_val def get_contrast(self): """ Get the contrast setting of the `GXITR <geosoft.gxapi.GXITR>` :returns: The contrast setting of the `GXITR <geosoft.gxapi.GXITR>` :rtype: float .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Brightness can range from 0.0 (flat) to 1.0 (normal full contrast). .. seealso:: `change_brightness <geosoft.gxapi.GXITR.change_brightness>`, `GXAGG.get_brightness <geosoft.gxapi.GXAGG.get_brightness>`, `set_bright_contrast <geosoft.gxapi.GXITR.set_bright_contrast>` """ ret_val = self._get_contrast() return ret_val def get_contour(self): """ Get the contour value associated with the current transform model of the `GXITR <geosoft.gxapi.GXITR>` :returns: The contour setting of the `GXITR <geosoft.gxapi.GXITR>` :rtype: float .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Linear, Normal and Equal-Area transforms accept a contour value as part of their definitions on creation. All the colour-breaks are rounded to the nearest integral multiple of the contour value (if defined). This function returns the value defined, and rDUMMY if none is defined. """ ret_val = self._get_contour() return ret_val def get_zone_value(self, zone): """ Get the value in a zone of the `GXITR <geosoft.gxapi.GXITR>` :param zone: Number of the zone to set. :type zone: int :returns: The value of the specified zone. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Valid indices are 0 to N-2, where N is the size of the `GXITR <geosoft.gxapi.GXITR>`. """ ret_val = self._get_zone_value(zone) return ret_val def save_a(self, file): """ Save to an ASCII file, ZON, TBL or ER-Mapper LUT :param file: File name :type file: str .. 
versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save_a(file.encode()) def save_file(self, file): """ Save to any type (based on the extension of the input file name). :param file: File name :type file: str .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save_file(file.encode()) def serial(self, bf): """ Serialize an `GXITR <geosoft.gxapi.GXITR>` to a `GXBF <geosoft.gxapi.GXBF>` :param bf: `GXBF <geosoft.gxapi.GXBF>` to serialize to :type bf: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._serial(bf) @classmethod def set_agg_map(cls, map, name, itr): """ Set `GXITR <geosoft.gxapi.GXITR>` to an `GXAGG <geosoft.gxapi.GXAGG>` in map :param map: `GXMAP <geosoft.gxapi.GXMAP>` on which to place the view :param name: `GXAGG <geosoft.gxapi.GXAGG>` group name :param itr: `GXITR <geosoft.gxapi.GXITR>` object to set :type map: GXMAP :type name: str :type itr: GXITR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See the `create_map <geosoft.gxapi.GXITR.create_map>` function """ gxapi_cy.WrapITR._set_agg_map(GXContext._get_tls_geo(), map, name.encode(), itr) def set_bright_contrast(self, brt, con): """ Set the brightness of the `GXITR <geosoft.gxapi.GXITR>` colors :param brt: 0.0 - black; 0.5 normal; 1.0 white :param con: 0.0 - flat; 1.0 normal :type brt: float :type con: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Brightness settings: 0.0 - black 0.5 - normal (no change) 1.0 - white Contrast 0.0 - flat 1.0 - full contrast (normal) """ self._set_bright_contrast(brt, con) def set_color_model(self, model): """ Set the color model of an `GXITR <geosoft.gxapi.GXITR>`. :param model: :ref:`ITR_COLOR_MODEL` :type model: int .. versionadded:: 5.0.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_color_model(model) @classmethod def default_color_method(cls): """ Return the user-defined global default color method. :returns: One of `ITR_ZONE_EQUALAREA <geosoft.gxapi.ITR_ZONE_EQUALAREA>`, `ITR_ZONE_LINEAR <geosoft.gxapi.ITR_ZONE_LINEAR>`, `ITR_ZONE_NORMAL <geosoft.gxapi.ITR_ZONE_NORMAL>` or `ITR_ZONE_LOGLINEAR <geosoft.gxapi.ITR_ZONE_LOGLINEAR>` :rtype: int .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapITR._default_color_method(GXContext._get_tls_geo()) return ret_val def set_data_limits(self, min, max): """ Set `GXITR <geosoft.gxapi.GXITR>` max/min data limits. :param min: Data minimum value :param max: Data maximum value :type min: float :type max: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_data_limits(min, max) def set_size(self, zones): """ Set the number of zones in an `GXITR <geosoft.gxapi.GXITR>` :param zones: Number of zones to set `GXITR <geosoft.gxapi.GXITR>` to. :type zones: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_size(zones) def set_zone_color(self, zone, color): """ Set the color in a zone of the `GXITR <geosoft.gxapi.GXITR>` :param zone: Number of the zone to set. :param color: :ref:`MVIEW_COLOR` :type zone: int :type color: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** " Valid indices are 0 to N-1, where N is the size of the `GXITR <geosoft.gxapi.GXITR>`. This function modifies the colours as rendered, including applied brightness, and triggers an internal reset of the ITR brightness to zero, with all the "rendered" colours becoming the "base" colours. """ self._set_zone_color(zone, color) def set_zone_base_color(self, zone, color): """ Set the color in a zone of the `GXITR <geosoft.gxapi.GXITR>` :param zone: Number of the zone to set. :param color: :ref:`MVIEW_COLOR` :type zone: int :type color: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** " Valid indices are 0 to N-1, where N is the size of the `GXITR <geosoft.gxapi.GXITR>`. This function modifies the "base" colours directly. The base colours have the current brightness applied to produce the rendered colours. The internal brightness remains unchanged. """ self._set_zone_base_color(zone, color) def set_zone_active(self, zone, active): """ Set whether a zone of the `GXITR <geosoft.gxapi.GXITR>` is active (1) or rendered trasparent (0) :param zone: Number of the zone to set. :param active: 1: active, 0: inactive (transparent) :type zone: int :type active: int .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Valid indices are 0 to N-1, where N is the size of the `GXITR <geosoft.gxapi.GXITR>`. """ self._set_zone_active(zone, active) def get_zone_active(self, zone): """ Get whether a zone of the `GXITR <geosoft.gxapi.GXITR>` is active (1) or rendered trasparent (0) :param zone: Number of the zone to set. :type zone: int :rtype: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Valid indices are 0 to N-1, where N is the size of the `GXITR <geosoft.gxapi.GXITR>`. """ ret_val = self._get_zone_active(zone) return ret_val def set_zone_value(self, zone, value): """ Set the value in a zone of the `GXITR <geosoft.gxapi.GXITR>` :param zone: Number of the zone to set. :param value: The value to set :type zone: int :type value: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Valid indices are 0 to N-2, where N is the size of the `GXITR <geosoft.gxapi.GXITR>`. """ self._set_zone_value(zone, value) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXMULTIGRID3DUTIL.rst .. _GXMULTIGRID3DUTIL: GXMULTIGRID3DUTIL class ================================== .. autoclass:: geosoft.gxapi.GXMULTIGRID3DUTIL :members: .. _RBFKERNEL: RBFKERNEL constants ----------------------------------------------------------------------- Math kernel to use in the RBF Computation .. autodata:: geosoft.gxapi.RBFKERNEL_DISTANCE :annotation: .. autoattribute:: geosoft.gxapi.RBFKERNEL_DISTANCE .. 
autodata:: geosoft.gxapi.RBFKERNEL_MULTIQUADRATIC :annotation: .. autoattribute:: geosoft.gxapi.RBFKERNEL_MULTIQUADRATIC <file_sep>/geosoft/gxapi/GXE3DV.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXMVIEW import GXMVIEW ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXE3DV(gxapi_cy.WrapE3DV): """ GXE3DV class. Methods to manipulate an active 3D View """ def __init__(self, handle=0): super(GXE3DV, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXE3DV <geosoft.gxapi.GXE3DV>` :returns: A null `GXE3DV <geosoft.gxapi.GXE3DV>` :rtype: GXE3DV """ return GXE3DV() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def get_data_view(self): """ Get the current data (3D) `GXMVIEW <geosoft.gxapi.GXMVIEW>` :returns: `GXMVIEW <geosoft.gxapi.GXMVIEW>` object :rtype: GXMVIEW .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_data_view() return GXMVIEW(ret_val) def get_base_view(self): """ Get the current Base `GXMVIEW <geosoft.gxapi.GXMVIEW>` (used to draw 2D legends for groups) :returns: `GXMVIEW <geosoft.gxapi.GXMVIEW>` object :rtype: GXMVIEW .. 
versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_base_view() return GXMVIEW(ret_val) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/geometry.py """ Spatial geometric objects. :Classes: :`Geometry`: base class for all geometries :`Point`: (x, y, z) point :`Point2`: pair of `Point` instances that define a line, or box, etc. :`PPoint`: multiple `Point` instances :`Mesh`: mesh surface made up of triangular faces defined by verticies .. note:: Regression tests provide usage examples: `geometry tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_geometry.py>`_ """ import numpy as np from collections.abc import Sequence import geosoft import geosoft.gxapi as gxapi from . import coordinate_system as gxcs from . import vv as gxvv __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) def _geo_cs(g, geo_class, coordinate_system, **kwargs): if hasattr(g, 'coordinate_system') and g.coordinate_system == coordinate_system: return g return geo_class(g, coordinate_system, **kwargs) def first_coordinate_system(geo_objects): """ Return the first found known coordinate system in the list :param geo_objects: objects as iterable :return: valid coordinate system or `None` if none found .. versionadded:: 9.3.1 """ for o in geo_objects: if hasattr(o, 'coordinate_system'): if gxcs.is_known(o.coordinate_system): return o.coordinate_system return None class GeometryException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.geometry`. """ pass def extent_union(g1, g2): """ Return the spatial union of two spatial objects. 
:param g1: extent (p0 < p1), returned extent will be in this coordinate system :param g2: second object :return: `Point2` instance in the coordinate system of g1 .. versionadded:: 9.3.1 """ def ext(g): if g is None or isinstance(g, Point2): return g if isinstance(g, Geometry): return g.extent return Point2(g).extent g1 = ext(g1) g2 = ext(g2) if g1 is None: return g2 if g2 is None: return g1 g2p0x, g2p0y, g2p0z = g2.p0.xyz g2p1x, g2p1y, g2p1z = g2.p1.xyz if g1.coordinate_system != g2.coordinate_system: corners = np.array([(g2p0x, g2p0y, g2p0z), (g2p0x, g2p1y, g2p0z), (g2p1x, g2p1y, g2p0z), (g2p1x, g2p0y, g2p0z), (g2p0x, g2p0y, g2p1z), (g2p0x, g2p1y, g2p1z), (g2p1x, g2p1y, g2p1z), (g2p1x, g2p0y, g2p1z)], dtype=np.float64) ex = PPoint(PPoint(corners, g2.coordinate_system), g1.coordinate_system).extent return extent_union(g1, ex) g1p0x, g1p0y, g1p0z = g1.p0.xyz g1p1x, g1p1y, g1p1z = g1.p1.xyz if g2p0x >= g1p0x and g2p0y >= g1p0y and g2p0z >= g1p0z and\ g2p1x <= g1p1x and g2p1y <= g1p1y and g2p1z <= g1p1z: return g1 min_x = g1p0x if g1p0x < g2p0x else g2p0x min_y = g1p0y if g1p0y < g2p0y else g2p0y min_z = g1p0z if g1p0z < g2p0z else g2p0z max_x = g1p1x if g1p1x > g2p1x else g2p1x max_y = g1p1y if g1p1y > g2p1y else g2p1y max_z = g1p1x if g1p1z > g2p1z else g2p1z return Point2(((min_x, min_y, min_z), (max_x, max_y, max_z)), g1.coordinate_system) class Geometry: """ Geometry base class for all geometries and spatial objects in Geosoft. :param coordinate_system: `geosoft.gxpy.coordinate_system.Coordinate_system` instance. 
:param name: instance name string :param gxobj: optional gxapi instance that can satisfy get_ipj() and/or get_extent() :Properties: :`Geometry.name`: name for the geometry :`Geometry.coordinate_system`: spatial coordinate system of the x, y, z locations :`Geometry.extent`: spatial extent as a `Point2` :`Geometry.extent_xyz`: (min_x, min_y, min_z, max_x, max_y, max_z) :`Geometry.extent_xy`: (min_x, min_y, max_x, max_y) :`Geometry.dimension`: (dx, dy, dz) dimension :`Geometry.dimension_xy`: (dx, dy) dimension :`Geometry.centroid`: center point as a `Point` :`Geometry.centroid_xyz`: (x, y, z) location of the object center :`Geometry.centroid_xy`: (x, y) center .. versionadded:: 9.2 """ def __enter__(self): return self def __exit__(self, xtype, xvalue, xtraceback): pass def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __init__(self, coordinate_system=None, name=None, gxobj=None): if name is None: name = '_geometry_' self._cs = coordinate_system self._name = name self._gxobj = gxobj def __eq__(self, other): if self.coordinate_system != other.coordinate_system: return False if self._gxobj != other.gxobj: return False return True @property def coordinate_system(self): """`geosoft.gxpy.coordinate_system.Coordinate_system` instance or None. 
Can be set.""" if self._cs and not isinstance(self._cs, gxcs.Coordinate_system): self._cs = gxcs.Coordinate_system(self._cs) if self._gxobj and hasattr(self._gxobj, 'get_ipj'): ipj = gxapi.GXIPJ.create() self._gxobj.get_ipj(ipj) self._cs = gxcs.Coordinate_system(ipj) return self._cs @coordinate_system.setter def coordinate_system(self, cs): if cs and self._gxobj and hasattr(self._gxobj, 'set_ipj'): if not isinstance(cs, gxcs.Coordinate_system): cs = gxcs.Coordinate_system(cs) self._gxobj.set_ipj(cs.gxipj) self._cs = cs @property def gxobj(self): """An associated gxapi object, or None.""" return self._gxobj @property def name(self): """Spatial object name, can be set.""" return self._name @name.setter def name(self, name): self._name = name @property def extent(self): """ Object extent as a `Point2` instance.""" if self._gxobj and hasattr(self._gxobj, 'get_extents'): rx0 = gxapi.float_ref() ry0 = gxapi.float_ref() rz0 = gxapi.float_ref() rx1 = gxapi.float_ref() ry1 = gxapi.float_ref() rz1 = gxapi.float_ref() self._gxobj.get_extents(rx0, ry0, rz0, rx1, ry1, rz1) cs = self.coordinate_system return Point2(((rx0.value, ry0.value, rz0.value), (rx1.value, ry1.value, rz1.value)), cs) else: return None @property def extent_xyz(self): """Object extent as a tuple (xmin, ymin, zmin, xmax, ymax, zmax).""" e = self.extent if e is None: return None, None, None, None, None, None return e[0].x, e[0].y, e[0].z, e[1].x, e[1].y, e[1].z @property def extent_xy(self): """ Horizontal extent as a tuple (min_x, min_y, max_x, max_y).""" e = self.extent if e is None: return None, None, None, None return e[0].x, e[0].y, e[1].x, e[1].y @property def extent_minimum(self): """Minimum geometry extent as `Point` instance.""" if self.extent is None: return None return self.extent[0] @property def extent_maximum(self): """Maximum geometry extent as `Point` instance.""" if self.extent is None: return None return self.extent[1] @property def extent_minimum_xyz(self): """Minimum geometry extent as 
class Point(Geometry, Sequence):
    """
    Spatial location (x, y, z).

    Basic instance arithmetic and equality testing is supported.

    :param p:   point in one of the following forms:

                    `Point` instance, returns a copy

                    (x, y [, z]) implied z is as defined by z=

                    k makes a point (k, k, k)

    :param coordinate_system:   coordinate system or None
    :param z:   implied z if len(p) is 2.
    :param kwargs: passed to base class `Geometry`

    Iterates on [x, y, z]

    Operators supported: = + - * /

    .. versionadded:: 9.2

    .. versionchanged:: 9.3.1 added coordinate_system parameter
    """

    def __str__(self):
        return "{}({}, {}, {})".format(self.name, self.x, self.y, self.z)

    def __init__(self, p, coordinate_system=None, name=None, z=0., **kwargs):

        if name is None:
            name = '_point_'

        if isinstance(p, Point):
            # copy constructor; reproject if a different coordinate system was requested
            if coordinate_system is None:
                coordinate_system = p.coordinate_system
            super().__init__(coordinate_system=coordinate_system, name=name, **kwargs)
            if coordinate_system != p.coordinate_system:
                self.p = gxcs.Coordinate_translate(p.coordinate_system, coordinate_system).convert(p.p)
            else:
                self.p = p.p.copy()

        else:
            super().__init__(coordinate_system=coordinate_system, name=name, **kwargs)

            if isinstance(p, np.ndarray):
                if len(p) > 2:
                    self.p = p[:3].copy()
                else:
                    self.p = np.empty(3)
                    self.p[:2] = p
                    self.p[2] = z

            elif hasattr(p, '__len__'):
                lp = len(p)
                if lp == 1:
                    # a 1-tuple (k,) makes the point (k, k, k)
                    v = float(p[0])
                    # builtin float replaces np.float, which was deprecated in
                    # NumPy 1.20 and removed in 1.24 (it was an alias for float)
                    self.p = np.array((v, v, v), dtype=float)
                else:
                    self.p = np.empty(3)
                    if lp == 2:
                        self.p[0] = float(p[0]) if p[0] is not None else np.nan
                        self.p[1] = float(p[1]) if p[1] is not None else np.nan
                        self.p[2] = z
                    else:
                        self.p[0] = float(p[0]) if p[0] is not None else np.nan
                        self.p[1] = float(p[1]) if p[1] is not None else np.nan
                        self.p[2] = float(p[2]) if p[2] is not None else np.nan

            else:
                # a scalar k makes the point (k, k, k)
                p = float(p)
                self.p = np.array((p, p, p))

        self._next = 0  # iterator cursor; iteration is stateful and not reentrant

    def __len__(self):
        return 1

    def __iter__(self):
        return self

    def __next__(self):
        # yields x, y, z in order, then resets the cursor
        if self._next >= 3:
            self._next = 0
            raise StopIteration
        else:
            item = self._next
            self._next += 1
            return self.p[item]

    def __getitem__(self, item):
        return self.p[item]

    def __add__(self, p):
        if not isinstance(p, Point):
            p = Point(p)
        else:
            p = _geo_cs(p, Point, self.coordinate_system)
        return Point(self.p + p.p, self.coordinate_system)

    def __sub__(self, p):
        if not isinstance(p, Point):
            p = Point(p)
        else:
            p = _geo_cs(p, Point, self.coordinate_system)
        return Point(self.p - p.p, self.coordinate_system)

    def __neg__(self):
        return Point(-self.p, coordinate_system=self.coordinate_system)

    def __mul__(self, p):
        if not isinstance(p, Point):
            p = Point(p)
        else:
            p = _geo_cs(p, Point, self.coordinate_system)
        return Point(self.p * p.p, self.coordinate_system)

    def __truediv__(self, p):
        if not isinstance(p, Point):
            p = Point(p)
        else:
            p = _geo_cs(p, Point, self.coordinate_system)
        return Point(self.p / p.p, self.coordinate_system)

    def __eq__(self, other):
        # base-class equality (coordinate system etc.) must hold first
        if not super(Point, self).__eq__(other):
            return False
        return np.array_equal(self.p, other.p)

    @property
    def x(self):
        """ x value, can be set"""
        return self.p[0]

    @x.setter
    def x(self, value):
        self.p[0] = float(value)

    @property
    def y(self):
        """ y value, can be set"""
        return self.p[1]

    @y.setter
    def y(self, value):
        self.p[1] = float(value)

    @property
    def z(self):
        """ z value, can be set"""
        return self.p[2]

    @z.setter
    def z(self, value):
        self.p[2] = float(value)

    @property
    def xy(self):
        """ (x, y), can be set"""
        return self.p[0], self.p[1]

    @xy.setter
    def xy(self, xy):
        self.p[0] = float(xy[0])
        self.p[1] = float(xy[1])

    @property
    def xyz(self):
        """ (x, y, z), can be set"""
        return self.p[0], self.p[1], self.p[2]

    @xyz.setter
    def xyz(self, xyz):
        self.p[0] = float(xyz[0])
        self.p[1] = float(xyz[1])
        self.p[2] = float(xyz[2])

    @property
    def extent(self):
        """Extent as a degenerate `Point2` (both corners at this point)."""
        return Point2((self, self))

    @property
    def pp(self):
        """Point as a numpy array shaped (1, 3)"""
        return self.p.reshape((1, 3))

    def copy(self):
        """Return a copy"""
        return Point(self)
:param p: Points in one of the following forms: `Point2` makes a copy in the required coordinate system (`Point`, `Point`) (x, y [, z]) two points at the same location ((x, y [, z]), (x, y [, z])) (x0, y0, x1, y1) implied z is 0 (x0, y0, z0, x1, y1, z1) :param coordinate_system: coordinate system or None :param z: implied z value when only (x, y) is passed :param kwargs: passed to base class `Geometry` Iterates on two points [p0, p1]. Operators supported: = + - * / Second operand may be a `Point2` or a `Point`. .. versionadded:: 9.2 .. versionchanged:: 9.3.1 added coordinate_system parameter """ def __str__(self): return "{}[({}, {}, {}) ({}, {}, {})]".format(self.name, self.p0.x, self.p0.y, self.p0.z, self.p1.x, self.p1.y, self.p1.z) def __init__(self, p, coordinate_system=None, name=None, z=0, **kwargs): if name is None: name = '_point2_' super().__init__(coordinate_system=coordinate_system, name=name, **kwargs) if isinstance(p, Point): if coordinate_system is None: coordinate_system = p.coordinate_system self.p0 = self.p1 = Point(p, coordinate_system=coordinate_system) elif isinstance(p, Point2): if coordinate_system is None: coordinate_system = p.coordinate_system self.p0 = Point(p.p0, coordinate_system=coordinate_system) self.p1 = Point(p.p1, coordinate_system=coordinate_system) else: if not hasattr(p, '__iter__'): self.p0 = self.p1 = Point(p, coordinate_system, z=z) elif len(p) == 2: if coordinate_system is None: coordinate_system = first_coordinate_system((p[0], p[1])) if hasattr(p[0], '__iter__'): self.p0 = Point(p[0], coordinate_system, z=z) self.p1 = Point(p[1], coordinate_system, z=z) else: self.p0 = Point(p, coordinate_system, z=z) self.p1 = Point(self.p0) elif len(p) == 3: self.p0 = self.p1 = Point((p[0], p[1], p[2]), coordinate_system, z=z) elif len(p) == 4: self.p0 = Point((p[0], p[1]), coordinate_system, z=z) self.p1 = Point((p[2], p[3]), coordinate_system, z=z) elif len(p) == 6: self.p0 = Point((p[0], p[1], p[2]), coordinate_system, z=z) self.p1 = 
Point((p[3], p[4], p[5]), coordinate_system, z=z) else: raise GeometryException(_t('Invalid points: {}').format(p)) self.coordinate_system = coordinate_system self._next = 0 def __len__(self): return 2 def __iter__(self): return self def __next__(self): if self._next >= 2: self._next = 0 raise StopIteration else: if self._next: p = self.p1 else: p = self.p0 self._next += 1 return p def __getitem__(self, item): if item == 0: return self.p0 elif item == 1: return self.p1 else: raise IndexError def __eq__(self, other): if not super(Point2, self).__eq__(other): return False return (self.p0 == other.p0) and (self.p1 == other.p1) or (self.p0 == other.p1) and (self.p1 == other.p0) def __add__(self, p): if isinstance(p, Point2): p = _geo_cs(p, Point2, self.coordinate_system) return Point2((self.p0 + p.p0, self.p1 + p.p1), coordinate_system=self.coordinate_system) if not isinstance(p, Point): p = Point(p) else: p = _geo_cs(p, Point, self.coordinate_system) return Point2((self.p0 + p, self.p1 + p), coordinate_system=self.coordinate_system) def __sub__(self, p): if isinstance(p, Point2): p = _geo_cs(p, Point2, self.coordinate_system) return Point2((self.p0 - p.p0, self.p1 - p.p1), coordinate_system=self.coordinate_system) if not isinstance(p, Point): p = Point(p) else: p = _geo_cs(p, Point, self.coordinate_system) return Point2((self.p0 - p, self.p1 - p), coordinate_system=self.coordinate_system) def __neg__(self): return Point2((-self.p0, -self.p1), coordinate_system=self.coordinate_system) def __mul__(self, p): if isinstance(p, Point2): p = _geo_cs(p, Point2, self.coordinate_system) return Point2((self.p0 * p.p0, self.p1 * p.p1), coordinate_system=self.coordinate_system) if isinstance(p, Point): p = _geo_cs(p, Point, self.coordinate_system) else: p = Point(p) return Point2((self.p0 * p, self.p1 * p), coordinate_system=self.coordinate_system) def __truediv__(self, p): if isinstance(p, Point2): p = _geo_cs(p, Point2, self.coordinate_system) return Point2((self.p0 / p.p0, 
class PPoint(Geometry, Sequence):
    """
    Poly-Point class.

    Basic instance arithmetic and equality testing is supported.

    :param xyz:     array-like: (p1, p2, ...), ((x, y), ...), ((x, y, z), ...) or
                    (vv_x, vv_y, [vv_z]). vv data is resampled to match the first vv.
    :param coordinate_system:   coordinate system or `None`
    :param z:       constant z value for (x, y) data, ignored for (x, y, z) data
    :param kwargs:  passed to base class `Geometry`

    Operators supported: = + - * /

    .. versionadded:: 9.2

    .. versionchanged:: 9.3.1 added coordinate_system parameter
    """

    def __str__(self):
        return "{}({} points)".format(self.name, len(self))

    def __init__(self, xyz, coordinate_system=None, z=0.0, name=None, **kwargs):

        if name is None:
            name = '_ppoint_'
        super().__init__(coordinate_system=coordinate_system, name=name, **kwargs)

        def blankpp(length):
            # nan-filled (length, 3) array with the implied z in column 2.
            # builtin float replaces np.float, which was deprecated in NumPy 1.20
            # and removed in 1.24 (it was an alias for float).
            pp = np.empty(length * 3, dtype=float).reshape((length, 3))
            pp.fill(np.nan)
            pp[:, 2] = z
            return pp

        def np_setup(npxyz):
            # copy from a numpy array shaped (n, 2) or (n, 3+)
            pp = blankpp(npxyz.shape[0])
            pp[:, 0] = npxyz[:, 0]
            pp[:, 1] = npxyz[:, 1]
            if npxyz.shape[1] > 2:
                pp[:, 2] = npxyz[:, 2]
            else:
                pp[:, 2] = z
            return pp

        def vv_setup():
            # build from (vv_x, vv_y [, vv_z]); y and z are re-sampled (refid)
            # to the fiducial of the first vv
            pp = blankpp(xyz[0].length)
            pp[:, 0] = xyz[0].get_data()[0][:]
            xyz[1].refid(xyz[0].fid, pp.shape[0])
            pp[:, 1] = xyz[1].get_data()[0][:]
            if len(xyz) > 2:
                xyz[2].refid(xyz[0].fid, pp.shape[0])
                # NOTE(review): z uses vv.np while x/y use get_data(); presumably
                # equivalent views of the same data — confirm against GXvv
                pp[:, 2] = xyz[2].np
            else:
                pp[:, 2] = z
            return pp

        def point_setup(_xyz):
            # build from an iterable of Point instances and/or point-like items
            pp = blankpp(len(_xyz))
            i = 0
            if isinstance(_xyz, Point):
                _xyz = (_xyz,)
            for pt in _xyz:
                if isinstance(pt, Point):
                    pp[i, :] = _geo_cs(pt, Point, coordinate_system, z=z).p
                else:
                    try:
                        pp[i, :] = pt[:3]
                    except (TypeError, ValueError, IndexError):
                        # not directly sliceable/assignable (e.g. (x, y) pair or
                        # scalar) - fall back to full Point conversion
                        pp[i, :] = _geo_cs(pt, Point, coordinate_system, z=z).p
                i += 1
            return pp

        if isinstance(xyz, np.ndarray):
            self.pp = np_setup(xyz)
        elif isinstance(xyz[0], gxvv.GXvv):
            self.pp = vv_setup()
        else:
            if coordinate_system is None:
                coordinate_system = first_coordinate_system(xyz)
            self.pp = point_setup(xyz)

        self.coordinate_system = coordinate_system
        self._next = 0  # iterator cursor; iteration is stateful and not reentrant

    @classmethod
    def from_list(cls, xyzlist, z=0.0):
        """
        .. deprecated:: 9.3 `PPoint` can create directly from a list
        """
        return cls(xyzlist, z=z)

    def __len__(self):
        return self.pp.shape[0]

    def __iter__(self):
        return self

    def __next__(self):
        if self._next >= self.pp.shape[0]:
            self._next = 0
            raise StopIteration
        else:
            self._next += 1
            return self.__getitem__(self._next - 1)

    def __getitem__(self, item):
        return Point(self.pp[item], self.coordinate_system)

    def __add__(self, p):
        if isinstance(p, PPoint):
            p = _geo_cs(p, PPoint, self.coordinate_system)
            return PPoint(self.pp + p.pp)
        if isinstance(p, Point):
            p = _geo_cs(p, Point, self.coordinate_system)
            return PPoint(self.pp + p.p)
        # p may be a single point-like or a list of points
        try:
            p = Point(p, self.coordinate_system)
            return PPoint(self.pp + p.p)
        except TypeError:
            p = PPoint(p, self.coordinate_system)
            return PPoint(self.pp + p.pp)

    def __sub__(self, p):
        if isinstance(p, PPoint):
            p = _geo_cs(p, PPoint, self.coordinate_system)
            return PPoint(self.pp - p.pp)
        if isinstance(p, Point):
            p = _geo_cs(p, Point, self.coordinate_system)
            return PPoint(self.pp - p.p)
        return PPoint(self.pp - Point(p).p)

    def __neg__(self):
        return PPoint(self.pp * -1.0)

    def __mul__(self, p):
        if isinstance(p, PPoint):
            p = _geo_cs(p, PPoint, self.coordinate_system)
            return PPoint(self.pp * p.pp)
        if isinstance(p, Point):
            p = _geo_cs(p, Point, self.coordinate_system)
            return PPoint(self.pp * p.p)
        return PPoint(self.pp * Point(p).p)

    def __truediv__(self, p):
        if isinstance(p, PPoint):
            p = _geo_cs(p, PPoint, self.coordinate_system)
            return PPoint(self.pp / p.pp)
        if isinstance(p, Point):
            p = _geo_cs(p, Point, self.coordinate_system)
            return PPoint(self.pp / p.p)
        return PPoint(self.pp / Point(p).p)

    def __eq__(self, other):
        if not super(PPoint, self).__eq__(other):
            return False
        return np.array_equal(self.pp, other.pp)

    @classmethod
    def merge(cls, pp_list):
        """
        Create a `PPoint` from a list of `Point`, 'Point2` or `PPoint` instances or point arrays.

        :param pp_list: list of `Point`, 'Point2` or `PPoint` instances or point arrays.
        :return:        `PPoint` instance that contains all points

        .. versionadded:: 9.4
        """

        # count points, get first coordinate system
        npt = 0
        cs = None
        for pp in pp_list:
            npt += len(pp)
            if cs is None and isinstance(pp, Geometry):
                cs = pp.coordinate_system

        npp = np.zeros((npt, 3))
        i = 0
        for pp in pp_list:
            if not isinstance(pp, Geometry):
                pp = PPoint(pp, coordinate_system=cs)
            if pp.coordinate_system != cs:
                # reproject to the merge coordinate system
                pp = PPoint(pp, coordinate_system=cs)
            npp[i:(i + len(pp))] = pp.pp
            i += len(pp)

        return PPoint(npp, coordinate_system=cs)

    @property
    def length(self):
        """number of points"""
        return self.__len__()

    @property
    def x(self):
        """ x array slice, can be set"""
        return self.pp[:, 0]

    @x.setter
    def x(self, v):
        self.pp[:, 0] = v

    @property
    def y(self):
        """ y array slice, can be set"""
        return self.pp[:, 1]

    @y.setter
    def y(self, v):
        self.pp[:, 1] = v

    @property
    def z(self):
        """ z array slice, can be set"""
        return self.pp[:, 2]

    @z.setter
    def z(self, v):
        self.pp[:, 2] = v

    @property
    def xy(self):
        """ (x, y) array slice, can be set"""
        return self.pp[:, 0:2]

    @xy.setter
    def xy(self, v):
        self.pp[:, 0:2] = v

    @property
    def xyz(self):
        """ xyz point array"""
        return self.pp

    @property
    def extent(self):
        """
        Volume extent as `Point2` for (min, max).

        .. versionadded:: 9.2
        """
        p1 = Point((np.nanmin(self.x), np.nanmin(self.y), np.nanmin(self.z)),
                   self.coordinate_system)
        p2 = Point((np.nanmax(self.x), np.nanmax(self.y), np.nanmax(self.z)),
                   self.coordinate_system)
        return Point2((p1, p2))

    def make_xyz_vv(self):
        """
        Return x, y and z as a set of :class:`geosoft.gxpy.vv.GXvv`.

        :returns: (xvv, yvv, zvv)

        .. versionadded:: 9.2
        """
        return gxvv.GXvv(self.x), gxvv.GXvv(self.y), gxvv.GXvv(self.z)

    def copy(self):
        """Return a copy"""
        return PPoint(self)
:param coordinate_system: coordinate system or `None` :param kwargs: passed to base class `Geometry` A mesh is a set of triangles, where each triangle has three indexes into a set of verticies. Verticies are defined by a set of (x, y, z) locations. A Mesh instance can be constructed from two arrays in the form (faces, verticies), or from two sets of `geosoft.gxpy.vv.GXvv` instances in the form ((f1vv, f2vv, f3vv), (xvv, yvv, zvv)). In array form, each array is shaped (-1, 3), with faces being an integer array that references vertexes in the float vertex array. Operators supported: = + -, where '+' can be used to combine two meshes or add a constant offset. Iterating yields triangular faces as `PPoint` instances. :Example: .. code:: import numpy as np import geosoft.gxpy.geometry as gxgm import geosoft.gxpy.vv as gxvv # create from lists faces = [[0, 1, 2], [0, 2, 3], [3, 2, 4]] verticies = [[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]] mesh = gxgm.Mesh((faces, verticies)) # create from numpy arrays faces = np.array(faces, dtype=np.int32) verticies = np.array(verticies, dtype=np.float64) mesh = gxgm.Mesh((faces, verticies)) # create from vv f1vv, f2vv, f3vv = gxvv.vvset_from_np(faces) xvv, yvv, zvv = gxvv.vvset_from_np(verticies) mesh = gxgm.Mesh(((f1vv, f2vv, f3vv), (xvv, yvv, zvv))) .. 
versionadded:: 9.3.1 """ def __str__(self): return "{}({} faces)".format(self.name, len(self)) def __init__(self, mesh, coordinate_system=None, **kwargs): if isinstance(mesh, Mesh): if coordinate_system and coordinate_system != mesh.coordinate_system: t = gxcs.Coordinate_translate(mesh.coordinate_system, coordinate_system) verticies = t.convert(mesh.verticies) else: verticies = mesh.verticies.copy() faces = mesh.faces.copy() else: faces, verticies = mesh if isinstance(faces, list): faces = np.array(faces) if isinstance(verticies, list): verticies = np.array(verticies) if not isinstance(faces, np.ndarray): f1, f2, f3 = faces faces = np.empty((len(f1), 3), dtype=np.int32) faces[:, 0] = f1.np faces[:, 1] = f2.np faces[:, 2] = f3.np else: faces = faces.copy() if not isinstance(verticies, np.ndarray): vx, vy, vz = verticies verticies = np.empty((len(vx), 3), dtype=np.float64) verticies[:, 0] = vx.np verticies[:, 1] = vy.np verticies[:, 2] = vz.np else: verticies = verticies.copy() # validate faces/verticies try: verticies[faces] except IndexError: raise GeometryException(_t('Verticies do not support all face indicies')) if 'name' not in kwargs: kwargs['name'] = '_mesh_' super().__init__(coordinate_system=coordinate_system, **kwargs) self._faces = faces self._verticies = verticies self._next = 0 def __len__(self): return len(self._faces) def __iter__(self): return self def __next__(self): if self._next >= len(self._faces): self._next = 0 raise StopIteration else: item = self._next self._next += 1 return self.__getitem__(item) def __getitem__(self, item): return PPoint(self._verticies[self._faces[item]], self.coordinate_system) def __add__(self, m): if isinstance(m, Mesh): f2 = np.append(self._faces, m.faces + len(self._verticies), axis=0) if self.coordinate_system == m.coordinate_system: v2 = m.verticies else: v2 = gxcs.Coordinate_translate(m.coordinate_system, self.coordinate_system).convert(m.verticies) v2 = np.append(self._verticies, v2, axis=0) return Mesh((f2, v2), 
self.coordinate_system) if hasattr(m, '__iter__'): dx = m[0] dy = m[1] dz = m[2] else: dx = dy = dz = float(m) m = Mesh(self) m._verticies[:, 0] += dx m._verticies[:, 1] += dy m._verticies[:, 2] += dz return m def __sub__(self, m): if hasattr(m, '__iter__'): dx = m[0] dy = m[1] dz = m[2] else: dx = dy = dz = float(m) m = Mesh(self) m._verticies[:, 0] -= dx m._verticies[:, 1] -= dy m._verticies[:, 2] -= dz return m def __eq__(self, other): if not super(Mesh, self).__eq__(other): return False if not np.array_equal(self._faces, other.faces): return False if not np.array_equal(self._verticies[self._faces], other.verticies[other.faces]): return False return True @property def faces(self): """Faces as an integer numpy array, shape (n_faces, 3).""" return self._faces @property def verticies(self): """Verticies as a float numpy array, shape (n_verticies, 3).""" return self._verticies @property def pp(self): """Verticies as a numpy array shaped (n_verticies, 3).""" return self.verticies @property def length(self): """Number of faces""" return self.__len__() @property def extent(self): """ Volume extent as `Point2`. .. versionadded:: 9.3.1 """ v = self._verticies[self._faces].reshape((-1, 3)) vx = v[:, 0] vy = v[:, 1] vz = v[:, 2] p1 = Point((np.nanmin(vx), np.nanmin(vy), np.nanmin(vz)), self.coordinate_system) p2 = Point((np.nanmax(vx), np.nanmax(vy), np.nanmax(vz)), self.coordinate_system) return Point2((p1, p2)) def point_array(self, unique=True): """ Return numpy array of face corner locations. :param unique: `True` to limit to unique points, otherwise returns all points by unwinding each face. If unique the order will not be related to the faces. .. 
versionadded:: 9.3.1 """ if unique: return self._verticies[np.unique(self._faces.flatten())].reshape(-1, 3) return self._verticies[self._faces].reshape(-1, 3) def faces_vv(self): """Return faces in `geosoft.gxpy.vv.GXvv` tuple (f1vv, f2vv, f3vv).""" return gxvv.GXvv(self._faces[:, 0], dtype=np.int32),\ gxvv.GXvv(self._faces[:, 1], dtype=np.int32),\ gxvv.GXvv(self._faces[:, 2], dtype=np.int32) def faces_vv_fast(self): """Return faces in list (f1vv, f2vv, f3vv).""" return [self.faces[:, 0], self.faces[:, 1], self.faces[:, 2]] def verticies_vv(self): """Return verticies in `geosoft.gxpy.vv.GXvv` tuple (xvv, yvv, zvv).""" return gxvv.GXvv(self._verticies[:, 0], dtype=np.float64),\ gxvv.GXvv(self._verticies[:, 1], dtype=np.float64),\ gxvv.GXvv(self._verticies[:, 2], dtype=np.float64) def verticies_vv_fast(self): """Return verticies in list (xvv, yvv, zvv).""" return [self._verticies[:, 0], self._verticies[:, 1], self._verticies[:, 2]] def copy(self): """Return a copy""" return Mesh(self) <file_sep>/examples/geosoft_research/self_organizing_maps/python/som_om.py # -*- coding: utf-8 -*- """ Created on Sun Jan 5 10:15:34 2014 @author: Ian """ #The following 2 lines to support remote debugging #import pydevd #pydevd.settrace('localhost', port=34765, stdoutToServer=True, stderrToServer=True) import os import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.utility as gxu import geosoft.gxpy.project as gxpj import geosoft.gxpy.gdb as gxgdb #t translation def _(s): return s def _same(f1, f2): f1 = os.path.normpath(f1) f2 = os.path.normpath(f2) if f1 == f2: return True try: return os.path.samefile(f1, f2) except FileNotFoundError: return False def rungx(): gxu.check_version('9.2') with gxpj.Geosoft_project() as pj: gdb_name = pj.current_database if not gdb_name: gxpj.user_message(_('No current database'), _('An open database is required.')) state = pj.current_db_state() # settings settings = gxu.get_parameters('SOM_OM') # if different database, reset database-dependent 
settings if not _same(gdb_name, settings.get('GDB_NAME', '')): settings['GDB_NAME'] = os.path.normpath(gdb_name) with gxgdb.Geosoft_gdb() as gdb: chans = state['disp_chan_list'] channorm = {} for c in chans: channorm[c] = 0 settings['INPUT_DATA'] = channorm settings.pop('FILTER', None) # analyse data gxapi.GXEDB.un_load(gdb_name) try: script = os.path.join(os.path.split(__file__)[0], 'som_om_qt5.py') results = gxu.run_external_python(script, '', '', settings) except: gxapi.GXEDB.load(gdb_name) raise # save results gxu.save_parameters('SOM_OM', results) gxapi.GXEDB.load(gdb_name) <file_sep>/docs/GXVAU.rst .. _GXVAU: GXVAU class ================================== .. autoclass:: geosoft.gxapi.GXVAU :members: .. _VAU_PRUNE: VAU_PRUNE constants ----------------------------------------------------------------------- Prune Options .. autodata:: geosoft.gxapi.VAU_PRUNE_DUMMY :annotation: .. autoattribute:: geosoft.gxapi.VAU_PRUNE_DUMMY .. autodata:: geosoft.gxapi.VAU_PRUNE_VALID :annotation: .. autoattribute:: geosoft.gxapi.VAU_PRUNE_VALID <file_sep>/docs/GXRA.rst .. _GXRA: GXRA class ================================== .. 
autoclass:: geosoft.gxapi.GXRA :members: <file_sep>/geosoft/gxpy/tests/test_geometry_utility.py import unittest import os import numpy as np import geosoft import geosoft.gxpy.system as gsys import geosoft.gxpy.geometry as gxgeo import geosoft.gxpy.geometry_utility as gxgeou import geosoft.gxpy.geometry as gxgeo import geosoft.gxpy.coordinate_system as gxcs from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() def test_version(self): self.start() self.assertEqual(gxgeo.__version__, geosoft.__version__) def test_resample(self): self.start() plinelist = [[110, 5], [120, 20], [130, 15], [150, 50], [160, 70], [175, 35], [190, 65], [220, 50], [235, 18.5]] pp = gxgeo.PPoint.from_list(plinelist) ppr = gxgeou.resample(pp, 2.5) self.assertEqual(ppr.length, 93) self.assertEqual(ppr[0], pp[0]) self.assertEqual(str(ppr[-1]), '_point_(234.61203746485876, 20.64579408987188, 0.0)') ppr = gxgeou.resample(pp, 2.5, closed=True) self.assertEqual(ppr.length, 145) self.assertEqual(ppr[0], pp[0]) self.assertEqual(ppr[-1].xyz, ppr[0].xyz) ppr = gxgeou.resample(pp, 2.5, spline=gxgeou.SPLINE_AKIMA, closed=True) self.assertEqual(ppr.length, 145) self.assertEqual(ppr[0], pp[0]) self.assertEqual(str(ppr[-2]), '_point_(109.96046205631022, 4.960353267681198, 0.0)') ppr = gxgeou.resample(pp, 2.5, spline=gxgeou.SPLINE_LINEAR, closed=True) self.assertEqual(ppr.length, 145) self.assertEqual(ppr[0], pp[0]) self.assertEqual(str(ppr[-2]), '_point_(110.15605873233088, 5.016854343091733, 0.0)') ppr = gxgeou.resample(plinelist, 2.5, spline=gxgeou.SPLINE_LINEAR, closed=True) ppr = gxgeo.PPoint(ppr) self.assertEqual(ppr.length, 145) self.assertEqual(ppr[0], pp[0]) self.assertEqual(str(ppr[-2]), '_point_(110.15605873233088, 5.016854343091733, 0.0)') ppr = gxgeou.resample(plinelist, 2.5, spline=gxgeou.SPLINE_NEAREST, closed=True) ppr = gxgeo.PPoint(ppr) self.assertEqual(ppr.length, 144) self.assertEqual(ppr[0], pp[0]) self.assertEqual(ppr[-1], pp[0]) ppr = 
gxgeou.resample(plinelist, 2.5, spline=gxgeou.SPLINE_NEAREST) ppr = gxgeo.PPoint(ppr) self.assertEqual(ppr.length, 93) self.assertEqual(ppr[0], pp[0]) self.assertEqual(str(ppr[-1]), '_point_(235.0, 18.5, 0.0)') pp = np.array(plinelist) ppr = gxgeou.resample(pp[:, 0].flatten(), 2.5, spline=gxgeou.SPLINE_CUBIC) self.assertEqual(len(ppr), 51) self.assertEqual(ppr[0, 0], pp[0, 0]) self.assertEqual(ppr[-1, 0], 235.) pp = np.array(plinelist) ppr = gxgeou.resample(pp[:, 0].flatten(), 2.5, spline=gxgeou.SPLINE_CUBIC, closed=True) self.assertEqual(len(ppr), 102) self.assertEqual(ppr[0, 0], pp[0, 0]) self.assertEqual(ppr[-1, 0], pp[0,0]) self.assertRaises(gxgeou.GeometryUtilityException, gxgeou.resample, plinelist, -1) plinelist = [[110, 5], [120, 20], [130, 15], [150, 50], [160, 70], [175, 35], [190, 65], [220, 50], [235, 18.5], [110, 5]] pp = gxgeo.PPoint(plinelist) ppr = gxgeou.resample(pp, 2.5, spline=gxgeou.SPLINE_AKIMA) self.assertEqual(ppr.length, 145) self.assertEqual(ppr[0], pp[0]) self.assertEqual(str(ppr[-2]), '_point_(109.96046205631022, 4.960353267681198, 0.0)') pp = gxgeo.PPoint([[110, 5], [120, 20]]) ppr = gxgeou.resample(pp, 2.5, spline=gxgeou.SPLINE_AKIMA) self.assertEqual(ppr.length, 8) self.assertEqual(ppr[0], pp[0]) self.assertEqual(str(ppr[-1]), '_point_(119.7072534339415, 19.560880150912265, 0.0)') pp = gxgeo.PPoint([[110, 5]]) ppr = gxgeou.resample(pp, 2.5, spline=gxgeou.SPLINE_AKIMA) self.assertEqual(ppr.length, 1) self.assertEqual(ppr[0], pp[0]) pp = gxgeo.PPoint([[110, 5], [110, 5]]) ppr = gxgeou.resample(pp, 2.5, spline=gxgeou.SPLINE_AKIMA) self.assertEqual(ppr.length, 2) self.assertEqual(ppr[0], pp[0]) self.assertEqual(ppr[1], pp[1]) ############################################################################################### if __name__ == '__main__': unittest.main() <file_sep>/geosoft/gxapi/GXUSERMETA.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXUSERMETA(gxapi_cy.WrapUSERMETA): """ GXUSERMETA class. The `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` class handles user style metadata tied to real data. """ def __init__(self, handle=0): super(GXUSERMETA, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` :returns: A null `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` :rtype: GXUSERMETA """ return GXUSERMETA() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, format): """ Creates an empty `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` object :param format: :ref:`USERMETA_FORMAT` Type of Meta to create :type format: int :returns: `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` Object :rtype: GXUSERMETA .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapUSERMETA._create(GXContext._get_tls_geo(), format) return GXUSERMETA(ret_val) @classmethod def create_s(cls, file): """ Create a `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` from a file :param file: File Name :type file: str :returns: `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` Object :rtype: GXUSERMETA .. 
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapUSERMETA._create_s(GXContext._get_tls_geo(), file.encode()) return GXUSERMETA(ret_val) def get_data_creation_date(self, date): """ Get the Data Creation Date :param date: Date :type date: float_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ date.value = self._get_data_creation_date(date.value) def get_extents2d(self, min_x, min_y, max_x, max_y): """ Get the 2d Extents :param min_x: MinX :param min_y: MinY :param max_x: MaxX :param max_y: MaxY :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min_x.value, min_y.value, max_x.value, max_y.value = self._get_extents2d(min_x.value, min_y.value, max_x.value, max_y.value) def get_extents3d(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the 3d Extents :param min_x: MinX :param min_y: MinY :param min_z: MinZ :param max_x: MaxX :param max_y: MaxY :param max_z: MaxZ :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_extents3d(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) def get_ipj(self, ipj): """ Get the `GXIPJ <geosoft.gxapi.GXIPJ>` :param ipj: Date :type ipj: GXIPJ .. 
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_ipj(ipj) def get_meta_creation_date(self, date): """ Get the Meta Creation Date :param date: Date :type date: float_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ date.value = self._get_meta_creation_date(date.value) def get_xml_format(self, format): """ Get the XML Format :param format: :ref:`USERMETA_FORMAT` :type format: int_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ format.value = self._get_xml_format(format.value) def set_xml_format(self, format): """ Get the XML Format :param format: :ref:`USERMETA_FORMAT` :type format: int .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_xml_format(format) def compare(self, usermeta2): """ Compare 2 `GXUSERMETA <geosoft.gxapi.GXUSERMETA>`'s :param usermeta2: Second UERMETA :type usermeta2: GXUSERMETA :returns: 0 - No 1 - Yes :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._compare(usermeta2) return ret_val def get_data_creator(self, data_creator): """ Get the Data Creator :param data_creator: DataCreator returned :type data_creator: str_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ data_creator.value = self._get_data_creator(data_creator.value.encode()) def get_format(self, format): """ Get the File Format :param format: Title returned :type format: str_ref .. 
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ format.value = self._get_format(format.value.encode()) def get_meta_creator(self, meta_creator): """ Get the Meta Creator :param meta_creator: MetaCreator returned :type meta_creator: str_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ meta_creator.value = self._get_meta_creator(meta_creator.value.encode()) def get_project(self, project): """ Get the File Project :param project: Title returned :type project: str_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ project.value = self._get_project(project.value.encode()) def get_title(self, title): """ Get the Title :param title: Title returned :type title: str_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ title.value = self._get_title(title.value.encode()) def serial(self, save_geo, file): """ Serialize `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` to a `GXBF <geosoft.gxapi.GXBF>`. :param save_geo: Output Geosoft Metadata? :param file: File name to save to :type save_geo: bool :type file: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._serial(save_geo, file.encode()) def set_data_creation_date(self, date): """ Set the Data Creation Date :param date: Date :type date: float .. 
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_data_creation_date(date) def set_data_creator(self, data_creator): """ Set the Data Creator :param data_creator: DataCreator :type data_creator: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_data_creator(data_creator.encode()) def set_extents2d(self, min_x, min_y, max_x, max_y): """ Set the 2d Extents :param min_x: MinX :param min_y: MinY :param max_x: MaxX :param max_y: MaxY :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_extents2d(min_x, min_y, max_x, max_y) def set_extents3d(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Set the 3d Extents :param min_x: MinX :param min_y: MinY :param min_z: MinZ :param max_x: MaxX :param max_y: MaxY :param max_z: MaxZ :type min_x: float :type min_y: float :type min_z: float :type max_x: float :type max_y: float :type max_z: float .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_extents3d(min_x, min_y, min_z, max_x, max_y, max_z) def set_format(self, format): """ Set the File Format :param format: Format :type format: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_format(format.encode()) def set_ipj(self, ipj): """ Set the `GXIPJ <geosoft.gxapi.GXIPJ>` :param ipj: Date :type ipj: GXIPJ .. 
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_ipj(ipj) def set_meta_creation_date(self, date): """ Set the Meta Creation Date :param date: Date :type date: float .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta_creation_date(date) def set_meta_creator(self, meta_creator): """ Set the Meta Creator :param meta_creator: MetaCreator :type meta_creator: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta_creator(meta_creator.encode()) def set_project(self, project): """ Set the File Project :param project: Project :type project: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_project(project.encode()) def set_title(self, title): """ Set the Title :param title: Title :type title: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_title(title.encode()) @classmethod def update_extents_2d(cls, filename, ipj, min_x, min_y, max_x, max_y): """ Edit an existing XML metadata file by changing the extents and projection data :param filename: Filename of existing metadata to update :param ipj: New projection :param min_x: New MinX value :param min_y: New MinY value :param max_x: New MaxX value :param max_y: New MaxY value :type filename: str :type ipj: GXIPJ :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. 
versionadded:: 7.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapUSERMETA._update_extents_2d(GXContext._get_tls_geo(), filename.encode(), ipj, min_x, min_y, max_x, max_y) @classmethod def update_file_type(cls, file_name, new_file_type): """ Edit an existing XML metadata file by changing the file type :param file_name: Filename of existing metadata to update :param new_file_type: New file type :type file_name: str :type new_file_type: str .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapUSERMETA._update_file_type(GXContext._get_tls_geo(), file_name.encode(), new_file_type.encode()) @classmethod def save_file_lineage(cls, file_name, save_geo): """ Add lineage to XML :param file_name: Filename of existing metadata to update :param save_geo: Output Geosoft Metadata? :type file_name: str :type save_geo: bool .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapUSERMETA._save_file_lineage(GXContext._get_tls_geo(), file_name.encode(), save_geo) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GX3DN.rst .. _GX3DN: GX3DN class ================================== .. 
autoclass:: geosoft.gxapi.GX3DN :members: <file_sep>/geosoft/gxpy/tests/wee_test.py import os import numpy as np import geosoft.gxapi as gxapi import geosoft.gxpy.gx as gx import geosoft.gxpy.system as gsys import geosoft.gxpy.surface as gxsurf import geosoft.gxpy.vox as gxvox import geosoft.gxpy.group as gxgrp import geosoft.gxpy.spatialdata as gxspd import geosoft.gxpy.view as gxview import geosoft.gxpy.map as gxmap import geosoft.gxpy.vv as gxvv import geosoft.gxpy.coordinate_system as gxcs gxc = gx.GXpy() def t1(): verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10], [-3, 6, 8], [-4, 0, 12]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4], [1, 2, 4], [3, 4, 5], [6, 4, 5]], dtype=np.int32) with gxsurf.Surface('maki') as s: s.add_mesh_np(faces, verts) s.render_color = gxgrp.C_CYAN s.render_style = gxsurf.STYLE_FLAT with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.draw_surface(v3d, s) def t2(): with gxvox.Vox.open('C:\\Development\\github\\gxpy\\examples\\tutorial\\Geosoft Voxels\\rjsmith_voxi_density') as vox: with gxsurf.SurfaceDataset.vox_surface(vox, (0.01, 0.02), temp=True) as s: with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.draw_surface(v3d, s) def t3(): verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10], [-3, 6, 8], [-4, 0, 12]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4], [1, 2, 4], [3, 4, 5], [6, 4, 5]], dtype=np.int32) with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name with gxgrp.Draw_3d(v3d, 'Surface') as g: g.surface(faces, verts) image_file = gxmap.Map.open(v3d_file).image_file(pix_width=800) pass def t4(): with gxcs.Coordinate_system('DHDN / Okarito 2000') as cs: with gxcs.Coordinate_system('DHDN') as csll: with gxcs.Coordinate_translate(cs, csll) as pj: lon, lat = pj.convert((500000, 6500000)) t4() pass <file_sep>/geosoft/gxpy/va.py """ Geosoft vector arrays (vector of array elements) :Classes: =============== 
========================= :class:`GXva` vector of array elements =============== ========================= VA and VV classes are related based on a key called a *fiducial*, which has a start value and increment between values. The :meth:`refid` method can be used to resample vector data to the same fiducial so that vector-to-vector operations can be performed. .. seealso:: :mod:`geosoft.gxpy.vv`, :mod:`geosoft.gxapi.GXVA`, :mod:`geosoft.gxapi.GXVV` .. note:: Regression tests provide usage examples: `va tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_gxva.py>`_ """ from collections.abc import Sequence import numpy as np import geosoft import geosoft.gxapi as gxapi from . import utility as gxu __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class VAException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.va`. .. versionadded:: 9.1 """ pass class GXva(Sequence): """ VA class wrapper. :param array: 2D numpy array, None for an empty VA :param dtype: numpy data type, default np.float :param width: array width, default is determined from array. :param fid: fid tuple (start,increment), default (0.0, 1.0) :param unit_of_measure: the unit of measurement for the data Maximum number of elements must be less that 2^31 - 1 .. versionchanged:: 9.3 added unit_of_measure .. versionchanged:: 9.2 allow construction directly from numpy array .. 
versionadded:: 9.1 """ def __enter__(self): return self def __exit__(self, type, value, traceback): self.__del__() def __del__(self): if hasattr(self, '_gxva'): self._gxva = None def __len__(self): return self._gxva.len() def __init__(self, array=None, width=None, dtype=None, fid=(0.0, 1.0), unit_of_measure=''): if array is not None: if not isinstance(array, np.ndarray): array = np.array(array) if dtype is None: dtype = array.dtype if array.ndim != 2: raise VAException(_t('array must have 2 dimensions')) if width is None: width = array.shape[1] if width is None or (width < 2): raise VAException('width must be >= 2') self._gxtype = gxu.gx_dtype(dtype) if self._gxtype < 0: raise VAException(_t("VA of strings is not supported.")) self._dtype = gxu.dtype_gx(self._gxtype) self._width = width self._gxva = gxapi.GXVA.create_ext(self._gxtype, 0, self._width) self.fid = fid self._start, self._incr = self.fid self._next = 0 self._unit_of_measure = unit_of_measure if array is not None and array.size > 0: self.set_data(array, fid) def __iter__(self): return self def __next__(self): if self._next >= self.length: self._next = 0 self._start, self._incr = self.fid raise StopIteration else: i = self._next self._next += 1 return self.np[i], self._start + self._incr * i def __getitem__(self, item): self._start, self._incr = self.fid return self.np[item], self._start + self._incr * item @property def unit_of_measure(self): """ data unit of measurement""" return self._unit_of_measure @unit_of_measure.setter def unit_of_measure(self, uom): self._unit_of_measure = str(uom) @property def fid(self): """ fid tuple (start,increment), can be set .. versionadded:: 9.1 """ return self._gxva.get_fid_start(), self._gxva.get_fid_incr() @fid.setter def fid(self, fid): self._gxva.set_fid_start(fid[0]) self._gxva.set_fid_incr(fid[1]) def refid(self, fid, length): """ Resample VA to a new fiducial and length :param fid: (start,incr) :param length: length .. 
versionadded:: 9.1 """ self._gxva.re_fid(fid[0], fid[1], length) self.fid = fid @property def length(self): """ number of elements in the VA, can be set. .. versionadded:: 9.1 .. versionchanged:: 9.3 can be set """ return self.__len__() @length.setter def length(self, length): self.refid(self.fid, length) @property def width(self): """ width of each row(element) in the VA .. versionadded:: 9.1 """ return self._width @property def dimensions(self): """ VA dimensions (length, width) .. versionadded:: 9.2 """ return (self.length, self._width) @property def gxtype(self): """ GX data type .. versionadded:: 9.1 """ return self._gxtype @property def dtype(self): """ numpy data type .. versionadded:: 9.1 """ return self._dtype @property def np(self): """ Numpy array of VA data, in the data type of the VA. Use :meth:`get_data` to get a numpy array in another `dtype`. Array will be 2-dimensional. .. versionadded:: 9.2 """ return self.get_data()[0] @property def gxva(self): """ The :class:`geosoft.gxapi.GXVA` instance handle. ..versionadded:: 9.3 """ return self._gxva def get_data(self, dtype=None, start=0, n=None, start_col=0, n_col=None): """ Return a numpy array of data from a va. :param start: index of first value, must be >=0 :param n: number of values wanted :param start_col: index of the first column wanted :param n_col: number of columns :param dtype: numpy data type wanted .. 
versionadded:: 9.1 """ if dtype is None: dtype = self._dtype else: dtype = np.dtype(dtype) if self.length == 0: return np.array([[], []], dtype=dtype) # strings not supported if gxu.gx_dtype(dtype) < 0: raise VAException(_t('VA string elements are not supported.')) if n is None: n = self.length - start else: n = min((self.length - start), n) if (n <= 0) or (start < 0): raise VAException(_t('Cannot get (start,n) ({},{}) from va of length {}').format(start, n, self.length)) if n_col is None: n_col = self._width - start_col else: n_col = min((self._width - start_col), n_col) if (n_col <= 0) or (start_col < 0): raise VAException(_t('Cannot get columns (start,n) ({},{}) from VA of width {}'). format(start_col, n_col, self._width)) npd = self._gxva.get_array_np(start, start_col, n, n_col, dtype).reshape(-1, n_col) # float dummies to nan if npd.dtype == np.float32 or npd.dtype == np.float64: npd[npd == gxu.gx_dummy(npd.dtype)] = np.nan fid = self.fid start = fid[0] + start * fid[1] return npd, (start, fid[1]) def set_data(self, npdata, fid=(0.0, 1.0)): """ Copy numpy data into a VA. :param npdata: numpy data array (must be 2D) :param fid: fid tuple (start,increment), default (0.0,1.0) Maximum number of elements must be less that 2^31 - 1 .. versionadded:: 9.1 """ if npdata.size == 0: self.length = 0 if fid: self.fid = fid return try: npd = npdata.reshape((-1, self._width)) except ValueError: raise VAException(_t('Numpy data does not align with VA data width ({}).').format(self._width)) max_length = gxapi.iMAX // self._width if npdata.shape[0] > max_length: raise VAException(_t('Array length {} too long. 
Maximum is {} for width {}').format(npdata.shape[0], max_length, self._width)) if npdata.dtype == np.float32 or npdata.dtype == np.float64: if np.isnan(npdata).any(): npdata = npdata.copy() npdata[np.isnan(npdata)] = gxu.gx_dummy(npdata.dtype) self._gxva.set_ln(npd.shape[0]) self._gxva.set_array_np(0, 0, npd) self.fid = fid <file_sep>/geosoft/gxapi/GXVOXD.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVOXD(gxapi_cy.WrapVOXD): """ GXVOXD class. `GXVOX <geosoft.gxapi.GXVOX>` Display object. """ def __init__(self, handle=0): super(GXVOXD, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVOXD <geosoft.gxapi.GXVOXD>` :returns: A null `GXVOXD <geosoft.gxapi.GXVOXD>` :rtype: GXVOXD """ return GXVOXD() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, vox, table, zone, contour): """ Create a new `GXVOXD <geosoft.gxapi.GXVOXD>` :param vox: `GXVOX <geosoft.gxapi.GXVOX>` Object :param table: Color table name, "" for default :param zone: :ref:`ITR_ZONE` :param contour: Color contour interval or `rDUMMY <geosoft.gxapi.rDUMMY>` :type vox: GXVOX :type table: str :type zone: int :type contour: float :returns: `GXVOXD <geosoft.gxapi.GXVOXD>` handle, terminates if creation fails :rtype: GXVOXD .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Fails if the `GXVOX <geosoft.gxapi.GXVOX>` object is NOT thematic. (See the `create_thematic <geosoft.gxapi.GXVOXD.create_thematic>` function.) """ ret_val = gxapi_cy.WrapVOXD._create(GXContext._get_tls_geo(), vox, table.encode(), zone, contour) return GXVOXD(ret_val) @classmethod def create_itr(cls, vox, itr): """ Create a new `GXVOXD <geosoft.gxapi.GXVOXD>` with our own `GXITR <geosoft.gxapi.GXITR>` :param vox: `GXVOX <geosoft.gxapi.GXVOX>` Object :param itr: `GXITR <geosoft.gxapi.GXITR>` Object :type vox: GXVOX :type itr: GXITR :returns: `GXVOXD <geosoft.gxapi.GXVOXD>` handle, terminates if creation fails :rtype: GXVOXD .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Fails if the `GXVOX <geosoft.gxapi.GXVOX>` object is thematic. (See the `create_thematic <geosoft.gxapi.GXVOXD.create_thematic>` function.) """ ret_val = gxapi_cy.WrapVOXD._create_itr(GXContext._get_tls_geo(), vox, itr) return GXVOXD(ret_val) @classmethod def create_thematic(cls, vox): """ Create a new `GXVOXD <geosoft.gxapi.GXVOXD>` for a thematic `GXVOX <geosoft.gxapi.GXVOX>` object. :param vox: `GXVOX <geosoft.gxapi.GXVOX>` Object :type vox: GXVOX :returns: `GXVOXD <geosoft.gxapi.GXVOXD>` handle, terminates if creation fails :rtype: GXVOXD .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A thematic voxel is one where the stored integer values represent indices into an internally stored `GXTPAT <geosoft.gxapi.GXTPAT>` object. Thematic voxels contain their own color definitions, and normal numerical operations, such as applying ITRs for display, are not valid. 
To determine if a `GXVOX <geosoft.gxapi.GXVOX>` object is thematic, use the `is_thematic <geosoft.gxapi.GXVOXD.is_thematic>` function. Fails if the `GXVOX <geosoft.gxapi.GXVOX>` object is NOT thematic. """ ret_val = gxapi_cy.WrapVOXD._create_thematic(GXContext._get_tls_geo(), vox) return GXVOXD(ret_val) def is_thematic(self): """ Is this a thematic voxel? :returns: 1 if `GXVOX <geosoft.gxapi.GXVOX>` is thematic :rtype: int .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A thematic voxel is one where the stored integer values represent indices into an internally stored `GXTPAT <geosoft.gxapi.GXTPAT>` object. Thematic voxels contain their own color definitions, and normal numerical operations, such as applying ITRs for display, are not valid. """ ret_val = self._is_thematic() return ret_val def get_thematic_info(self, tpat, vv): """ Get a copy of a thematic voxel's `GXTPAT <geosoft.gxapi.GXTPAT>` object and a `GXVV <geosoft.gxapi.GXVV>` containing the current display selections. :param tpat: `GXTPAT <geosoft.gxapi.GXTPAT>` object to get :param vv: `GXVV <geosoft.gxapi.GXVV>` (int) object to fill with current selections :type tpat: GXTPAT :type vv: GXVV .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_thematic_info(tpat, vv) def set_thematic_selection(self, vv): """ Get a copy of a thematic voxel's `GXTPAT <geosoft.gxapi.GXTPAT>` object and a `GXVV <geosoft.gxapi.GXVV>` containing the current display selections. :param vv: `GXVV <geosoft.gxapi.GXVV>` (int) object to set the current selections to :type vv: GXVV .. 
versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_thematic_selection(vv) def get_draw_controls(self, box, trans, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the draw controls :param box: Draw Bounding Box :param trans: Transparency :param min_x: Min X :param min_y: Min Y :param min_z: Min Z :param max_x: Max X :param max_y: Max Y :param max_z: Max Z :type box: int_ref :type trans: float_ref :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ box.value, trans.value, min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_draw_controls(box.value, trans.value, min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) def get_name(self, name): """ Gets the file name of the voxel. :param name: File name returned :type name: str_ref .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._get_name(name.value.encode()) def get_itr(self, itr): """ Get the `GXITR <geosoft.gxapi.GXITR>` of the `GXVOXD <geosoft.gxapi.GXVOXD>` :param itr: `GXITR <geosoft.gxapi.GXITR>` object :type itr: GXITR .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_itr(itr) def get_shell_controls(self, min, max): """ Get the shell controls :param min: Min Value (`rDUMMY <geosoft.gxapi.rDUMMY>` for no limit) :param max: Max Value (`rDUMMY <geosoft.gxapi.rDUMMY>` for no limit) :type min: float_ref :type max: float_ref .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min.value, max.value = self._get_shell_controls(min.value, max.value) def set_draw_controls(self, box, trans, min_x, min_y, min_z, max_x, max_y, max_z): """ Set the draw controls :param box: Draw Bounding Box :param trans: Transparency :param min_x: Min X :param min_y: Min Y :param min_z: Min Z :param max_x: Max X :param max_y: Max Y :param max_z: Max Z :type box: int :type trans: float :type min_x: float :type min_y: float :type min_z: float :type max_x: float :type max_y: float :type max_z: float .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_draw_controls(box, trans, min_x, min_y, min_z, max_x, max_y, max_z) def set_itr(self, itr): """ Set the `GXITR <geosoft.gxapi.GXITR>` of the `GXVOXD <geosoft.gxapi.GXVOXD>` :param itr: `GXITR <geosoft.gxapi.GXITR>` object :type itr: GXITR .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_itr(itr) def set_shell_controls(self, min, max): """ Set the shell controls :param min: Min Value (`rDUMMY <geosoft.gxapi.rDUMMY>` for no limit) :param max: Max Value (`rDUMMY <geosoft.gxapi.rDUMMY>` for no limit) :type min: float :type max: float .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_shell_controls(min, max) def get_render_mode(self, render_mode): """ Get voxel render mode. :param render_mode: :ref:`VOXELRENDER_MODE` :type render_mode: int_ref .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ render_mode.value = self._get_render_mode(render_mode.value) def set_render_mode(self, render_mode): """ Get voxel render mode. :param render_mode: :ref:`VOXELRENDER_MODE` :type render_mode: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_render_mode(render_mode) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXMSTK.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXSTK import GXSTK ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMSTK(gxapi_cy.WrapMSTK): """ GXMSTK class. Multi-profile stack This class is used for storing data of multiple profiles and plotting profiles in a map. It is a container of `GXSTK <geosoft.gxapi.GXSTK>` class objects. See also: `GXSTK <geosoft.gxapi.GXSTK>` class. """ def __init__(self, handle=0): super(GXMSTK, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMSTK <geosoft.gxapi.GXMSTK>` :returns: A null `GXMSTK <geosoft.gxapi.GXMSTK>` :rtype: GXMSTK """ return GXMSTK() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def add_stk(self): """ Create and add a `GXSTK <geosoft.gxapi.GXSTK>` object to `GXMSTK <geosoft.gxapi.GXMSTK>` :returns: `GXSTK <geosoft.gxapi.GXSTK>`, fail if error :rtype: GXSTK .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Index to the added `GXSTK <geosoft.gxapi.GXSTK>` object is the last one in `GXMSTK <geosoft.gxapi.GXMSTK>` container. """ ret_val = self._add_stk() return GXSTK(ret_val) def chan_list_vv(self, db, num_ch_vv, str_ch_vv, x_ch_vv, prof_ch_vv, prof_ch__un_used_vv): """ Save channel names in VVs based on channel types :param db: Database handle :param num_ch_vv: List of names of numeric channels :param str_ch_vv: List of name of string channels :param x_ch_vv: List of channel names which can be used for X axis. Must be numeric channels but not `GXVA <geosoft.gxapi.GXVA>` channels :param prof_ch_vv: List of profiles with channel names in both `GXMSTK <geosoft.gxapi.GXMSTK>` and `GXDB <geosoft.gxapi.GXDB>` :param prof_ch__un_used_vv: List of profiles with channels in `GXMSTK <geosoft.gxapi.GXMSTK>` but not in database :type db: GXDB :type num_ch_vv: GXVV :type str_ch_vv: GXVV :type x_ch_vv: GXVV :type prof_ch_vv: GXVV :type prof_ch__un_used_vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Terms 'used' and 'unused' indicate that the a channel name in database also 'in' and 'not in' the `GXMSTK <geosoft.gxapi.GXMSTK>` object respectively """ self._chan_list_vv(db, num_ch_vv, str_ch_vv, x_ch_vv, prof_ch_vv, prof_ch__un_used_vv) @classmethod def create(cls): """ Create `GXMSTK <geosoft.gxapi.GXMSTK>`. :returns: `GXMSTK <geosoft.gxapi.GXMSTK>`, aborts if creation fails :rtype: GXMSTK .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapMSTK._create(GXContext._get_tls_geo()) return GXMSTK(ret_val) def draw_profile(self, db, line, map): """ Draw multiple profiles in map :param db: Database handle :param line: Database line :param map: `GXMAP <geosoft.gxapi.GXMAP>` handle :type db: GXDB :type line: int :type map: GXMAP .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._draw_profile(db, line, map) def set_y_axis_direction(self, direction): """ Set the Y-axis direction - normal or inverted :param direction: Y-axis direction: 0 - normal, 1 - inverted :type direction: int .. versionadded:: 8.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_y_axis_direction(direction) def find_stk2(self, str_val, index, vv_rtd): """ Find index of `GXSTK <geosoft.gxapi.GXSTK>` from a string of group names and X/Y channels :param str_val: Input string (see notes above). Will be modified on return :param index: Index to the `GXSTK <geosoft.gxapi.GXSTK>` found, Must be greater than 0 if found, -1 if not found :param vv_rtd: Returned `GXVV <geosoft.gxapi.GXVV>` with names of Group, X channel and Y channel `GXVV <geosoft.gxapi.GXVV>` type must be of STRING :type str_val: str :type index: int_ref :type vv_rtd: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Format of the input string: Map group name + " ( " + X channel name + " , " + Y channel name + " )" for example, string "DATA ( DIST , MAG )" indicates a map group name of DATA, X channel name of DIST and Y channel name of MAG. 
""" index.value = self._find_stk2(str_val.encode(), index.value, vv_rtd) def get_stk(self, num): """ Get a specific `GXSTK <geosoft.gxapi.GXSTK>` object from a `GXMSTK <geosoft.gxapi.GXMSTK>` object (Index of 0 gets the first `GXSTK <geosoft.gxapi.GXSTK>` in the `GXMSTK <geosoft.gxapi.GXMSTK>`) :param num: Index to `GXSTK <geosoft.gxapi.GXSTK>` to get :type num: int :returns: x - `GXSTK <geosoft.gxapi.GXSTK>` Object handle :rtype: GXSTK .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_stk(num) return GXSTK(ret_val) def delete_stk(self, num): """ Delete a `GXSTK <geosoft.gxapi.GXSTK>` object :param num: Index to `GXSTK <geosoft.gxapi.GXSTK>` to delete (0 is first one) :type num: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** 0 is the first one """ self._delete_stk(num) def find_stk(self, str_val, index, group, x_ch, y_ch): """ Find index of `GXSTK <geosoft.gxapi.GXSTK>` from a string of group names and X/Y channels :param str_val: Input string (see notes above). Will be modified on return :param index: Index to the `GXSTK <geosoft.gxapi.GXSTK>` found, Must be greater than 0 if found, -1 if not found :param group: Output group name string :param x_ch: Output X channel name string :param y_ch: Output Y channel name string :type str_val: str :type index: int_ref :type group: str_ref :type x_ch: str_ref :type y_ch: str_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Format of the input string: Map group name + " ( " + X channel name + " , " + Y channel name + " )" for example, string "DATA ( DIST , MAG )" indicates a map group name of DATA, X channel name of DIST and Y channel name of MAG. 
""" index.value, group.value, x_ch.value, y_ch.value = self._find_stk(str_val.encode(), index.value, group.value.encode(), x_ch.value.encode(), y_ch.value.encode()) def get_num_stk(self): """ Get the number of `GXSTK <geosoft.gxapi.GXSTK>` objects in a `GXMSTK <geosoft.gxapi.GXMSTK>` object :returns: The number of `GXSTK <geosoft.gxapi.GXSTK>` objects in a `GXMSTK <geosoft.gxapi.GXMSTK>` object :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_num_stk() return ret_val def read_ini(self, ra): """ Read multiple profiles parameters from an INI file :param ra: `GXRA <geosoft.gxapi.GXRA>` handle to an INI file :type ra: GXRA .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._read_ini(ra) def save_profile(self, wa): """ Save multiple profile INI parameters in a `GXWA <geosoft.gxapi.GXWA>` file of INI format :param wa: `GXWA <geosoft.gxapi.GXWA>` handle to an INI file :type wa: GXWA .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._save_profile(wa) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXDBREAD.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXVA import GXVA from .GXVV import GXVV ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDBREAD(gxapi_cy.WrapDBREAD): """ GXDBREAD class. The `GXDBREAD <geosoft.gxapi.GXDBREAD>` class is used to open and read from databases. Very large lines are split into blocks and served up sequentially to prevent the over-use of virtual memory when channels are read into VVs or VAs. Individual data blocks are limited by default to 1 MB (which is user-alterable). Single lines smaller than the block size are served up whole, one block per line. """ def __init__(self, handle=0): super(GXDBREAD, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDBREAD <geosoft.gxapi.GXDBREAD>` :returns: A null `GXDBREAD <geosoft.gxapi.GXDBREAD>` :rtype: GXDBREAD """ return GXDBREAD() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Create Methods @classmethod def create(cls, db, line_lst): """ Create a `GXDBREAD <geosoft.gxapi.GXDBREAD>` object Add channels using the `add_channel <geosoft.gxapi.GXDBREAD.add_channel>` method.channel. :param db: Database input :param line_lst: List of lines to process NAME = line name, VALUE = line symbol :type db: GXDB :type line_lst: GXLST :returns: `GXDBREAD <geosoft.gxapi.GXDBREAD>` object :rtype: GXDBREAD .. 
versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDBREAD._create(GXContext._get_tls_geo(), db, line_lst) return GXDBREAD(ret_val) @classmethod def create_xy(cls, db, line_lst): """ Create a `GXDBREAD <geosoft.gxapi.GXDBREAD>` object for a XY-located data. Add channels using the `add_channel <geosoft.gxapi.GXDBREAD.add_channel>` method. :param db: Database input :param line_lst: List of lines to process NAME = line name, VALUE = line symbol :type db: GXDB :type line_lst: GXLST :returns: `GXDBREAD <geosoft.gxapi.GXDBREAD>` object :rtype: GXDBREAD .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDBREAD._create_xy(GXContext._get_tls_geo(), db, line_lst) return GXDBREAD(ret_val) @classmethod def create_xyz(cls, db, line_lst): """ Create a `GXDBREAD <geosoft.gxapi.GXDBREAD>` object for a XYZ-located data. Add channels using the `add_channel <geosoft.gxapi.GXDBREAD.add_channel>` method. :param db: Database input :param line_lst: List of lines to process NAME = line name, VALUE = line symbol :type db: GXDB :type line_lst: GXLST :returns: `GXDBREAD <geosoft.gxapi.GXDBREAD>` object :rtype: GXDBREAD .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDBREAD._create_xyz(GXContext._get_tls_geo(), db, line_lst) return GXDBREAD(ret_val) def add_channel(self, chan): """ Add a data channel to the `GXDBREAD <geosoft.gxapi.GXDBREAD>` object. :param chan: Channel handle (does not need to be locked, but can be.) :type chan: int :returns: Channel index. Use for getting the correct `GXVV <geosoft.gxapi.GXVV>` or `GXVA <geosoft.gxapi.GXVA>` object. :rtype: int .. 
versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._add_channel(chan) return ret_val # Data Access Methods def get_vv(self, chan): """ Get the `GXVV <geosoft.gxapi.GXVV>` handle for a channel. :param chan: Index of channel to access. :type chan: int :returns: `GXVV <geosoft.gxapi.GXVV>` handle :rtype: GXVV .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Call only for single-column (regular) channels. You can call the `get_chan_array_size <geosoft.gxapi.GXDBREAD.get_chan_array_size>` function to find the number of columns in a given channel. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up. """ ret_val = self._get_vv(chan) return GXVV(ret_val) def get_va(self, chan): """ Get the `GXVA <geosoft.gxapi.GXVA>` handle for an array channel. :param chan: Index of channel to access. :type chan: int :returns: `GXVA <geosoft.gxapi.GXVA>` handle :rtype: GXVA .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Call only for array (multi-column) channels. You can call the `get_chan_array_size <geosoft.gxapi.GXDBREAD.get_chan_array_size>` function to find the number of columns in a given channel, or you can call `GXVA.col <geosoft.gxapi.GXVA.col>` on the returned `GXVA <geosoft.gxapi.GXVA>` handle. The `GXVA <geosoft.gxapi.GXVA>` is filled anew for each block served up. """ ret_val = self._get_va(chan) return GXVA(ret_val) def get_v_vx(self): """ Get the X channel `GXVV <geosoft.gxapi.GXVV>` handle. :returns: `GXVV <geosoft.gxapi.GXVV>` handle :rtype: GXVV ..
versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Only available for the CreateXY or CreateXYZ methods. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up. """ ret_val = self._get_v_vx() return GXVV(ret_val) def get_v_vy(self): """ Get the Y channel `GXVV <geosoft.gxapi.GXVV>` handle. :returns: `GXVV <geosoft.gxapi.GXVV>` handle :rtype: GXVV .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Only available for the CreateXY or CreateXYZ methods. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up. """ ret_val = self._get_v_vy() return GXVV(ret_val) def get_v_vz(self): """ Get the Z channel `GXVV <geosoft.gxapi.GXVV>` handle. :returns: `GXVV <geosoft.gxapi.GXVV>` handle :rtype: GXVV .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Only available for the CreateXY or CreateXYZ methods. The `GXVV <geosoft.gxapi.GXVV>` is filled anew for each block served up. If the Z channel is an array channel, the returned `GXVV <geosoft.gxapi.GXVV>` is the "base" `GXVV <geosoft.gxapi.GXVV>` of the `GXVA <geosoft.gxapi.GXVA>` and contains all items sequentially. """ ret_val = self._get_v_vz() return GXVV(ret_val) def get_chan_array_size(self, chan): """ Get the number of columns of data in a channel. :param chan: Index of channel to access. :type chan: int :returns: The number of columns (array size) for a channel :rtype: int .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Regular channels have one column of data. Array channels have more than one column of data. 
This function should be called to determine whether to use `get_vv <geosoft.gxapi.GXDBREAD.get_vv>` or `get_va <geosoft.gxapi.GXDBREAD.get_va>` to access data for a channel. """ ret_val = self._get_chan_array_size(chan) return ret_val def get_number_of_blocks_to_process(self): """ Get the number of blocks to be served up. :returns: The number of blocks to process in the selected lines. :rtype: int .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The selected lines are scanned. All lines where the served up data is less than the maximum block size for all channels are served as a single block. Any lines where any channel's data exceeds the maximum block size are split up into blocks. The value returned can be used as the progress message maximum iteration value. """ ret_val = self._get_number_of_blocks_to_process() return ret_val # Processing def get_next_block(self, line, block, n_blocks): """ Get the next block of data. :param line: (returned) The index into the input selected line list of the line whose data is contained in the current block :param block: (returned) The block index (0 to NBlocks-1) for the current line of data. :param n_blocks: (returned) The number of blocks that the current line is split into. :type line: int_ref :type block: int_ref :type n_blocks: int_ref :returns: Returns the current block index, or -1 if at end of file (no new data returned). :rtype: int .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The next block of data is read and copied into the channel `GXVV <geosoft.gxapi.GXVV>` and/or `GXVA <geosoft.gxapi.GXVA>` objects, accessed using the `get_vv <geosoft.gxapi.GXDBREAD.get_vv>` and `get_va <geosoft.gxapi.GXDBREAD.get_va>` functions. 
""" ret_val, line.value, block.value, n_blocks.value = self._get_next_block(line.value, block.value, n_blocks.value) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXGMSYS.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXGMSYS(gxapi_cy.WrapGMSYS): """ GXGMSYS class. The `GXGMSYS <geosoft.gxapi.GXGMSYS>` Methods """ def __init__(self, handle=0): super(GXGMSYS, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXGMSYS <geosoft.gxapi.GXGMSYS>` :returns: A null `GXGMSYS <geosoft.gxapi.GXGMSYS>` :rtype: GXGMSYS """ return GXGMSYS() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def launch(cls, model): """ Launch `GXGMSYS <geosoft.gxapi.GXGMSYS>` with extension :param model: Model name :type model: str .. 
versionadded:: 5.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapGMSYS._launch(GXContext._get_tls_geo(), model.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXST.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXST(gxapi_cy.WrapST): """ GXST class. Mono-variate statistics. The `GXST <geosoft.gxapi.GXST>` class is used to accumulate statistical information about a set of data. This class is usually used in conjunction with others. For instance, `GXDU.stat <geosoft.gxapi.GXDU.stat>` (see `GXDU <geosoft.gxapi.GXDU>`) will add a channel's data to the `GXST <geosoft.gxapi.GXST>` object, and sComputeST_IMG (see `GXIMG <geosoft.gxapi.GXIMG>`) will compute statistics for a grid. **Note:** .. _histogram_ranges: *** Histogram ranges and color zone ranges *** Histogram bins are defined with inclusive minima and exclusive maxima; for instance if Min = 0 and Inc = 1, then the second bin would include all values z such that 0 >= z > 1 (the first bin has all values < 0). Color zones used in displaying grids (`GXITR <geosoft.gxapi.GXITR>`, ZON etc...) are the opposite, with exclusive minima and inclusive maxima. For instance, if a zone is defined from 0 to 1, then it would contain all values of z such that 0 > z >= 1. 
These definitions mean that it is impossible to perfectly assign `GXITR <geosoft.gxapi.GXITR>` colors to individual bars of a histogram. The best work-around when the data values are integers is to define the color zones using 0.5 values between the integers. A general work-around is to make the number of histogram bins much larger than the number of color zones. See also `GXST2 <geosoft.gxapi.GXST2>` (bi-variate statistics) """ def __init__(self, handle=0): super(GXST, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXST <geosoft.gxapi.GXST>` :returns: A null `GXST <geosoft.gxapi.GXST>` :rtype: GXST """ return GXST() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls): """ This method creates a statistics object which is used to accumulate statistics. :returns: `GXST <geosoft.gxapi.GXST>` Object :rtype: GXST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapST._create(GXContext._get_tls_geo()) return GXST(ret_val) @classmethod def create_exact(cls): """ This method creates a statistics object which stores all values. :returns: `GXST <geosoft.gxapi.GXST>` Object :rtype: GXST .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapST._create_exact(GXContext._get_tls_geo()) return GXST(ret_val) def data(self, val): """ Add this value to the statistics object. :param val: Value to Add :type val: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._data(val) def data_vv(self, vv): """ Add all the values in this `GXVV <geosoft.gxapi.GXVV>` to the statistics object. :param vv: `GXVV <geosoft.gxapi.GXVV>` object :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._data_vv(vv) def get_histogram_bins(self, vv): """ Retrieve number of items in each histogram bin :param vv: `GXVV <geosoft.gxapi.GXVV>` for numbers of items :type vv: GXVV .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The length of the returned `GXVV <geosoft.gxapi.GXVV>` is set to the total number of bins. If a histogram is not defined in the `GXST <geosoft.gxapi.GXST>`, then the returned length is zero. """ self._get_histogram_bins(vv) def get_histogram_info(self, div, min, max): """ Retrieve number of bins, min and max value in histogram :param div: # of bins :param min: Min (value at start of 2nd bin) :param max: Max (value at end of 2nd last bin) :type div: int_ref :type min: float_ref :type max: float_ref .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The items correspond to those in `histogram2 <geosoft.gxapi.GXST.histogram2>`. If a histogram is not defined in the `GXST <geosoft.gxapi.GXST>`, then the returned number of bins is zero, and the min and max values will be dummies. """ div.value, min.value, max.value = self._get_histogram_info(div.value, min.value, max.value) def histogram(self, bins): """ This method prepares `GXST <geosoft.gxapi.GXST>` for recording histogram. :param bins: # of bins :type bins: int ..
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Number of bins includes the one before the minimum and the one after the maximum, so it must be a value >2. IMPORTANT: This function gets the histogram minimum and maximum from the current min and max values stored in the `GXST <geosoft.gxapi.GXST>`, so this is equivalent to calling `histogram2 <geosoft.gxapi.GXST.histogram2>` with ``#bins, Min, (Max-Min)/(# bins -2))`` You should already have the data loaded in order to call this function. See histogram_ranges_ """ self._histogram(bins) def histogram2(self, bins, min, max): """ This method prepares `GXST <geosoft.gxapi.GXST>` for recording histogram. :param bins: # of bins :param min: Min :param max: Max :type bins: int :type min: float :type max: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Number of bins includes the one before the minimum and the one after the maximum, so it must be a value >2. The width of the individual bins will be (Min-Max)/(# - 2) See histogram_ranges_ """ self._histogram2(bins, min, max) def equivalent_percentile(self, value): """ Return corresponding Percentile for a Value. :param value: Input value :type value: float :returns: The percentile at the given value (0 - 100) :rtype: float .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Statistics and histogram must have been calculated prior to calling this method """ ret_val = self._equivalent_percentile(value) return ret_val def equivalent_value(self, percent): """ Return corresponding Value for a Percentile :param percent: Input percentile (0 - 100) :type percent: float :returns: The value at the given percentile. :rtype: float .. 
versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Statistics and histogram must have been calculated prior to calling this method """ ret_val = self._equivalent_value(percent) return ret_val def reset(self): """ Resets the Statistics. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._reset() def get_info(self, id): """ This method allows you to retrieve (and compute) the information from the `GXST <geosoft.gxapi.GXST>` object. :param id: :ref:`ST_INFO` :type id: int :returns: Data you asked for `GS_R8DM <geosoft.gxapi.GS_R8DM>` for none :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The following can only be determined if the `GXST <geosoft.gxapi.GXST>` has recorded a histogram: `ST_MEDIAN <geosoft.gxapi.ST_MEDIAN>`, `ST_MODE <geosoft.gxapi.ST_MODE>` `ST_MINPOS <geosoft.gxapi.ST_MINPOS>` can be used to retrieve the smallest value greater than zero, but not from `GXST <geosoft.gxapi.GXST>` objects recovered from serialized object. """ ret_val = self._get_info(id) return ret_val @classmethod def get_norm_prob(cls, x): """ Return percent value :param x: Real :type x: float :returns: real Notes this function is based on Normal Cumulative distribution function mit to about 5 standard deviations :rtype: float .. 
versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapST._get_norm_prob(GXContext._get_tls_geo(), x) return ret_val @classmethod def get_norm_prob_x(cls, percent): """ Return number of sigmas from 50% a given percent is :param percent: Real :type percent: float :returns: real Notes this function is based on Normal Cumulative distribution function mit to about 5 standard deviations :rtype: float .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapST._get_norm_prob_x(GXContext._get_tls_geo(), percent) return ret_val def normal_test(self): """ Test the "normality" of the histogram distribution :returns: The normality statistic. Terminates if no histogram in the `GXST <geosoft.gxapi.GXST>` object. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function compares the histogram to a normal curve with the same mean and standard deviation. The individual counts are normalized by the total counts, the bin width and the standard deviation. For each bin, the rms difference between the expected probability and the normalized count is summed, and the final result is normalized by the total number of bins. In this way histograms with different means, standard deviations, number of bins and counts can be compared. If the histogram were perfectly normal, then a value of 0 would be returned. The more "non-normal", the higher the statistic. 
""" ret_val = self._normal_test() return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXSYS.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXRA import GXRA from .GXWA import GXWA ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSYS(gxapi_cy.WrapSYS): """ GXSYS class. The `GXSYS <geosoft.gxapi.GXSYS>` library functions perform a wide range functions, including the storage and retrieval of named parameters from the current workspace; writing messages to the user; display of progress bars; retrieving file, date and time information from the operating system; and providing warning and error handling functions. **Note:** PARAMETER CONTROL FUNCTIONS Parameters can be named with an index extension. For example, a parameter could be named as "PARM[1]". The index can be a positive number, or it can be a '*'. If the index is a '*' in `set_string <geosoft.gxapi.GXSYS.set_string>`, then the value string will be parsed into multiple values. Commas are assumed to be delimiters. E.g. :: "group1", "multiparm[*]", "value1,\\"value,2\\",\\"value 3\\", value4 ,\\"value 5 \\"" Will set: :: multiparm[0] ="value1" multiparm[1] ="value,2" multiparm[2] ="value 3" multiparm[3] ="value4" multiparm[4] ="value 5" To read a parameter, name the parameter with the index. There is no looped-reading ability. 
For example using the following with `gt_string <geosoft.gxapi.GXSYS.gt_string>`: ``"group1","multiparm[3]",setting`` will return: ``setting = "value4"`` """ def __init__(self, handle=0): super(GXSYS, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSYS <geosoft.gxapi.GXSYS>` :returns: A null `GXSYS <geosoft.gxapi.GXSYS>` :rtype: GXSYS """ return GXSYS() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Date/Time @classmethod def break_date(cls, date, year, month, day): """ Breaks a decimal date value into year, month and day. :param date: Date value to break :param year: Year :param month: Month (0-11) :param day: Day (0-30) :type date: float :type year: int_ref :type month: int_ref :type day: int_ref .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ year.value, month.value, day.value = gxapi_cy.WrapSYS._break_date(GXContext._get_tls_geo(), date, year.value, month.value, day.value) @classmethod def dateto_long(cls, date): """ Converts a double date to a value representing total days elapsed since day 0 of year 0. This uses the Numerical Recipes Julian function. :param date: Date :type date: float :returns: x - Days :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._dateto_long(GXContext._get_tls_geo(), date) return ret_val @classmethod def timeto_long(cls, time): """ Converts decimal hours to seconds in a day. :param time: Time :type time: float :returns: x - Seconds (integer) :rtype: int ..
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._timeto_long(GXContext._get_tls_geo(), time) return ret_val @classmethod def date(cls): """ Returns the current date in decimal years. :returns: Date in decimal years. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The FormatDate_STR function can be used to convert a date to a string. """ ret_val = gxapi_cy.WrapSYS._date(GXContext._get_tls_geo()) return ret_val @classmethod def longto_date(cls, days): """ Converts a value representing total days elapsed since day 0 of year 0 to a geosoft date. This uses the Numerical Receipies Julian function. :param days: Day :type days: int :returns: x - Date :rtype: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._longto_date(GXContext._get_tls_geo(), days) return ret_val @classmethod def longto_time(cls, sec): """ Converts seconds to decimal hours. :param sec: Seconds :type sec: int :returns: x - Time :rtype: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._longto_time(GXContext._get_tls_geo(), sec) return ret_val @classmethod def make_date(cls, year, month, day): """ Returns the decimal date given the year, month and day. :param year: Year :param month: Month (0-11) :param day: Day (0-30) :type year: int :type month: int :type day: int :returns: Date in decimal years. :rtype: float .. 
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._make_date(GXContext._get_tls_geo(), year, month, day) return ret_val @classmethod def secondsto_time(cls, sec): """ Converts fractional seconds to decimal hours. :param sec: Seconds :type sec: float :returns: x - Time :rtype: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._secondsto_time(GXContext._get_tls_geo(), sec) return ret_val @classmethod def time(cls): """ Returns the current time in decimal hours. :returns: Time in decimal hours. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The FormatTime_STR function can be used to convert a time to a string. """ ret_val = gxapi_cy.WrapSYS._time(GXContext._get_tls_geo()) return ret_val @classmethod def timeto_seconds(cls, time): """ Converts decimal hours to seconds in a day fractional :param time: Time :type time: float :returns: x - Number of seconds with fractions :rtype: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._timeto_seconds(GXContext._get_tls_geo(), time) return ret_val @classmethod def utc_date(cls): """ Returns the current UTC date in decimal years. :returns: Date in decimal years. :rtype: float .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The FormatDate_STR function can be used to convert a date to a string. 
""" ret_val = gxapi_cy.WrapSYS._utc_date(GXContext._get_tls_geo()) return ret_val @classmethod def utc_time(cls): """ Returns the current UTC time in decimal hours. :returns: Time in decimal hours. :rtype: float .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The FormatTime_STR function can be used to convert a time to a string. """ ret_val = gxapi_cy.WrapSYS._utc_time(GXContext._get_tls_geo()) return ret_val # Environment @classmethod def exist_env(cls, parm): """ Check if setting exists in environment. :param parm: Setting :type parm: str :returns: 1 - Yes 0 - No :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._exist_env(GXContext._get_tls_geo(), parm.encode()) return ret_val @classmethod def get_env(cls, parm, set): """ Get an environment setting. :param parm: Setting :param set: Value string :type parm: str :type set: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ set.value = gxapi_cy.WrapSYS._get_env(GXContext._get_tls_geo(), parm.encode(), set.value.encode()) @classmethod def set_env(cls, parm, set): """ Set an environment setting. :param parm: Setting :param set: Value :type parm: str :type set: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_env(GXContext._get_tls_geo(), parm.encode(), set.encode()) # Error Handling @classmethod def clear_err_ap(cls): """ This method is called at to clear all registered errors. :returns: 0 - Successful :rtype: int .. 
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._clear_err_ap(GXContext._get_tls_geo()) return ret_val @classmethod def get_top_error_ap(cls): """ Get the error number of the last registered error. :returns: The top error number registered, 0 if none registered. :rtype: int .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._get_top_error_ap(GXContext._get_tls_geo()) return ret_val @classmethod def get_error_ap(cls, err): """ Get the error number of an error. :param err: The error index (0 to N-1, where N=number of registered errors) :type err: int :returns: The error number registered, 0 if none registered. :rtype: int .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._get_error_ap(GXContext._get_tls_geo(), err) return ret_val @classmethod def get_error_message_ap(cls, err, err_str): """ Return the error message text as a string. :param err: The error index (0 to N-1, where N=number of registered errors) :param err_str: Buffer to return message in :type err: int :type err_str: str_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This wrapper is mostly for use outside of GXs, because in general if an error is registered in a GX the GX would terminate before it could be called. Use `num_errors_ap <geosoft.gxapi.GXSYS.num_errors_ap>` to get the number of registered errors. """ err_str.value = gxapi_cy.WrapSYS._get_error_message_ap(GXContext._get_tls_geo(), err, err_str.value.encode()) @classmethod def num_errors_ap(cls): """ Returns the number of registered errors. 
:returns: The number of registered errors. :rtype: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This wrapper is mostly for use outside of GXs, because in general if an error is registered in a GX the GX would terminate before it could be called. .. seealso:: GetErrorMessageAP_SYS """ ret_val = gxapi_cy.WrapSYS._num_errors_ap(GXContext._get_tls_geo()) return ret_val @classmethod def set_server_messages_ap(cls, flag): """ Control the server message handling. :param flag: 1 - Display messages, 0 - messages reported as errors :type flag: int .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Should be set to false when dialogs should not appear. This setting is thread specific. """ gxapi_cy.WrapSYS._set_server_messages_ap(GXContext._get_tls_geo(), flag) # Execution @classmethod def run(cls, command, args, process): """ Run a command line process. :param command: Command name :param args: Command line arguments :param process: Flags :ref:`SYS_RUN_TYPE` :ref:`SYS_RUN_DISPLAY` :ref:`SYS_RUN_HOLD` :ref:`SYS_RUN_WIN` :type command: str :type args: str :type process: int :returns: -1 if failed to execute task Exit status of the task :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Default option for each define below is the first one and is set to 0. We look for the command object in the following order: 1. the local working directory 2. the <geosoft>\\bin directory 3. the system path """ ret_val = gxapi_cy.WrapSYS._run(GXContext._get_tls_geo(), command.encode(), args.encode(), process) return ret_val @classmethod def run_gs(cls, gs): """ Run a GS. :param gs: Name of GS to run. 
:type gs: str :returns: Exit status of the GS -1 cancelled 0 success 1 ended with an error. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `set_interactive <geosoft.gxapi.GXSYS.set_interactive>`, `run_gx <geosoft.gxapi.GXSYS.run_gx>` """ ret_val = gxapi_cy.WrapSYS._run_gs(GXContext._get_tls_geo(), gs.encode()) return ret_val @classmethod def run_gx(cls, gx): """ Run a GX. :param gx: Name of GX to run. :type gx: str :returns: Exit status of the GX: -1 cancelled 0 success 1 ended with an error. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the called GX returns an error, they will not be displayed until the "top" calling GX terminates, unless you call `show_error <geosoft.gxapi.GXSYS.show_error>`. .. seealso:: `run_gx_ex <geosoft.gxapi.GXSYS.run_gx_ex>`, `set_interactive <geosoft.gxapi.GXSYS.set_interactive>` and `run_gs <geosoft.gxapi.GXSYS.run_gs>` """ ret_val = gxapi_cy.WrapSYS._run_gx(GXContext._get_tls_geo(), gx.encode()) return ret_val @classmethod def run_python(cls, gx, init_info): """ Run a Python GX script with initialization information. :param gx: Name of Python GX to run. :param init_info: Initialization information (usually JSON). Available withon Python script as a global variable named gx_init_info. :type gx: str :type init_info: str :returns: Return value of the GX set by `set_return <geosoft.gxapi.GXSYS.set_return>` (0 by default). Terminates on error or cancel. :rtype: int .. versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. 
seealso:: `set_return <geosoft.gxapi.GXSYS.set_return>` """ ret_val = gxapi_cy.WrapSYS._run_python(GXContext._get_tls_geo(), gx.encode(), init_info.encode()) return ret_val @classmethod def run_gx_ex(cls, gx, ret): """ Run a GX. :param gx: Name of GX to run. :param ret: Return value set in the child GX (0 by default) :type gx: str :type ret: int_ref :returns: Exit status of the GX: -1 cancelled 0 success 1 ended with an error. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `run_gx <geosoft.gxapi.GXSYS.run_gx>`, `set_return <geosoft.gxapi.GXSYS.set_return>` """ ret_val, ret.value = gxapi_cy.WrapSYS._run_gx_ex(GXContext._get_tls_geo(), gx.encode(), ret.value) return ret_val @classmethod def run_pdf(cls, mnu, pdf): """ Run a PDF. :param mnu: Group name, can be "". :param pdf: PDF name (.pdf assumed) :type mnu: str :type pdf: str :returns: Exit status of the task, 0 usually means success. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The group name of the PDF variables will be "group_pdf", where "group" is the name given in the first argument, and "pdf" is the root PDF file name. 
""" ret_val = gxapi_cy.WrapSYS._run_pdf(GXContext._get_tls_geo(), mnu.encode(), pdf.encode()) return ret_val @classmethod def shell_execute(cls, verb, file, parameters, directory, show): """ Call Microsoft ShellExecute function (See `MSDN <https://msdn.microsoft.com/en-us/library/windows/desktop/bb762153(v=vs.85).aspx>`_) :param verb: Verb :param file: File :param parameters: Parameters :param directory: Directory :param show: :ref:`SHELL_EXECUTE` :type verb: str :type file: str :type parameters: str :type directory: str :type show: int :returns: return value of ShellExecute as documented on MSDN :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `do_command <geosoft.gxapi.GXSYS.do_command>` """ ret_val = gxapi_cy.WrapSYS._shell_execute(GXContext._get_tls_geo(), verb.encode(), file.encode(), parameters.encode(), directory.encode(), show) return ret_val @classmethod def set_return(cls, ret): """ Set the return value of a GX. :param ret: Return Value :type ret: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This value is returned in the `run_gx_ex <geosoft.gxapi.GXSYS.run_gx_ex>` call only. """ gxapi_cy.WrapSYS._set_return(GXContext._get_tls_geo(), ret) # External DLL @classmethod def do_command(cls, command): """ Execute an Oasis montaj command. :param command: Command :type command: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** Commands syntax: "[type] command" ======= ============================================================================================ type command ======= ============================================================================================ ID Internal Menu Command (as found in omn and geobar files e.g. ``*ID_EDIT_SELECT``) ------- -------------------------------------------------------------------------------------------- GX gx file name ------- -------------------------------------------------------------------------------------------- GS gs file name ------- -------------------------------------------------------------------------------------------- DOTNET dll file name Use qualifiers to specify class and method e.g.: ``"[DOTNET] geogxnet.dll(Geosoft.GX.NewGDB.NewGDB;Run)"`` ------- -------------------------------------------------------------------------------------------- PDF Geosoft pdf file name (Not Adobe PDF document, a legacy Geosoft Sushi script) ------- -------------------------------------------------------------------------------------------- DOS DOS style command ------- -------------------------------------------------------------------------------------------- HLP help file name ======= ============================================================================================ The must be ONE space between the "]" and the command. For example: ``"[ID] ID_EDIT_SELECT"`` // bring up the line edit tool .. seealso:: `shell_execute <geosoft.gxapi.GXSYS.shell_execute>` """ gxapi_cy.WrapSYS._do_command(GXContext._get_tls_geo(), command.encode()) @classmethod def error(cls, error_file, module, error): """ Register an error message :param error_file: Your error file name, "" if none. :param module: Module name in which error occurred. :param error: Error number :type error_file: str :type module: str :type error: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use this function to register your own error messages when an error occurs in your code. Your errors can be provided in your own `GXGER <geosoft.gxapi.GXGER>` file. See GEOSOFT.`GXGER <geosoft.gxapi.GXGER>` for an example of the `GXGER <geosoft.gxapi.GXGER>` file format. If the error # is not found in your error file, the OE32.`GXGER <geosoft.gxapi.GXGER>` file, then the GEOSOFT.`GXGER <geosoft.gxapi.GXGER>` file will be searched. """ gxapi_cy.WrapSYS._error(GXContext._get_tls_geo(), error_file.encode(), module.encode(), error) @classmethod def error_tag(cls, tag, set): """ Set an error message tag string :param tag: Tag string, ie "%1". :param set: String to replace the tag. :type tag: str :type set: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use this method to replace tag strings in your error message text with run-time information. For example, Geosoft error messages often use the tag strings "%1", "%2", etc. as place holders to be replaced by a string which is only known at run-time. """ gxapi_cy.WrapSYS._error_tag(GXContext._get_tls_geo(), tag.encode(), set.encode()) @classmethod def assert_gx(cls, exp, mod, parm): """ DLL function argument error assertion :param exp: Boolean expression (ie. (dB != 0.0) ) :param mod: Module name :param parm: Argument name :type exp: int :type mod: str :type parm: str :returns: 0 assertion passed 1 assertion failed :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use this function to evaluate errors in passed function arguments. Functions called by GX programs should be tolerant of all errors in the passed argument list. 
The `assert_gx <geosoft.gxapi.GXSYS.assert_gx>` can be used to test each argument before doing any work in the function. If an assertion fails, an error will be registered with the name of the function and the parameter name and a 1 will be returned. The caller should immediatley cleaning up (if necessary) and return. You could also test the validity of arguments and call the `error <geosoft.gxapi.GXSYS.error>`, `error_tag <geosoft.gxapi.GXSYS.error_tag>` and `terminate <geosoft.gxapi.GXSYS.terminate>` functions if you would like to provide a more specific error message. """ ret_val = gxapi_cy.WrapSYS._assert_gx(GXContext._get_tls_geo(), exp, mod.encode(), parm.encode()) return ret_val @classmethod def ole_automation(cls, object, info_str, info_val): """ Call OLE Automation designed to be called from Montaj. :param object: Object Name :param info_str: Info String :param info_val: Info Int :type object: str :type info_str: str :type info_val: int :returns: Return from automation engine. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapSYS._ole_automation(GXContext._get_tls_geo(), object.encode(), info_str.encode(), info_val) return ret_val @classmethod def save_log(cls, file): """ Saves the main log file to another file. :param file: Output file name :type file: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapSYS._save_log(GXContext._get_tls_geo(), file.encode()) @classmethod def terminate(cls, name): """ DLL error termination :param name: Module name :type name: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Call this function immediately before returning to the caller after an error has occurred inside the DLL. If an error has occurred, you should clean-up (free memory, close files), call `error <geosoft.gxapi.GXSYS.error>` to register your own error messages, call `error_tag <geosoft.gxapi.GXSYS.error_tag>` to set any error message tags, call `terminate <geosoft.gxapi.GXSYS.terminate>` and return. Geosoft functions that detect an error will have already registered their own errors and called `terminate <geosoft.gxapi.GXSYS.terminate>`. """ gxapi_cy.WrapSYS._terminate(GXContext._get_tls_geo(), name.encode()) # File System @classmethod def crc_file(cls, file): """ Compute the CRC of a file :param file: File Name :type file: str :returns: CRC Value :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._crc_file(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def crc_file_offset(cls, file, offset): """ Compute the CRC of a file with an Offset :param file: File Name :param offset: Offset in the file (0 for start) :type file: str :type offset: int :returns: CRC Value :rtype: int .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._crc_file_offset(GXContext._get_tls_geo(), file.encode(), offset) return ret_val @classmethod def file_ren(cls, old_file, new_file): """ Rename a file :param old_file: Old file name :param new_file: New file name :type old_file: str :type new_file: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._file_ren(GXContext._get_tls_geo(), old_file.encode(), new_file.encode()) @classmethod def find_files_vv(cls, vv, mask): """ Fill a `GXVV <geosoft.gxapi.GXVV>` with files matching an input file mask. :param vv: `GXVV <geosoft.gxapi.GXVV>` object :param mask: File mask to match :type vv: GXVV :type mask: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Fill a `GXVV <geosoft.gxapi.GXVV>` with files matching the input file mask. The `GXVV <geosoft.gxapi.GXVV>` should be of string type. """ gxapi_cy.WrapSYS._find_files_vv(GXContext._get_tls_geo(), vv, mask.encode()) @classmethod def absolute_file_name(cls, abbr, name): """ Convert an abbreviated path name to a full path name. :param abbr: Input file name to resolve :param name: Output name, can be the same as input :type abbr: str :type name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is mainly intended to convert ".\\name" to a full name at run-time. """ name.value = gxapi_cy.WrapSYS._absolute_file_name(GXContext._get_tls_geo(), abbr.encode(), name.value.encode()) @classmethod def copy_file(cls, src_file, dest_file): """ Copy a file. :param src_file: Source file :param dest_file: Destination file :type src_file: str :type dest_file: str :returns: 0 if file copied ok. 1 if unable to copy file or source file not found. :rtype: int .. 
versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._copy_file(GXContext._get_tls_geo(), src_file.encode(), dest_file.encode()) return ret_val @classmethod def delete_file(cls, file): """ Delete a file. :param file: Name of file to delete :type file: str :returns: 0 if file deleted. 1 if unable to find file or delete file. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._delete_file(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def delete_gi_file(cls, file): """ Delete the GI file associated with a grid. :param file: Name of grid file to delete :type file: str :returns: 0 if file deleted. 1 if file is not found, or found but could not be deleted. This is a "one-line" function to take a grid file name, remove the qualifiers, add the ".gi" and delete the file. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._delete_gi_file(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def delete_grid_file(cls, file): """ Delete a grid file and its associated GI and XML files. :param file: Name of grid file to delete :type file: str :returns: 0 if grid file deleted. 1 if grid file not found or if one or more files is found but could not be deleted. :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Deletes the grid file first, and, if they exist, the associated GI and XML files. No error is registered if a file is not found or cannot be deleted. 
""" ret_val = gxapi_cy.WrapSYS._delete_grid_file(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def dir_exist(cls, dir): """ Check to see if a directory exists :param dir: Name of directory to check :type dir: str :returns: 0 - Directory doesn't exist 1 - Directory exists :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._dir_exist(GXContext._get_tls_geo(), dir.encode()) return ret_val @classmethod def file_exist(cls, file): """ Check to see if a file exists :param file: Name of file to check :type file: str :returns: 0 - File doesn't exist 1 - File exists :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use the FULL path for the file name. If the full path is not specified, then the current working directory is used for the path. """ ret_val = gxapi_cy.WrapSYS._file_exist(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def file_size(cls, file): """ Returns size of a file. :param file: Name of file :type file: str :returns: 0 none/error x Size :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._file_size(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def file_writable(cls, file): """ Check if a file can be created or opened in read-write mode at a specific location :param file: File path name to check :type file: str :rtype: bool .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._file_writable(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def find_path(cls, file, mode, fullname): """ Get full path for a file with Geosoft subdirectory parameter. :param file: File to get path name for :param mode: :ref:`SYS_SEARCH_PATH` :param fullname: Buffer to place path name into :type file: str :type mode: int :type fullname: str_ref :returns: 0 if file found. 1 if file not found. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Directories can be resolved from the Environment section of the Geosoft registry, or from system environment variables that are not defined in the Geosoft Environment registry. The following file prefixes will be replaced by the environment settings: <geosoft> the main Geosoft installation directory <geosoft2> the secondary Geosoft installation directory <geotemp> the Geosoft temporary file directory <windows> the operating system Windows directory <system> the operating system system directory <other> other environment variables """ ret_val, fullname.value = gxapi_cy.WrapSYS._find_path(GXContext._get_tls_geo(), file.encode(), mode, fullname.value.encode()) return ret_val @classmethod def find_path_ex(cls, file, mode, dir_mode, fullname): """ Get full path for a file. :param file: File to get path name for :param mode: :ref:`SYS_SEARCH_PATH` :param dir_mode: :ref:`GEO_DIRECTORY` :param fullname: Buffer to place path name into :type file: str :type mode: int :type dir_mode: int :type fullname: str_ref :returns: 0 if file found. 1 if file not found. :rtype: int .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Directories can be resolved from the Environment section of the Geosoft registry, or from system environment variables that are not defined in the Geosoft Environment registry. The following file prefixes will be replaced by the environment settings: <geosoft> the main Geosoft installation directory <geosoft2> the secondary Geosoft installation directory <geotemp> the Geosoft temporary file directory <windows> the operating system Windows directory <system> the operating system system directory <other> other environment variable """ ret_val, fullname.value = gxapi_cy.WrapSYS._find_path_ex(GXContext._get_tls_geo(), file.encode(), mode, dir_mode, fullname.value.encode()) return ret_val @classmethod def get_directory(cls, sys_dir, dir): """ Get a directory path :param sys_dir: :ref:`SYS_DIR` :param dir: Returned directory path string :type sys_dir: int :type dir: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The path will always end with the file separator character """ dir.value = gxapi_cy.WrapSYS._get_directory(GXContext._get_tls_geo(), sys_dir, dir.value.encode()) @classmethod def get_path(cls, type, path): """ Get a Geosoft path :param type: :ref:`SYS_PATH` :param path: String in which to place path :type type: int :type path: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The path name will have a directory separator at the end. """ path.value = gxapi_cy.WrapSYS._get_path(GXContext._get_tls_geo(), type, path.value.encode()) @classmethod def get_windows_dir(cls, dir): """ Get the Windows directory path :param dir: Buff for directory path string :type dir: str_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ dir.value = gxapi_cy.WrapSYS._get_windows_dir(GXContext._get_tls_geo(), dir.value.encode()) @classmethod def make_dir(cls, dir): """ Create a directory. :param dir: Name of directory :type dir: str :returns: 0 - Directory made 1 - Directory cannot be made :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._make_dir(GXContext._get_tls_geo(), dir.encode()) return ret_val @classmethod def make_file_readonly(cls, file): """ Set a file's read-only attribute. :param file: Name of file :type file: str :returns: 0 if read-only attribute successfully set, 1 if attribute change fails. :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._make_file_readonly(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def make_file_writable(cls, file): """ Removes a file's read-only attribute. :param file: Name of file :type file: str :returns: 0 if read-only attribute successfully removed, 1 if attribute change fails. :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._make_file_writable(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def relative_file_name(cls, abbr, name): """ Convert a file name to a relative abbreviated path name :param abbr: Input file name to resolve :param name: Output name, can be the same as input :type abbr: str :type name: str_ref .. 
versionadded:: 6.0.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This will produce relative paths based on the workspace
        directory into ".\\name".
        """
        name.value = gxapi_cy.WrapSYS._relative_file_name(GXContext._get_tls_geo(), abbr.encode(), name.value.encode())



    @classmethod
    def short_path_file_name(cls, in_name, name):
        """
        Obtains the short path form of a specified input path.

        :param in_name:  Input file name to resolve
        :param name:     Output name, can be the same as input
        :type  in_name:  str
        :type  name:     str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        name.value = gxapi_cy.WrapSYS._short_path_file_name(GXContext._get_tls_geo(), in_name.encode(), name.value.encode())



    @classmethod
    def temp_file_ext(cls, ext, out):
        """
        Generate a unique file name for this extension in the temp directory.

        :param ext:  Input extension (without .)
        :param out:  Output name
        :type  ext:  str
        :type  out:  str_ref

        .. versionadded:: 5.1.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This is useful for creating a unique temporary name for a file
        in the Geosoft temporary directory.
        """
        out.value = gxapi_cy.WrapSYS._temp_file_ext(GXContext._get_tls_geo(), ext.encode(), out.value.encode())



    @classmethod
    def temp_file_name(cls, path_file, out_filename):
        """
        Generate a file name for this file in the temp directory.

        :param path_file:     Input file name to resolve (path is removed)
        :param out_filename:  Output name, can be the same as input
        :type  path_file:     str
        :type  out_filename:  str_ref

        ..
versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This is useful for creating a unique temporary name for a file
        in the Geosoft temporary directory.

        From version 7.0 the file extension will match the input file, but the
        filename itself will be a process and thread unique value to ensure
        that clashes do not happen.
        """
        out_filename.value = gxapi_cy.WrapSYS._temp_file_name(GXContext._get_tls_geo(), path_file.encode(), out_filename.value.encode())



    @classmethod
    def transfer_path(cls, path_file, file):
        """
        Transfers file path to new file name.

        :param path_file:  Input file path/name
        :param file:       Output file name with path transferred
        :type  path_file:  str
        :type  file:       str_ref

        .. versionadded:: 5.1.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The path and volume from the input string is added to
        the file name from the output string.
        """
        file.value = gxapi_cy.WrapSYS._transfer_path(GXContext._get_tls_geo(), path_file.encode(), file.value.encode())



    @classmethod
    def valid_file_name(cls, file):
        """
        Check to see if a file name is valid.

        :param file:  Name of file to check
        :type  file:  str

        :returns:     0 - File name is not valid
                      1 - File name is valid
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Use the FULL path for the file name. If the full path
        is not specified, then the current working directory is used for
        the path.
        """
        ret_val = gxapi_cy.WrapSYS._valid_file_name(GXContext._get_tls_geo(), file.encode())
        return ret_val



    @classmethod
    def write_in_dir(cls, dir):
        """
        Can I create files in this directory ?

        :param dir:  Name of directory to check
        :type  dir:  str

        :returns:    0 - Directory doesn't allow write or does not exist
                     1 - Directory allows writes
        :rtype:      int

        ..
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._write_in_dir(GXContext._get_tls_geo(), dir.encode()) return ret_val @classmethod def file_date(cls, file): """ File creation date in decimal years. :param file: File name :type file: str :returns: Date in decimal years, `rDUMMY <geosoft.gxapi.rDUMMY>` if the file does not exist. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The FormatDate_STR function can be used to convert a date to a string. """ ret_val = gxapi_cy.WrapSYS._file_date(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def file_time(cls, file): """ File creation time in decimal hours. :param file: File name :type file: str :returns: Date in decimal hours, `rDUMMY <geosoft.gxapi.rDUMMY>` if the file does not exist. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The FormatTime_STR function can be used to convert a time to a string. """ ret_val = gxapi_cy.WrapSYS._file_time(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def utc_file_date(cls, file): """ File creation UTC date in decimal years. :param file: File name :type file: str :returns: Date in decimal years, `rDUMMY <geosoft.gxapi.rDUMMY>` if the file does not exist. :rtype: float .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The FormatDate_STR function can be used to convert a date to a string. 
""" ret_val = gxapi_cy.WrapSYS._utc_file_date(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def utc_file_time(cls, file): """ File creation UTC time in decimal hours. :param file: File name :type file: str :returns: Date in decimal hours, `rDUMMY <geosoft.gxapi.rDUMMY>` if the file does not exist. :rtype: float .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The FormatTime_STR function can be used to convert a time to a string. """ ret_val = gxapi_cy.WrapSYS._utc_file_time(GXContext._get_tls_geo(), file.encode()) return ret_val # Global Parameter @classmethod def get_settings_meta(cls, meta): """ Get the settings metadata object. :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to store the settings metadata in :type meta: GXMETA .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._get_settings_meta(GXContext._get_tls_geo(), meta) @classmethod def global_reset(cls, ini): """ Reset the global parameters. :param ini: New INI file name, if "", use default. :type ini: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._global_reset(GXContext._get_tls_geo(), ini.encode()) @classmethod def global_set(cls, parm, set): """ Set a global parameter setting. :param parm: Name of the Parameter :param set: Setting :type parm: str :type set: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._global_set(GXContext._get_tls_geo(), parm.encode(), set.encode()) @classmethod def global_write(cls, ini): """ Modify the global parameters. :param ini: Global INI file, if "" use default. 
:type ini: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the global parameters have been changed, use this function to make the changes permanent, """ gxapi_cy.WrapSYS._global_write(GXContext._get_tls_geo(), ini.encode()) @classmethod def global_(cls, parm, setting): """ Get a global parameter setting. :param parm: Name of the Parameter :param setting: Setting returned :type parm: str :type setting: str_ref :returns: 0 if parameter found. 1 if parameter not found or not set. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The returned string will be empty if the parameter is not found. Parameters are derived from GEOSOFT.INI. This is a standard Windows style INI file that contains [GROUPS], PARAMETERS and SETTINGS as follows [GROUP1] PARAM1=setting1 PARAM2 setting2 PARAM3 "setting3 is text" To retrieve an entry, specify the group.parameter. For example, iGlobal_SYS("GROUP1.PARAM3",sSetting) will retrieve the string "setting is text". The double quotes will not appear in the setting. """ ret_val, setting.value = gxapi_cy.WrapSYS._global_(GXContext._get_tls_geo(), parm.encode(), setting.value.encode()) return ret_val @classmethod def reset_settings(cls): """ Resets the GX_HELP settings in the geosoft.ini file after changes have been made. .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapSYS._reset_settings(GXContext._get_tls_geo()) @classmethod def set_settings_meta(cls, meta): """ Set the settings metadata object. :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object :type meta: GXMETA .. 
versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_settings_meta(GXContext._get_tls_geo(), meta) # Licensing @classmethod def check_arc_license(cls): """ Check to see if a ESRI ArcEngine or ArcView license is available :returns: 1 - Licenced 0 - Not licenced :rtype: int .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._check_arc_license(GXContext._get_tls_geo()) return ret_val @classmethod def check_arc_license_ex(cls, version): """ Check to see if a ESRI ArcEngine or ArcView license is available, returns type and version of available engine. :param version: Version String :type version: str_ref :returns: :ref:`ARC_LICENSE` :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, version.value = gxapi_cy.WrapSYS._check_arc_license_ex(GXContext._get_tls_geo(), version.value.encode()) return ret_val @classmethod def check_intrinsic(cls, cl, name): """ Check to see if an intrinsic object is licensed :param cl: Intrinsic Class Number :param name: Intrinsic Name (must be exact) :type cl: int :type name: str :returns: 1 - Licenced 0 - Not licenced :rtype: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._check_intrinsic(GXContext._get_tls_geo(), cl, name.encode()) return ret_val @classmethod def get_geodist(cls): """ Gets a global flag that indicates whether we are running within the geodist library :returns: 0 - Geodist not loaded, 1 - Geodist loaded :rtype: int .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._get_geodist(GXContext._get_tls_geo()) return ret_val @classmethod def get_license_class(cls, cl): """ Get the current application license class. :param cl: Class String :type cl: str_ref .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** String may be one of : "ArcGIS" "OasisMontaj" "DapServer" """ cl.value = gxapi_cy.WrapSYS._get_license_class(GXContext._get_tls_geo(), cl.value.encode()) @classmethod def get_licensed_user(cls, user, company): """ Get the licensed user name and Company :param user: User Name :param company: Company Name :type user: str_ref :type company: str_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ user.value, company.value = gxapi_cy.WrapSYS._get_licensed_user(GXContext._get_tls_geo(), user.value.encode(), company.value.encode()) @classmethod def is_signed_in(cls): """ Check if signed in via Geosoft Connect :rtype: bool .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._is_signed_in(GXContext._get_tls_geo()) return ret_val @classmethod def sign_in(cls): """ Sign in via Geosoft Connect .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._sign_in(GXContext._get_tls_geo()) @classmethod def check_product_updates(cls, silent): """ Check for product updates via Geosoft Connect :param silent: Do not show notification if no updates available. :type silent: bool .. 
versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._check_product_updates(GXContext._get_tls_geo(), silent) @classmethod def geosoft_connect_authenticate_and_navigate(cls, url): """ Automatically authenticate and navigate to my.geosoft.com URL :param url: URL :type url: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._geosoft_connect_authenticate_and_navigate(GXContext._get_tls_geo(), url.encode()) @classmethod def get_geosoft_id(cls, id): """ Get the Geosoft ID (email) if signed in :param id: Returned ID :type id: str_ref .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ id.value = gxapi_cy.WrapSYS._get_geosoft_id(GXContext._get_tls_geo(), id.value.encode()) @classmethod def publish_datasets_to_central(cls, instance_uuid, project_uuid, project_name, central_server_guid, central_branch_id, central_revision_id, metadata_as_json, parent_event_id): """ Publish datasets to Central :param instance_uuid: Central Instance UUID :param project_uuid: Project UUID :param project_name: Project name :param central_server_guid: Central server GUID :param central_branch_id: Branch ID :param central_revision_id: Revision ID :param metadata_as_json: metadata as JSON :param parent_event_id: Parent Event ID :type instance_uuid: str :type project_uuid: str :type project_name: str :type central_server_guid: str_ref :type central_branch_id: int_ref :type central_revision_id: int_ref :type metadata_as_json: str :type parent_event_id: str .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ central_server_guid.value, central_branch_id.value, central_revision_id.value = gxapi_cy.WrapSYS._publish_datasets_to_central(GXContext._get_tls_geo(), instance_uuid.encode(), project_uuid.encode(), project_name.encode(), central_server_guid.value.encode(), central_branch_id.value, central_revision_id.value, metadata_as_json.encode(), parent_event_id.encode()) @classmethod def get_publish_path_for_central(cls, project_uuid, cache_path): """ Get cache path to publish datasets to Central :param project_uuid: Project UUID :param cache_path: Returned cache path :type project_uuid: str :type cache_path: str_ref .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ cache_path.value = gxapi_cy.WrapSYS._get_publish_path_for_central(GXContext._get_tls_geo(), project_uuid.encode(), cache_path.value.encode()) @classmethod def connect_with_current_central_instance(cls, name, base_path, url, token): """ Query information necessary to communicate with current Central Instance :param name: Instance name :param base_path: Base path (single port support) :param url: URL :param token: Authorization token :type name: str_ref :type base_path: str_ref :type url: str_ref :type token: str_ref .. versionadded:: 9.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value, base_path.value, url.value, token.value = gxapi_cy.WrapSYS._connect_with_current_central_instance(GXContext._get_tls_geo(), name.value.encode(), base_path.value.encode(), url.value.encode(), token.value.encode()) @classmethod def get_profile_name(cls, name): """ Get the profile name as defined in My Geosoft (or email if not defined) :param name: Returned name :type name: str_ref .. 
versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        name.value = gxapi_cy.WrapSYS._get_profile_name(GXContext._get_tls_geo(), name.value.encode())



    @classmethod
    def get_profile_url(cls, url):
        """
        Get link to my.geosoft.com profile URL

        :param url:  Returned URL
        :type  url:  str_ref

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        url.value = gxapi_cy.WrapSYS._get_profile_url(GXContext._get_tls_geo(), url.value.encode())




# Lineage


    @classmethod
    def add_lineage_parameter(cls, name, value):
        """
        Add a parameter to the current lineage object

        :param name:   Parameter Name
        :param value:  Parameter Value
        :type  name:   str
        :type  value:  str

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSYS._add_lineage_parameter(GXContext._get_tls_geo(), name.encode(), value.encode())



    @classmethod
    def add_lineage_source(cls, source_type, source_name):
        """
        Add a source to the current lineage object

        :param source_type:  :ref:`SYS_LINEAGE_SOURCE`
        :param source_name:  Source Name
        :type  source_type:  int
        :type  source_name:  str

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSYS._add_lineage_source(GXContext._get_tls_geo(), source_type, source_name.encode())



    @classmethod
    def clear_lineage_parameters(cls):
        """
        Clear all the lineage parameters

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSYS._clear_lineage_parameters(GXContext._get_tls_geo())



    @classmethod
    def clear_lineage_sources(cls):
        """
        Clear all the lineage sources

        ..
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._clear_lineage_sources(GXContext._get_tls_geo()) @classmethod def copy_geo_file(cls, data, dir): """ Copy a Geosoft data file and all associated files to a new folder :param data: File Name :param dir: Target directory :type data: str :type dir: str .. versionadded:: 7.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Grids are copied and the GI's are maintained - note that support for non-geosoft grids is limited since this method does not guarantee all grid files besides the main one are copied. """ gxapi_cy.WrapSYS._copy_geo_file(GXContext._get_tls_geo(), data.encode(), dir.encode()) @classmethod def backup_geo_file(cls, data, target): """ Backup a Geosoft data file and all associated files to a temporary folder. :param data: File Name :param target: Buffer to place the target name into :type data: str :type target: str_ref .. versionadded:: 7.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Grids are copied and the GI's are maintained - note that support for non-geosoft grids is limited since this method does not guarantee all grid files besides the main one are copied. """ target.value = gxapi_cy.WrapSYS._backup_geo_file(GXContext._get_tls_geo(), data.encode(), target.value.encode()) @classmethod def remove_lineage_output(cls, output_name): """ Remove an output from the current lineage object :param output_name: Source Name :type output_name: str .. 
versionadded:: 7.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._remove_lineage_output(GXContext._get_tls_geo(), output_name.encode()) @classmethod def remove_lineage_parameter(cls, name): """ Remove a parameter in the current lineage object :param name: Paramter Name :type name: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._remove_lineage_parameter(GXContext._get_tls_geo(), name.encode()) @classmethod def remove_lineage_source(cls, source_name): """ Remove a source from the current lineage object :param source_name: Source Name :type source_name: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._remove_lineage_source(GXContext._get_tls_geo(), source_name.encode()) @classmethod def restore_geo_file(cls, target, original): """ Backup a Geosoft data file and all associated files to original location :param target: Backup File Name :param original: Original file name :type target: str :type original: str .. versionadded:: 7.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Grids are copied and the GI's are maintained - note that support for non-geosoft grids is limited since this method does not guarantee all grid files besides the main one are copied. """ gxapi_cy.WrapSYS._restore_geo_file(GXContext._get_tls_geo(), target.encode(), original.encode()) @classmethod def set_lineage_description(cls, description): """ Set the description for the current lineage object :param description: Description :type description: str .. 
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_lineage_description(GXContext._get_tls_geo(), description.encode()) @classmethod def set_lineage_display_name(cls, display_name): """ Set the display name for the current lineage object :param display_name: DisplayName :type display_name: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_lineage_display_name(GXContext._get_tls_geo(), display_name.encode()) @classmethod def set_lineage_name(cls, name): """ Set the name for the current lineage object :param name: Name :type name: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_lineage_name(GXContext._get_tls_geo(), name.encode()) # Menus and Toolbar @classmethod def clear_menus(cls, flag): """ Clear all menus :param flag: :ref:`SYS_MENU_CLEAR` :type flag: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapSYS._clear_menus(GXContext._get_tls_geo(), flag) @classmethod def get_loaded_menus(cls, lst_default, lst_loaded, lst_user): """ Get the loaded menus. :param lst_default: Default menus (typically a single entry based on product) :param lst_loaded: Loaded menus :param lst_user: Loaded user menus :type lst_default: GXLST :type lst_loaded: GXLST :type lst_user: GXLST .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** The names of the LSTs contain the menus and the values
        contain any exclusions. Exclusions are semicolon separated top level
        menu names and/or toolbar.geobar file names.
        """
        gxapi_cy.WrapSYS._get_loaded_menus(GXContext._get_tls_geo(), lst_default, lst_loaded, lst_user)



    @classmethod
    def set_loaded_menus(cls, lst_default, lst_loaded, lst_user):
        """
        Load a list of menus

        :param lst_default:  Default menus (typically a single entry based on product, do not change the name returned by `get_loaded_menus <geosoft.gxapi.GXSYS.get_loaded_menus>`)
        :param lst_loaded:   Loaded menus
        :param lst_user:     Loaded user menus
        :type  lst_default:  GXLST
        :type  lst_loaded:   GXLST
        :type  lst_user:     GXLST

        .. versionadded:: 9.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Limitations:** May not be available while executing a command line program.

        **Note:** The names of the LSTs contain the menus and the values
        contain any exclusions. Exclusions are semicolon separated top level
        menu names and/or toolbar.geobar file names.
        """
        gxapi_cy.WrapSYS._set_loaded_menus(GXContext._get_tls_geo(), lst_default, lst_loaded, lst_user)



    @classmethod
    def get_entitlement_rights(cls, lst_rights):
        """
        Get the Entitlement Rights

        :param lst_rights:  Rights
        :type  lst_rights:  GXLST

        .. versionadded:: 9.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSYS._get_entitlement_rights(GXContext._get_tls_geo(), lst_rights)




# Misc


    @classmethod
    def generate_guid(cls, guid):
        """
        Generates a GUID string (e.g. {4FEDE8BF-CDAB-430A-8026-1CCC0EC0A2EB})

        :param guid:  GUID
        :type  guid:  str_ref

        ..
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ guid.value = gxapi_cy.WrapSYS._generate_guid(GXContext._get_tls_geo(), guid.value.encode()) @classmethod def clipboard_to_file(cls, file): """ Copy text from the clipboard to a file. :param file: File name to place it into :type file: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._clipboard_to_file(GXContext._get_tls_geo(), file.encode()) @classmethod def create_clipboard_ra(cls): """ Create a `GXRA <geosoft.gxapi.GXRA>` to read text from the clipboard. :returns: `GXRA <geosoft.gxapi.GXRA>` to use for reading. :rtype: GXRA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Destroy the `GXRA <geosoft.gxapi.GXRA>` as soon as possible. As long as it open the clipboard is not accessible from any application. """ ret_val = gxapi_cy.WrapSYS._create_clipboard_ra(GXContext._get_tls_geo()) return GXRA(ret_val) @classmethod def create_clipboard_wa(cls): """ Create a `GXWA <geosoft.gxapi.GXWA>` to write text on the clipboard. :returns: `GXWA <geosoft.gxapi.GXWA>` to use for reading. :rtype: GXWA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Destroy the `GXWA <geosoft.gxapi.GXWA>` as soon as possible. As long as it open the clipboard is not accessible from any application. 
""" ret_val = gxapi_cy.WrapSYS._create_clipboard_wa(GXContext._get_tls_geo()) return GXWA(ret_val) @classmethod def emf_object_size(cls, file, size_x, size_y): """ Get the size of an EMF object :param file: EMF File holding data :param size_x: Size X :param size_y: Size Y :type file: str :type size_x: float_ref :type size_y: float_ref .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ size_x.value, size_y.value = gxapi_cy.WrapSYS._emf_object_size(GXContext._get_tls_geo(), file.encode(), size_x.value, size_y.value) @classmethod def file_to_clipboard(cls, file): """ Copy a text file onto the clipboard as text. :param file: File place into clipboard :type file: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._file_to_clipboard(GXContext._get_tls_geo(), file.encode()) @classmethod def font_lst(cls, lst, which): """ List all Windows and geosoft fonts. :param lst: List Object :param which: :ref:`SYS_FONT` :type lst: GXLST :type which: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To get TT and GFN fonts, call twice with the same list and `SYS_FONT_TT <geosoft.gxapi.SYS_FONT_TT>`, then `SYS_FONT_GFN <geosoft.gxapi.SYS_FONT_GFN>`, or vice-versa to change order of listing. """ gxapi_cy.WrapSYS._font_lst(GXContext._get_tls_geo(), lst, which) @classmethod def get_dot_net_gx_entries(cls, gx, entry_buffer): """ Get the list of entry points that this assembly has exposed to Oasis montaj. :param gx: Name of .NET GX assembly :param entry_buffer: Buffer to place list of entries in :type gx: str :type entry_buffer: str_ref :returns: 0 success 1 error. :rtype: int .. 
versionadded:: 6.0.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The list of entry points are passed back as one string with
        each entry point separated by a semi-colon.
        For example: NewGDB|Run;NewGDB|RunEx
        """
        ret_val, entry_buffer.value = gxapi_cy.WrapSYS._get_dot_net_gx_entries(GXContext._get_tls_geo(), gx.encode(), entry_buffer.value.encode())
        return ret_val



    @classmethod
    def send_general_message(cls, cl, info):
        """
        Send a general information message to all listeners

        :param cl:    Message Class
        :param info:  Message Info
        :type  cl:    str
        :type  info:  str

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSYS._send_general_message(GXContext._get_tls_geo(), cl.encode(), info.encode())



    @classmethod
    def write_debug_log(cls, log):
        """
        This method writes out information to the output debugging log file
        (in temp folder) or output window.

        :param log:  String to Write out
        :type  log:  str

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSYS._write_debug_log(GXContext._get_tls_geo(), log.encode())



    @classmethod
    def log_script_run(cls, location):
        """
        This method logs that a script was run

        :param location:  Location that launched the script
        :type  location:  str

        .. versionadded:: 9.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSYS._log_script_run(GXContext._get_tls_geo(), location.encode())




# Multithreading


    @classmethod
    def get_thread_id(cls):
        """
        Get the ID the current thread.

        :returns:    x  - ID
        :rtype:      int

        ..
versionadded:: 5.1.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** In a single threaded application this will always be 0.
        """
        ret_val = gxapi_cy.WrapSYS._get_thread_id(GXContext._get_tls_geo())
        return ret_val



    @classmethod
    def run_multi_user_script(cls, script, users, iterations, wait_min_time, wait_max_time, ramp_up_time):
        """
        Execute a script using multithreaded users

        :param script:         Script to run
        :param users:          Number of users to run
        :param iterations:     Number of iterations to run (for each user)
        :param wait_min_time:  Minimum wait time between iterations (0 for none)
        :param wait_max_time:  Maximum wait time between iterations (0 for none)
        :param ramp_up_time:   Ramp up time for users (0 for all users start immediately)
        :type  script:         str
        :type  users:          int
        :type  iterations:     int
        :type  wait_min_time:  int
        :type  wait_max_time:  int
        :type  ramp_up_time:   int

        .. versionadded:: 5.1.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** No access is provided in the script to EMAPS or EDBS. Users
        must ensure that the resources that are shared are protected.
        """
        gxapi_cy.WrapSYS._run_multi_user_script(GXContext._get_tls_geo(), script.encode(), users, iterations, wait_min_time, wait_max_time, ramp_up_time)




# Parameter


    @classmethod
    def clear_group(cls, group):
        """
        Clear current contents of a group

        :param group:  Group to clear
        :type  group:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSYS._clear_group(GXContext._get_tls_geo(), group.encode())



    @classmethod
    def clear_group_parm(cls, group):
        """
        Clears all parameters in a specified group.

        :param group:  String
        :type  group:  str

        ..
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._clear_group_parm(GXContext._get_tls_geo(), group.encode()) @classmethod def clear_parm(cls): """ Clears all paramters. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._clear_parm(GXContext._get_tls_geo()) @classmethod def default_int(cls, group, field, val): """ Allows a default int to be set. :param group: Group Name :param field: Parameter Name :param val: Int Value to Set :type group: str :type field: str :type val: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The value will only be set if there is no existing setting. """ gxapi_cy.WrapSYS._default_int(GXContext._get_tls_geo(), group.encode(), field.encode(), val) @classmethod def default_double(cls, group, field, val): """ Allows a default real to be set. :param group: Group Name :param field: Parameter Name :param val: Real Value to Set :type group: str :type field: str :type val: float .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The value will only be set if there is no existing setting. """ gxapi_cy.WrapSYS._default_double(GXContext._get_tls_geo(), group.encode(), field.encode(), val) @classmethod def default_string(cls, group, field, val): """ Allows a default string to be set. :param group: Group Name :param field: Parameter Name :param val: String to Set it To :type group: str :type field: str :type val: str .. 
versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The value will only be set if there is no existing setting. """ gxapi_cy.WrapSYS._default_string(GXContext._get_tls_geo(), group.encode(), field.encode(), val.encode()) @classmethod def get_pattern(cls, group, pat, size, thick, dense, col, back_col): """ Gets pattern parameters from the parameter block. :param group: Input group name :param pat: Pattern :param size: Size, :param thick: Thick (0-100) :param dense: Density, :param col: Pattern Color :param back_col: Background Color :type group: str :type pat: int_ref :type size: float_ref :type thick: int_ref :type dense: float_ref :type col: int_ref :type back_col: int_ref .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Gets all the user-definable pattern parameters from a specified group. Parameters are: "PAT_NUMBER" 0 is solid fill (default) "PAT_SIZE" pattern tile size in mm. (can return `iDUMMY <geosoft.gxapi.iDUMMY>`) "PAT_THICKNESS" pattern line thickness in percent of the tile size. valid range is 0-100. "PAT_DENSITY" Tile spacing. A value of 1 means tiles are laid with no overlap. A value of 2 means they overlap each other. "PAT_COLOR" The color value. "PAT_BACKCOLOR" Background color value. Returned values may be DUMMY, but will be acceptable for use with the `GXGUI.color_form <geosoft.gxapi.GXGUI.color_form>` function, to set defaults. """ pat.value, size.value, thick.value, dense.value, col.value, back_col.value = gxapi_cy.WrapSYS._get_pattern(GXContext._get_tls_geo(), group.encode(), pat.value, size.value, thick.value, dense.value, col.value, back_col.value) @classmethod def get_reg(cls, reg, group): """ Get `GXREG <geosoft.gxapi.GXREG>` parameters. 
:param reg: `GXREG <geosoft.gxapi.GXREG>` to add parameters to :param group: Group name wanted :type reg: GXREG :type group: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._get_reg(GXContext._get_tls_geo(), reg, group.encode()) @classmethod def gt_string(cls, group, field, buff): """ This method returns a string in the parameter block. :param group: Group Name :param field: Parameter Name :param buff: Buffer to place the string into :type group: str :type field: str :type buff: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the setting exits it is placed in the buffer, otherwise the buffer will have zero length """ buff.value = gxapi_cy.WrapSYS._gt_string(GXContext._get_tls_geo(), group.encode(), field.encode(), buff.value.encode()) @classmethod def exist_int(cls, group, field): """ This method checks to see if a int parameter exists. :param group: Group Name :param field: Parameter Name :type group: str :type field: str :returns: 1 - Yes 0 - No :rtype: int .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._exist_int(GXContext._get_tls_geo(), group.encode(), field.encode()) return ret_val @classmethod def exist_double(cls, group, field): """ This method checks to see if a real parameter exists. :param group: Group Name :param field: Parameter Name :type group: str :type field: str :returns: 1 - Yes 0 - No :rtype: int .. 
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._exist_double(GXContext._get_tls_geo(), group.encode(), field.encode()) return ret_val @classmethod def exist_string(cls, group, field): """ This method checks to see if a string parameter exists. :param group: Group Name :param field: Parameter Name :type group: str :type field: str :returns: 1 - Yes 0 - No :rtype: int .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._exist_string(GXContext._get_tls_geo(), group.encode(), field.encode()) return ret_val @classmethod def get_int(cls, group, field): """ This method returns an int from the parameter block. :param group: Group Name :param field: Parameter Name :type group: str :type field: str :returns: Int Value, `iDUMMY <geosoft.gxapi.iDUMMY>` if the parameter is not set. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._get_int(GXContext._get_tls_geo(), group.encode(), field.encode()) return ret_val @classmethod def get_yes_no(cls, group, field): """ Check a YES/NO Setting :param group: Group Name :param field: Parameter Name :type group: str :type field: str :returns: 1 - if first char in setting is a "Y" or"y" 0 - Otherwise :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._get_yes_no(GXContext._get_tls_geo(), group.encode(), field.encode()) return ret_val @classmethod def replace_string(cls, str_val, output, group): """ Replace "% %" tokens in a string with parameter values :param str_val: String to filter replace :param output: Output string :param group: Default group name :type str_val: str :type output: str_ref :type group: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If parameter does not exist, the token is removed. Full parameter names, such as "%group.name%", are used as-is. Partial parameter names, such as "%name%" will have the default group attached. """ output.value = gxapi_cy.WrapSYS._replace_string(GXContext._get_tls_geo(), str_val.encode(), output.value.encode(), group.encode()) @classmethod def load_parm(cls, file, groups): """ Reads parameters from a file. :param file: Name of the File to read from :param groups: Group Name to write read ("" for all groups) :type file: str :type groups: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._load_parm(GXContext._get_tls_geo(), file.encode(), groups.encode()) @classmethod def get_double(cls, group, field): """ This method returns a real from the parameter block. :param group: Group Name :param field: Parameter Name :type group: str :type field: str :returns: Real Value, `rDUMMY <geosoft.gxapi.rDUMMY>` if parameter not set. :rtype: float .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._get_double(GXContext._get_tls_geo(), group.encode(), field.encode()) return ret_val @classmethod def save_parm(cls, file, mode, groups): """ Writes out one group (or all groups) to a file. :param file: Name of the File :param mode: 0 - New file, 1 - Append :param groups: Group Name to write out ("" for all groups) :type file: str :type mode: int :type groups: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._save_parm(GXContext._get_tls_geo(), file.encode(), mode, groups.encode()) @classmethod def filter_parm_group(cls, group, add): """ Controls filtering of specific group during logging. :param group: Group Name :param add: 0 - Clear filter, 1 - Add filter :type group: str :type add: int .. versionadded:: 9.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is useful to prevent certain utility GX parameters from being recorded during GS script runs where the parameters does not influence the actual script execution. """ gxapi_cy.WrapSYS._filter_parm_group(GXContext._get_tls_geo(), group.encode(), add) @classmethod def set_int(cls, group, field, val): """ This method sets an int in the parameter block. :param group: Group Name :param field: Parameter Name :param val: Int Value to Set :type group: str :type field: str :type val: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_int(GXContext._get_tls_geo(), group.encode(), field.encode(), val) @classmethod def set_pattern(cls, group, pat, size, thick, dense, col, back_col): """ Sets pattern parameters in the parameter block. :param group: Group Name :param pat: Pattern :param size: Size. Input `GS_R8DM <geosoft.gxapi.GS_R8DM>` to use default :param thick: Thickness (0-100). Input `GS_S4DM <geosoft.gxapi.GS_S4DM>` to use default :param dense: Density. Input `GS_R8DM <geosoft.gxapi.GS_R8DM>` to use default :param col: Pattern Color :param back_col: Background Color. Can be `C_TRANSPARENT <geosoft.gxapi.C_TRANSPARENT>` :type group: str :type pat: int :type size: float :type thick: int :type dense: float :type col: int :type back_col: int .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Sets all the user-definable pattern parameters to a specified group. Parameters are: "PAT_NUMBER" 0 is solid fill "PAT_SIZE" pattern tile size in mm. "PAT_THICKNESS" pattern line thickness in percent of the tile size. valid range is 0-100. "PAT_DENSITY" Tile spacing. A value of 1 means tiles are laid with no overlap. A value of 2 means they overlap each other. "PAT_COLOR" The color value. "PAT_BACKCOLOR" Background color value. Input values may be DUMMY. Designed for use along with the sPatternForm_GUI function. """ gxapi_cy.WrapSYS._set_pattern(GXContext._get_tls_geo(), group.encode(), pat, size, thick, dense, col, back_col) @classmethod def set_double(cls, group, field, val): """ This method Sets a real in the parameter block. :param group: Group Name :param field: Parameter Name :param val: Real :type group: str :type field: str :type val: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_double(GXContext._get_tls_geo(), group.encode(), field.encode(), val) @classmethod def set_reg(cls, reg): """ Copy contents of a `GXREG <geosoft.gxapi.GXREG>` to current parameters. :param reg: `GXREG <geosoft.gxapi.GXREG>` object :type reg: GXREG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_reg(GXContext._get_tls_geo(), reg) @classmethod def set_string(cls, group, field, val): """ This method sets a string in the parameter block. :param group: Group Name :param field: Parameter Name :param val: String to Set it To :type group: str :type field: str :type val: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._set_string(GXContext._get_tls_geo(), group.encode(), field.encode(), val.encode()) # Progress Control @classmethod def check_stop(cls): """ This method is called at convenient points in the GX code to check if the user has asked the script to stop running. This method should be called by any GX program that may take a while to complete. :returns: 0 - No 1 - Yes, Terminate processing. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._check_stop(GXContext._get_tls_geo()) return ret_val @classmethod def prog_state(cls): """ Return current progress state (On/Off) :rtype: int .. 
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is useful, for instance, when calling one GX from another, especially if it is called multiple times in a loop. The called GX may turn the progress ON/OFF on its own, which means any progress tracking in the calling GX is disrupted. The called GX should use this function to determine the original progress state, and not turn off progress if it was already on. Returns 0 - Progress is on - Progress is off """ ret_val = gxapi_cy.WrapSYS._prog_state(GXContext._get_tls_geo()) return ret_val @classmethod def prog_name(cls, name, reset): """ This method allows you to name the current process being displayed by the progress bar. This method has no affect if no progress bar exists. :param name: New Process Name :param reset: 0 - Change the Name but do not change the percentage 1 - Change the Name and Reset Percent to 0 2 - Change the Name but no Percent Bar :type name: str :type reset: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._prog_name(GXContext._get_tls_geo(), name.encode(), reset) @classmethod def progress(cls, on): """ This method allows you to turn on the Progress BAR ON/OFF. Once the progress bar is on, use the UpdateProg method to drive it. :param on: 0 - Turn Progress Bar OFF 1 - Turn Progress Bar ON :type on: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._progress(GXContext._get_tls_geo(), on) @classmethod def prog_update(cls, perc): """ This method drives the Progress Bar. It is passed a percentage and will update the bar to reflect that percentage. :param perc: Percentage Completed (0-100). :type perc: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._prog_update(GXContext._get_tls_geo(), perc) @classmethod def prog_update_l(cls, v1, v2): """ Updates progress bar based on count and maxcount. :param v1: Count :param v2: Max count >= count :type v1: int :type v2: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._prog_update_l(GXContext._get_tls_geo(), v1, v2) # Registry @classmethod def get_sys_info(cls, sys_info, info): """ Get system information :param sys_info: :ref:`SYS_INFO` :param info: Returned setting :type sys_info: int :type info: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ info.value = gxapi_cy.WrapSYS._get_sys_info(GXContext._get_tls_geo(), sys_info, info.value.encode()) @classmethod def registry_get_val(cls, domain, key, sub_key, value): """ Get a registry value :param domain: :ref:`REG_DOMAIN` :param key: Key to set :param sub_key: Value name within key :param value: String for value data :type domain: int :type key: str :type sub_key: str :type value: str_ref :returns: 0 if value exists 1 if value does not exist :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, value.value = gxapi_cy.WrapSYS._registry_get_val(GXContext._get_tls_geo(), domain, key.encode(), sub_key.encode(), value.value.encode()) return ret_val @classmethod def registry_delete_key(cls, domain, key): """ Delete a registry value :param domain: :ref:`REG_DOMAIN` :param key: Key to delete :type domain: int :type key: str :returns: 0 - Ok 1 - Error :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All sub-keys and values will be deleted if they exist. """ ret_val = gxapi_cy.WrapSYS._registry_delete_key(GXContext._get_tls_geo(), domain, key.encode()) return ret_val @classmethod def registry_delete_val(cls, domain, key, value_name): """ Delete a registry value :param domain: :ref:`REG_DOMAIN` :param key: Key :param value_name: Name of value to delete :type domain: int :type key: str :type value_name: str :returns: 0 - Ok 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._registry_delete_val(GXContext._get_tls_geo(), domain, key.encode(), value_name.encode()) return ret_val @classmethod def registry_set_val(cls, domain, key, sub_key, value): """ Set/create a registry value :param domain: :ref:`REG_DOMAIN` :param key: Key to set :param sub_key: Name of Subkey within key :param value: Value for Subkey :type domain: int :type key: str :type sub_key: str :type value: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function will create the subkey and key if either do not already exist. """ gxapi_cy.WrapSYS._registry_set_val(GXContext._get_tls_geo(), domain, key.encode(), sub_key.encode(), value.encode()) # Temporary File @classmethod def destroy_ptmp(cls, ptmp): """ Destroy PTMP. :param ptmp: PTMP object to destroy :type ptmp: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._destroy_ptmp(GXContext._get_tls_geo(), ptmp) @classmethod def get_ptmp(cls, ptmp): """ Get temporary saves copy of parameter block. 
:param ptmp: Saved with Save_PTMP_SYS :type ptmp: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `save_ptmp <geosoft.gxapi.GXSYS.save_ptmp>`, `destroy_ptmp <geosoft.gxapi.GXSYS.destroy_ptmp>` """ gxapi_cy.WrapSYS._get_ptmp(GXContext._get_tls_geo(), ptmp) @classmethod def save_ptmp(cls, groups): """ Save a temporary copy of the parameter block. :param groups: Group Name to save, "" for everything. :type groups: str :returns: PTMP handle. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All PTMP instances will be destroyed on exit. .. seealso:: `get_ptmp <geosoft.gxapi.GXSYS.get_ptmp>`, `destroy_ptmp <geosoft.gxapi.GXSYS.destroy_ptmp>` """ ret_val = gxapi_cy.WrapSYS._save_ptmp(GXContext._get_tls_geo(), groups.encode()) return ret_val # Termination @classmethod def abort(cls, message): """ This method terminates the execution of a script. A message giving the reason for the abort will be displayed along with the line number where we stopped in the script. :param message: Message to display :type message: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._abort(GXContext._get_tls_geo(), message.encode()) @classmethod def assert_(cls, exp): """ Abort with GX line number if not true. :param exp: Expression to evaluate (0 aborts) :type exp: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._assert_(GXContext._get_tls_geo(), exp) @classmethod def exit_(cls): """ This method terminates the execution of a script in a regular fashion with no error messages displayed. .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._exit_(GXContext._get_tls_geo()) @classmethod def cancel_(cls): """ This method indicates that the GX program terminated without doing anything of interest and should be ignored. In particular, the GX will not be logged in a recorded GS. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSYS._cancel_(GXContext._get_tls_geo()) # Timing @classmethod def delay(cls, secs): """ Idle delay method. :param secs: Decimal Seconds to delay :type secs: float :returns: Success if the delay has elapsed. :rtype: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._delay(GXContext._get_tls_geo(), secs) return ret_val @classmethod def get_timer(cls, flag, start_time, elapsed_time): """ Return the elapsed time since the established time. :param flag: 1 - set start time, 0 - return elapsed time :param start_time: Start time in seconds :param elapsed_time: Elapsed time in seconds :type flag: int :type start_time: float_ref :type elapsed_time: float_ref :returns: Success if the delay has elapsed. :rtype: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 1st time through call the method with a flag of 1 to identify the count start time, subsequent times the time will be the time elapsed since the queried start time. Do so by settign the flag to 0. 
""" ret_val, start_time.value, elapsed_time.value = gxapi_cy.WrapSYS._get_timer(GXContext._get_tls_geo(), flag, start_time.value, elapsed_time.value) return ret_val # User Interaction @classmethod def display_help(cls, group, topic): """ Display the help dialog with the specified topic highlighted :param group: Group string to lookup in gxhelp.ini :param topic: Index string to lookup in gxhelp.ini :type group: str :type topic: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapSYS._display_help(GXContext._get_tls_geo(), group.encode(), topic.encode()) @classmethod def display_help_topic(cls, file, topic): """ Display the help dialog without topic lookup in INI files :param file: Help File (blank for default) :param topic: Help Topic :type file: str :type topic: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapSYS._display_help_topic(GXContext._get_tls_geo(), file.encode(), topic.encode()) @classmethod def display_int(cls, title, n): """ Display an integer. :param title: Title of the Window :param n: Number :type title: str :type n: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapSYS._display_int(GXContext._get_tls_geo(), title.encode(), n) @classmethod def display_message(cls, title, message): """ Display a user message. :param title: Title of the Window :param message: Message String :type title: str :type message: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapSYS._display_message(GXContext._get_tls_geo(), title.encode(), message.encode()) @classmethod def display_double(cls, title, real): """ Display a real number. :param title: Title of the Window :param real: Number :type title: str :type real: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapSYS._display_double(GXContext._get_tls_geo(), title.encode(), real) @classmethod def display_question(cls, title, message): """ Display a YES/NO type question. This method waits for the user to hit YES or NO. :param title: Title of the window :param message: Message String :type title: str :type message: str :returns: 0 - user selected No 1 - user selected YES :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapSYS._display_question(GXContext._get_tls_geo(), title.encode(), message.encode()) return ret_val @classmethod def display_question_with_cancel(cls, title, message): """ Display a YES/NO/CANCEL type question. This method waits for the user to hit YES or NO or CANCEL. :param title: Title of the window :param message: Message String :type title: str :type message: str :returns: 0 - user selected No 1 - user selected YES 2 - user selected CANCEL :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapSYS._display_question_with_cancel(GXContext._get_tls_geo(), title.encode(), message.encode()) return ret_val @classmethod def display_task_dialog_ui(cls, title, main_instruction, content, common_buttons, custom_button_lst, icon, footer, footer_icon, verification_check_text, verification_checked, expanded_information, collapsed_control_text, expanded_control_text): """ Show a Windows TaskDialog UI (see https://msdn.microsoft.com/en-us/library/windows/desktop/bb760441(v=vs.85).aspx). :param title: Title :param main_instruction: Main instruction (empty string for none) :param content: Content (empty string for none) :param common_buttons: Common Buttons, one of :ref:`TD_BUTTON` :param custom_button_lst: Optional `GXLST <geosoft.gxapi.GXLST>` of custom buttons. Name in `GXLST <geosoft.gxapi.GXLST>` will be used for button text, while value should be an int to return. Pass (`GXLST <geosoft.gxapi.GXLST>`)0 to only use standard button flags. 
:param icon: Icon :ref:`TD_ICON` :param footer: Footer (empty string for none) :param footer_icon: Footer Icon :ref:`TD_ICON` :param verification_check_text: Verification checkbox text (empty string for none) :param verification_checked: Verification checkbox checked (in/out) :param expanded_information: Expanded information (empty string for none) :param collapsed_control_text: Collapsed control text for expanded information (empty string for default; 'More') :param expanded_control_text: Expanded control text for expanded information (empty string for default; 'Less') :type title: str :type main_instruction: str :type content: str :type common_buttons: int :type custom_button_lst: GXLST :type icon: int :type footer: str :type footer_icon: int :type verification_check_text: str :type verification_checked: int_ref :type expanded_information: str :type collapsed_control_text: str :type expanded_control_text: str :returns: Button ID. One of :ref:`TD_ID` or the int value from `GXLST <geosoft.gxapi.GXLST>` of custom buttons. :rtype: int .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, verification_checked.value = gxapi_cy.WrapSYS._display_task_dialog_ui(GXContext._get_tls_geo(), title.encode(), main_instruction.encode(), content.encode(), common_buttons, custom_button_lst, icon, footer.encode(), footer_icon, verification_check_text.encode(), verification_checked.value, expanded_information.encode(), collapsed_control_text.encode(), expanded_control_text.encode()) return ret_val @classmethod def interactive(cls): """ Checks to see if you should run interactively. :returns: 0 - Run in batch mode only 1 - Run Interactively only :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._interactive(GXContext._get_tls_geo()) return ret_val @classmethod def testing_system_mode(cls): """ Checks to see if the GX is running in the Geosoft testing system. :returns: 0 - Normal operation 1 - Running in the Geosoft testing system. :rtype: int .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._testing_system_mode(GXContext._get_tls_geo()) return ret_val @classmethod def prompt(cls, title, result): """ Asks the User to enter a string. :param title: Title of the window :param result: Buffer to place the user's string :type title: str :type result: str_ref :returns: 0 - User hit OK 1 - user hit CANCEL :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The User string is displayed as the default value in the prompt. Empty the user string if no default is needed. """ ret_val, result.value = gxapi_cy.WrapSYS._prompt(GXContext._get_tls_geo(), title.encode(), result.value.encode()) return ret_val @classmethod def script(cls): """ Checks to see if we are running inside OMS (script mode) :returns: 0 - Normal mode 1 - Scripting mode A number of functions can only be run from inside Oasis montaj (such as `GXEMAP.get_display_area_raw <geosoft.gxapi.GXEMAP.get_display_area_raw>`), because they require an actual window object, such as an editable database or map. Use this function to prevent calls :rtype: int .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._script(GXContext._get_tls_geo()) return ret_val @classmethod def script_record(cls): """ Checks to see if we are in scripting recording mode :returns: 0 - Normal mode 1 - Recording mode :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._script_record(GXContext._get_tls_geo()) return ret_val @classmethod def set_cursor(cls, cursor): """ Set the cursor on the display. :param cursor: Cursor Names :type cursor: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Possible Cursors: Normal, Horiz, Vert, Moving, Cross, Hand, NoEdit, Sun, View, Group, ViewSel, GroupSel, BoxSelect, Shadow, Link, Line, PolyLine, Polygon, Ellipse, Rectangle, Text, Symbol, Zoom, Pan, Rotate, InteractiveZoom, PolyFill, GetFill, SnapPoint, SnapLine, SnapOnPoint, SnapOnLine, NPolygon, ExcludeRect, ExcludePoly, ExcludeNPoly, AddVertex, DelVertex, GeneralAdd and GeneralDelete """ gxapi_cy.WrapSYS._set_cursor(GXContext._get_tls_geo(), cursor.encode()) @classmethod def set_info_line(cls, message): """ Display a message on the information line at the left bottom corner of the OAISIS montaj application. :param message: Message String :type message: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" gxapi_cy.WrapSYS._set_info_line(GXContext._get_tls_geo(), message.encode()) @classmethod def set_interactive(cls, mode): """ Sets the interactive mode. :param mode: 0 - interactive off 1 - interative on :type mode: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Call to `interactive <geosoft.gxapi.GXSYS.interactive>` will return the value set here. .. seealso:: `interactive <geosoft.gxapi.GXSYS.interactive>`, `run_gx <geosoft.gxapi.GXSYS.run_gx>` and `run_gs <geosoft.gxapi.GXSYS.run_gs>` """ gxapi_cy.WrapSYS._set_interactive(GXContext._get_tls_geo(), mode) # Workspace @classmethod def get_workspace_reg(cls, reg): """ Get a copy of the workspace `GXREG <geosoft.gxapi.GXREG>`; :param reg: `GXREG <geosoft.gxapi.GXREG>` object :type reg: GXREG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The workspace `GXREG <geosoft.gxapi.GXREG>` is separate from the reg used to store `GXSYS <geosoft.gxapi.GXSYS>` parameters. Because `get_workspace_reg <geosoft.gxapi.GXSYS.get_workspace_reg>` returns a copy of the workspace `GXREG <geosoft.gxapi.GXREG>`, and not the workspace `GXREG <geosoft.gxapi.GXREG>` itself, you must call `set_workspace_reg <geosoft.gxapi.GXSYS.set_workspace_reg>` if you make changes to your own `GXREG <geosoft.gxapi.GXREG>` object and you wish them to take effect in the workspace `GXREG <geosoft.gxapi.GXREG>`. """ gxapi_cy.WrapSYS._get_workspace_reg(GXContext._get_tls_geo(), reg) @classmethod def set_workspace_reg(cls, reg): """ Set the workspace `GXREG <geosoft.gxapi.GXREG>`; :param reg: `GXREG <geosoft.gxapi.GXREG>` object :type reg: GXREG .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The workspace `GXREG <geosoft.gxapi.GXREG>` is separate from the reg used to store `GXSYS <geosoft.gxapi.GXSYS>` parameters. Because `get_workspace_reg <geosoft.gxapi.GXSYS.get_workspace_reg>` returns a copy of the workspace `GXREG <geosoft.gxapi.GXREG>`, and not the workspace `GXREG <geosoft.gxapi.GXREG>` itself, you must call `set_workspace_reg <geosoft.gxapi.GXSYS.set_workspace_reg>` if you make changes to your own `GXREG <geosoft.gxapi.GXREG>` object and you wish them to take effect in the workspace `GXREG <geosoft.gxapi.GXREG>` """ gxapi_cy.WrapSYS._set_workspace_reg(GXContext._get_tls_geo(), reg) # String Encryption @classmethod def encrypt_string(cls, input, output, key): """ Encrypts a string for secure storage in configuration files or in the workspace parameters. :param input: Input string for encryption. :param output: Output buffer for encrypted result. :param key: :ref:`SYS_ENCRYPTION_KEY` :type input: str :type output: str_ref :type key: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ output.value = gxapi_cy.WrapSYS._encrypt_string(GXContext._get_tls_geo(), input.encode(), output.value.encode(), key) @classmethod def decrypt_string(cls, input, output, key): """ Decrypts a string that has been previously encrypted by `encrypt_string <geosoft.gxapi.GXSYS.encrypt_string>`. :param input: Input string for decryption. :param output: Output buffer for decrypted result. :param key: :ref:`SYS_ENCRYPTION_KEY` :type input: str :type output: str_ref :type key: int .. 
versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ output.value = gxapi_cy.WrapSYS._decrypt_string(GXContext._get_tls_geo(), input.encode(), output.value.encode(), key) @classmethod def is_encrypted_string(cls, input): """ Checks whether the specified string was encrypted by `encrypt_string <geosoft.gxapi.GXSYS.encrypt_string>`. :param input: Input string to inspect. :type input: str :returns: 0 (false) or non-zero (true) :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSYS._is_encrypted_string(GXContext._get_tls_geo(), input.encode()) return ret_val # GX Debugger @classmethod def disable_gx_debugger(cls): """ Disable GX Debugger `GXGUI <geosoft.gxapi.GXGUI>` if active .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All breakpoints will be cleared by this call. """ gxapi_cy.WrapSYS._disable_gx_debugger(GXContext._get_tls_geo()) @classmethod def enable_gx_debugger(cls, src_dir, first_gx): """ Enable GX Debugger `GXGUI <geosoft.gxapi.GXGUI>` :param src_dir: Path that will be scanned recursively for GXC source files :param first_gx: Name of gx where first breakpoint should be set :type src_dir: str :type first_gx: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Takes as input two strings one a path that will be scanned recursively for GXC source files and a second string without a path of the GX where the first breakpoint should be set in (i.e. "gxname.gx"). The source of the GX should be found in the path (e.g. 
<path>\\somewhere\\gxname.gxc) and a breakpoint will be set on the first executing line of this GX. Make sure the GX binary is newer than the source file, otherwise unexpected results may occur. As soon as the GX is run the `GXGUI <geosoft.gxapi.GXGUI>` will become visible and it will be possible to set more breakpoints in any of the GXC files found in the path. """ gxapi_cy.WrapSYS._enable_gx_debugger(GXContext._get_tls_geo(), src_dir.encode(), first_gx.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXTPAT.rst .. _GXTPAT: GXTPAT class ================================== .. autoclass:: geosoft.gxapi.GXTPAT :members: .. _TPAT_STRING_SIZE: TPAT_STRING_SIZE constants ----------------------------------------------------------------------- Default string sizes. .. autodata:: geosoft.gxapi.TPAT_CODE_SIZE :annotation: .. autoattribute:: geosoft.gxapi.TPAT_CODE_SIZE .. autodata:: geosoft.gxapi.TPAT_LABEL_SIZE :annotation: .. autoattribute:: geosoft.gxapi.TPAT_LABEL_SIZE .. autodata:: geosoft.gxapi.TPAT_DESC_SIZE :annotation: .. autoattribute:: geosoft.gxapi.TPAT_DESC_SIZE .. autodata:: geosoft.gxapi.TPAT_SYMBFONT_SIZE :annotation: .. autoattribute:: geosoft.gxapi.TPAT_SYMBFONT_SIZE <file_sep>/docs/GXGEOSTRING.rst .. _GXGEOSTRING: GXGEOSTRING class ================================== .. autoclass:: geosoft.gxapi.GXGEOSTRING :members: .. _GEOSTRING_OPEN: GEOSTRING_OPEN constants ----------------------------------------------------------------------- Open Modes .. autodata:: geosoft.gxapi.GEOSTRING_OPEN_READ :annotation: .. autoattribute:: geosoft.gxapi.GEOSTRING_OPEN_READ .. autodata:: geosoft.gxapi.GEOSTRING_OPEN_READWRITE :annotation: .. autoattribute:: geosoft.gxapi.GEOSTRING_OPEN_READWRITE .. 
_SECTION_ORIENTATION: SECTION_ORIENTATION constants ----------------------------------------------------------------------- Section orientation types .. autodata:: geosoft.gxapi.SECTION_ORIENTATION_UNKNOWN :annotation: .. autoattribute:: geosoft.gxapi.SECTION_ORIENTATION_UNKNOWN .. autodata:: geosoft.gxapi.SECTION_ORIENTATION_PLAN :annotation: .. autoattribute:: geosoft.gxapi.SECTION_ORIENTATION_PLAN .. autodata:: geosoft.gxapi.SECTION_ORIENTATION_SECTION :annotation: .. autoattribute:: geosoft.gxapi.SECTION_ORIENTATION_SECTION .. autodata:: geosoft.gxapi.SECTION_ORIENTATION_CROOKED :annotation: .. autoattribute:: geosoft.gxapi.SECTION_ORIENTATION_CROOKED .. autodata:: geosoft.gxapi.SECTION_ORIENTATION_GMSYS :annotation: .. autoattribute:: geosoft.gxapi.SECTION_ORIENTATION_GMSYS <file_sep>/geosoft/gxpy/grid.py """ Geosoft grid and image handling, including all `supported file formats <https://geosoftgxdev.atlassian.net/wiki/display/GXDEV92/Grid+File+Name+Decorations>`_ . :Classes: :`Grid`: grid dataset :Constants: :FILE_READ: 0 open for read, files are not changed :FILE_READWRITE: 1 open for read and write, files can be changed :FILE_NEW: 2 new grid file, accompanied by `overwrite=` parameter .. seealso:: :mod:`geosoft.gxpy.grid_utility`, :mod:`geosoft.gxpy.grid_fft`, :class:`geosoft.gxapi.GXIMG`, :class:`geosoft.gxapi.GXIMU` .. note:: Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_grid.py>`_ """ import os import numpy as np import math import geosoft import geosoft.gxapi as gxapi from . import gx as gx from . import coordinate_system as gxcs from . import vv as gxvv from . import utility as gxu from . import agg as gxagg from . import geometry as gxgm from . import map as gxmap from . import grid_utility as gxgrdu from . import view as gxview from . 
import gdb as gxgdb __version__ = geosoft.__version__ FILE_READ = 0 FILE_READWRITE = 1 FILE_NEW = 2 def _t(s): return geosoft.gxpy.system.translate(s) class GridException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.grid`. .. versionadded:: 9.1 """ pass def reopen(g, dtype=None, mode=FILE_READWRITE): """ Reopen a grid to access the grid as an existing grid. Some gxapi.GXIMU methods will not work with grids open as new grids. This method closes the grid and reopens in the specific mode :param g: `Grid` instance :param dtype: data type, None to match the data type of the grid being reopened :param mode: `FILE_READWRITE` (default) or `FILE_READ` :return: new `Grid` instance .. versionadded:: 9.4 """ if dtype is None: dtype = g.dtype dfn = g.file_name_decorated delete_set = g.remove_on_close g.delete_files(False) g.close() g = Grid.open(dfn, dtype=dtype, mode=mode) if delete_set: g.delete_files() return g def name_parts(name): """ Return folder, undecorated file name + ext, file root, ext, decorations. If extension is not specified, ".grd" assumed For example: .. code:: >>> import geosoft.gxpy.grid as gxgrd >>> namep = gxgrd.name_parts("f:/someFolder/name.grd(GRD;TYPE=SHORT)") >>> print(namep) ('f:/someFolder/','name.grd','name','.grd','(GRD;TYPE=SHORT)') .. versionadded:: 9.1 """ path = os.path.abspath(name) fn = os.path.dirname(path) root, ext = os.path.splitext(os.path.basename(path)) if '(' in ext: ext, dec = ext.split('(') if ')' in dec: dec = dec.split(')')[0] else: dec = '' if not ext: if (not dec) or (dec[:3].upper() == 'GRD'): ext = '.grd' name = root + ext return fn, name, root, ext, dec def decorate_name(name, decorations=''): """ Properly decorate a grid name. :param name: file name :param decorations: file decorations, semicolon delimited :returns: decorated file name .. 
versionadded:: 9.1 """ if name is None: return None root, ext = os.path.splitext(name) dec = decorations.strip() if dec: d = decorations.lstrip('(') end = d.rfind(')') if end != -1: d = d[:end] ext = ext.split('(')[0] return '{}{}({})'.format(root, ext, d) else: if ext.lower() == '.grd': return '{}{}(GRD)'.format(root, ext) else: return name def delete_files(file_name): """ Delete all files associates with this grid name. :param file_name: name of the grid file .. versionadded:: 9.2 """ if file_name is not None: fn = name_parts(file_name) file_name = os.path.join(fn[0], fn[1]) ext = fn[3] gxu.delete_file(file_name) gxu.delete_file(file_name + '.gi') gxu.delete_file(file_name + '.xml') # remove shaded files associated with this grid file_s = os.path.join(fn[0], fn[1].replace('.', '_')) + '_s.grd' gxu.delete_file(file_s) gxu.delete_file(file_s + '.gi') gxu.delete_file(file_s + '.xml') # hgd files if ext == '.hgd': for i in range(16): gxu.delete_file(file_name + str(i)) def _transform_color_int_to_rgba(np_values): np_values[np_values == gxapi.iDUMMY] = 0 a = (np.right_shift(np_values, 24) & 0xFF).astype(np.uint8) b = (np.right_shift(np_values, 16) & 0xFF).astype(np.uint8) g = (np.right_shift(np_values, 8) & 0xFF).astype(np.uint8) r = (np_values & 0xFF).astype(np.uint8) # the values for color grids actually do not contain alphas but just # 0 or 1 to indicate if the color is valid or not a[a > 0] = 255 return np.array([r, g, b, a]).transpose() class Grid(gxgm.Geometry): """ Grid and image class. :Constructors: ========================= ============================================================== :meth:`open` open an existing grid/image :meth:`new` create a new grid/image :meth:`copy` create a copy :meth:`index_window` create a windowed grid based on grid indexes :meth:`from_data_array` create a new grid from a 2d data array :meth:`minimum_curvature` create by fitting a minimum-curvature surface to located data. 
========================= ============================================================== A grid instance supports iteration that yields (x, y, z, grid_value) by points along rows. For example, the following prints the x, y, z, grid_value of every non-dummy point in a grid: .. code:: import geosoft.gxpy.grid as gxgrd with gxgrd.Grid.open('some.grd') ad g: for x, y, z, v in g: if v is not None: print(x, y, z, v) Specific grid cell values can be indexed (null grid values are None): .. code:: import geosoft.gxpy.grid as gxgrd with gxgrd.Grid.open('some.grd') as g: for ix in range(g.nx): for iy in range(g.ny): x, y, z, v = g[ix, iy] if v is not None: print(x, y, z, v) .. versionadded:: 9.1 .. versionchanged:: 9.2.1 added iterator support """ _delete_files = False _file_name = None def __enter__(self): return self def __exit__(self, _type, _value, _traceback): self.__del__() def __del__(self): if hasattr(self, '_close'): self._close() def _close(self, pop=True): def flush_hgd(hgd_temp): # convert tempory grid to an HGD file img = gxapi.GXIMG.create_file(gxapi.GS_TYPE_DEFAULT, decorate_name(hgd_temp, 'GRD'), gxapi.IMG_FILE_READONLY) gxapi.GXHGD.h_create_img(img, decorate_name(self._hgd_name, 'HGD')) img = None if hasattr(self, '_open'): if self._open: self._img = None grid_file_name = self._file_name file_name_decorated = decorate_name(self._file_name, self._decoration) if self._decoration else None if self._hgd: flush_hgd(self._file_name) if self._metadata_changed: with open(self._file_name + '.xml', 'w+') as f: f.write(gxu.xml_from_dict(self._metadata)) if file_name_decorated: gxapi.GXIMG.sync(file_name_decorated) delete_files(grid_file_name) else: if self._delete_files: delete_files(self._file_name) elif self._mode != FILE_READ: if file_name_decorated: try: gxapi.GXIMG.sync(file_name_decorated) except (geosoft.GXRuntimeError, geosoft.gxapi.GXAPIError): # Locked files, extremely large files (e.g. GXF) etc. could cause errors with the # command above. 
TODO: Do we even need it? The code below overwrites it anyway? pass if grid_file_name: if self._metadata and self._metadata_changed: with open(grid_file_name + '.xml', 'w+') as f: f.write(gxu.xml_from_dict(self._metadata)) if pop: gx.pop_resource(self._open) self._open = None self._buffer_np = None self._buffer_x = None self._buffer_y = None self._cs = None self._gxpg = None def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): if self._file_name is None: return '<class Grid>: memory ({}, {})'.format(self.nx, self.ny) else: return '<class Grid>: {} ({}, {})'.format(self.file_name_decorated, self.nx, self.ny) def __init__(self, file_name=None, in_memory=False, dtype=None, mode=None, kx=1, dim=None, overwrite=False, **kwargs): self._delete_files = False self._readonly = False self._decoration = '' if 'name' not in kwargs: if file_name: kwargs['name'] = os.path.splitext(file_name)[0] else: kwargs['name'] = '_grid_' super().__init__(**kwargs) self._hgd = False self._hgd_name = None self._metadata = None self._metadata_changed = False self._metadata_root = '' self._img = None self._buffered_row = None self._buffer_np = None self._buffered_xy = None self._buffer_x = None self._buffer_y = None self._cs = None self._gxpg = None # build a file name if in_memory: self._file_name = None else: if (file_name is None) or (len(file_name.strip()) == 0): file_name = gx.gx().temp_file('.grd(GRD)') path, file_name, root, ext, self._decoration = name_parts(file_name) self._file_name = os.path.join(path, file_name) # for an HGD file work with a temporary grid, save to HGD on closing if mode == FILE_NEW and ext.lower() == '.hgd': self._hgd = True self._hgd_name = self._file_name file_name = gx.gx().temp_file('.grd(GRD)') path, file_name, root, ext, self._decoration = name_parts(file_name) self._file_name = os.path.join(path, file_name) if mode == FILE_NEW: if dtype is None: dtype = np.float64 gxtype = gxu.gx_dtype(dtype) if in_memory: self._img = 
gxapi.GXIMG.create(gxtype, kx, dim[0], dim[1]) # Need to set the kx otherwise it will be 0 and some routines (e.g. IMU stats calc) could cause aborts self._img.opt_kx(kx) else: if not overwrite: if os.path.isfile(self._file_name): raise GridException(_t('Cannot overwrite existing grid {}'.format(self.file_name))) self._img = gxapi.GXIMG.create_new_file(gxtype, kx, dim[0], dim[1], decorate_name(self._file_name, self._decoration)) else: # open an existing grid if mode == FILE_READ: open_mode = gxapi.IMG_FILE_READONLY self._readonly = True else: mode = FILE_READWRITE open_mode = gxapi.IMG_FILE_READORWRITE self._readonly = False # always open in default type unless float or double specifically requested gxtype = gxapi.GS_TYPE_DEFAULT if dtype is not None: gxtype_from_dtype = gxu.gx_dtype(dtype) if gxtype_from_dtype in (gxapi.GS_FLOAT, gxapi.GS_DOUBLE): gxtype = gxtype_from_dtype self._img = gxapi.GXIMG.create_file(gxtype, self.file_name_decorated, open_mode) if dtype is None: dtype = gxu.dtype_gx(self._img.e_type()) self._mode = mode self._next = 0 self._next_row = 0 self._next_col = 0 self._gxtype = self._img.e_type() self._dtype = dtype self._dummy = gxu.gx_dummy(self._dtype) self._is_int = gxu.is_int(gxu.gx_dtype(self.dtype)) self._cos_rot = self._sin_rot = None self.rot = self.rot # this sets _cos_rot and _sin_rot self._open = gx.track_resource(self.__class__.__name__, self._file_name) @classmethod def open(cls, file_name, dtype=None, mode=FILE_READ, coordinate_system=None, cell_size=None, expand=None): """ Open an existing grid file. :param file_name: name of the grid file, with decorations. See `supported file formats <https://geosoftgxdev.atlassian.net/wiki/display/GXDEV92/Grid+File+Name+Decorations>`_) :param dtype: numpy data type, which will be the grid data type. 
:param mode: open mode: ================= ================================================ FILE_READ only read the file, properties cannot be changed FILE_READWRITE grid stays the same, but properties may change ================= ================================================ :param coordinate_system: desired coordinate system. The grid will be reprojected if necessary. :param cell_size: desired cell size, defaults to the current cell size. :param expand: if reprojecting or resampling the are can be expanded by this percentage to allow for curved edges in the new coordinate system space. The default expands by 1%. Set to 0 to prevent expansion. If reprojecting or setting the cell size different from the original grid, the mode must be FILE_READ. If reprojecting without setting the cell size a default cell size will be calculated in the new coordinate system that is nominally equivalent to the current cell size. .. versionadded:: 9.1 .. versionchanged:: 9.4 added reprojection support """ grd = cls(file_name, dtype=dtype, mode=mode) # determine if we need to reproject or resample repro = False if coordinate_system: if not isinstance(coordinate_system, gxcs.Coordinate_system): coordinate_system = gxcs.Coordinate_system(coordinate_system) repro = coordinate_system != grd.coordinate_system if (not repro and cell_size is not None) and ((cell_size != grd.dx) or (cell_size != grd.dy)): repro = True if repro: if mode != FILE_READ: raise GridException(_t('Mode must be FILE_READ to reproject or resample a grid.')) if cell_size is None: cell_size = gxapi.rDUMMY if expand is None: expand = gxapi.rDUMMY if not coordinate_system: coordinate_system = grd.coordinate_system grd.gximg.create_projected3(coordinate_system.gxipj, cell_size, expand) grd._cs = None grd._cos_rot = 1.0 grd._sin_rot = 0.0 return grd @classmethod def new(cls, file_name=None, properties=None, overwrite=False, in_memory=False): """ Create a new grid file. 
:param file_name: name of the grid file, None for a temporary grid. See `supported file formats <https://geosoftgxdev.atlassian.net/wiki/display/GXDEV92/Grid+File+Name+Decorations>`_) :param in_memory: Creates an in-memory grid (file_name will be ignored) :param properties: dictionary of grid properties, see :meth:`properties` :param overwrite: True to overwrite existing file .. versionadded:: 9.1 """ if properties is None: raise GridException(_t("Missing properties dictionary.")) # set basic grid properties dtype = properties.get('dtype', None) nx = properties.get('nx', 0) ny = properties.get('ny', 0) if (nx <= 0) or (ny <= 0): raise GridException(_t('Grid dimension ({},{}) must be > 0').format(nx, ny)) grd = cls(file_name, in_memory=in_memory, dtype=dtype, mode=FILE_NEW, dim=(nx, ny), overwrite=overwrite) grd.set_properties(properties) return grd @classmethod def minimum_curvature(cls, data, unit_of_measure=None, file_name=None, overwrite=False, max_segments=1000, coordinate_system=None, cs='', area=('', '', '', ''), bclip='', logopt='', logmin='', idsf='', bkd='', srd='', iwt='', edgclp='', tol='', pastol='100', itrmax='', ti='', icgr=''): """ Create a minimum-curvature surface grid from (x, y, value) located data. Reference: <NAME>, 1990, Gridding with continuous curvature splines in tension. :param data: list of [(x, y, value), ...] or a callback that returns lists, or a tuple (gdb, value_channel, x_channel, y_channel) where x_channel and y_channel, if not specified, default to the current database (x, y) channels. See below. :param unit_of_measure: string unit of measurement descriptor. :param file_name: name of the grid file, None for a temporary grid. See `supported file formats <https://geosoftgxdev.atlassian.net/wiki/display/GXDEV92/Grid+File+Name+Decorations>`_) :param overwrite: True to overwrite existing file :param max_segments: Maximum number of line segments if using a callback, defaults to 1000. 
:param coordinate_system: coordinate system Gridding parameters follow the nomenclature of the rangrid.con file: https://github.com/GeosoftInc/gxc/blob/master/reference/con_files/rangrid.con :param cs: The grid cell size in reference system units. :param area: (xmin, ymin, xmax, ymax) - grid area, default is the data limits :param bclip: 0 to use all data (default), 1 to only use data in the dat area. :param logopt: 1 for log(value) minimum cliped to log(logmin); 2 for `logmin + log(value/logmin)` for postive `value`, `-logmin - log(-value/logmin` for negative `value` :param logmin: see `logopt`, default is 1. :param idsf: low-pass desampling factor in cells, default is 1. Effectively a low-pass filter that can smooth noisy data that has clustered locations. :param bkd: Blanking distance. All grid cells farther than the blanking distance from a valid point will be blanked in the output grid. The default is the nominal sample interval, i.e. sqrt(area/#data). This parameter should normally be set to just greater than the maximum sampling interval through which interpolation is desired. If there are too many holes in the resulting grid, increase appropriately. :param srd: The maximum search radius to use for establishing the starting values for the coarse grid. The default is four times the coarse grid size defined by `icgr`. If no data is found within the maximum search radius, the mean of the data is used as the starting value. If the search radius is too small, the starting grid can be a poor approximation of the desired grid, resulting in excessive processing time. If too large, too much time will be consumed establishing the original coarse grid. :param iwt: The weighting power to use to establish the coarse starting grid. The default is 2, for inverse distance squared. There is little reason to change this from the default. :param edgclp: Edge clipping parameter, the number of grid cells to extend beyond the outside limits of the data. 
The default (-1) is not to apply edge clipping to the blanking distanced grid. Use this parameter to ensure the grid does not extend too far beyond the actual data limits, which can occur when using a large blanking distance with widely spaced data. :param tol: The tolerance required for each grid cell. The default is 0.1 percent of the range of the data. Decrease for a more accurate grid. :param pastol: The percentage of points that must meet the tolerance. The iteration process will stop when the percentage of points change by higher than this required percentage in iteration. The default is 100.0 percent. Decrease for rough data to reduce minimum curvature overshoot, and increase for a to make the grid surface more accurately match the data. Overshoot can also be controlled by increasing tension (ti). :param itrmax: Maximum number of iterations to use in solving the minimum curvature function. The default is 200 iterations. Increase for a more accurate grid. A value of 1000 is typically sufficient for maximum accuracy. :param ti: The degree of internal tension ( between 0 and 1 ). The default is no tension (0.0) which produces a true minimum curvature surface. Increasing tension can prevent overshooting of valid data in sparse areas at the expense of increased local curvature near data points. :param icgr: The course grid size relative to the final grid size. Allowable factors are 16,8,4,2 or 1. The default is 8. The optimum is a factor close to half the nominal data spacing, although in most situations the default is fine. This parameter effects the length of time it takes to find a solution. **The** `data` **parameter:** The data can be provided to the gridding algorithm either as a list array, a callback function that returns list array segments, or a `geosoft.gxpy.gdb.Geosoft_database` instance. In the case of a list or a callback, a temporary database is constructed internally. A callback is passed a sequence number, 0, 1, 2, ... 
and is expected to return a list array with each call or None when there is no more data. See the example below. When a callback is used, the `max_segments` parameter sets the maximum number of lines for the temporary database as each return from the callback will create a new line in the internal temporary database. If a database instance is passed it must be the first item in a tuple of 2 or 4 items: (gdb_instance, value_channel) or (gdb_instance, value_channel, x_channel, y_channel). In the first case the default spatial (x, y) channels in the database are assumed. Examples: .. code:: import numpy as np import geosoft.gxpy.grid as gxgrd # simple data array xyv = [(45., 10., 100), (60., 25., 77.), (50., 8., 80.)] grid = gxgrd.Grid.minimum_curvature(xyv) # or a numpy array grid = gxgrd.Grid.minimum_curvature(np.array(xyv)) # a database, grid to a cell size of 100 import geosoft.gxpy.gdb as gxgdb gdb = gxgdb.Geosoft_database.open('some_mag_data.gdb') grid = gxgrd.Grid.minimum_curvature((gdb, 'tmi'), cs=100) # a callback, used for very large data, or to feed data efficiently from some other source. nxyv = np.array([[(45., 10., 100), (60., 25., 77.), (50., 8., 81.), (55., 11., 66.)], [(20., 15., 108), (25., 5., 77.), (33., 9., np.nan), (28., 2., 22.)], [(35., 18., 110), (40., 31., 77.), (13., 4., 83.), (44., 4., 7.)]]) def feed_data(n): if n >= len(nxyv): return None return nxyv[n] grid = gxgrd.Grid.minimum_curvature(feed_data, cs=1.) .. 
versionadded:: 9.4 """ def gdb_from_callback(callback): _gdb = gxgdb.Geosoft_gdb.new(max_lines=max_segments) channels = ('x', 'y', 'v') il = 0 xyz_list = callback(il) while xyz_list is not None: _gdb.write_line('L{}'.format(il), xyz_list, channels=channels) il += 1 xyz_list = callback(il) _gdb.xyz_channels = channels[:2] return _gdb def gdb_from_data(_d): def _data(i): if i == 0: return _d else: return None return gdb_from_callback(_data) # create a database from the data xc, yc = ('x', 'y') discard = False if callable(data): gdb = gdb_from_callback(data) vc = 'v' discard = True elif isinstance(data, tuple): gdb = data[0] vc = data[1] if len(data) == 4: xc = data[2] yc = data[3] else: xc, yc, _ = gdb.xyz_channels discard = True else: gdb = gdb_from_data(data) vc = 'v' if tol and float(tol) <= 0.: tol = 1.0e-25 # parameter control file con_file = gx.gx().temp_file('con') with open(con_file, 'x') as f: f.write('{} / cs\n'.format(cs)) f.write('{},{},{},{},{} / xmin, ymin, xmax, ymax, bclip\n'. 
format(area[0], area[1], area[2], area[3], bclip)) f.write(',,,{},{} / ,,, logopt, logmin\n'.format(logopt, logmin)) f.write('{},{},{},{},{} / idsf, bkd, srd, iwt, edgeclp\n'.format(idsf, bkd, srd, iwt, edgclp)) f.write('{},{},{},{},{} / tol, pastol, itrmax, ti, icgr\n'.format(tol, pastol, itrmax, ti, icgr)) if file_name is None: file_name = gx.gx().temp_file('grd(GRD)') elif os.path.exists(file_name): if overwrite: gxu.delete_files_by_root(file_name) else: raise GridException(_t('Cannot overwrite existing file: {}').format(file_name)) gxapi.GXRGRD.run2(gdb.gxdb, xc, yc, vc, con_file, file_name) grd = cls.open(file_name, mode=FILE_READWRITE) if coordinate_system is None: coordinate_system = gdb.coordinate_system grd.coordinate_system = coordinate_system if unit_of_measure is None: unit_of_measure = gxgdb.Channel(gdb, vc).unit_of_measure grd.unit_of_measure = unit_of_measure log_file = 'rangrid.log' if os.path.exists(log_file): gxu.delete_file(log_file) if discard: gdb.close(discard=True) return grd def __iter__(self): return self def __next__(self): if self._next >= self.nx * self.ny: self._next = 0 raise StopIteration else: v = self.__getitem__(self._next) self._next += 1 return v def __getitem__(self, item): if isinstance(item, int): ix = item % self.nx iy = item // self.nx else: ix, iy = item x, y, z = self.xyz((ix, iy)) if self._buffered_row != iy: self._buffered_row = iy self._buffer_np = self.read_row(self._buffered_row).np v = self._buffer_np[ix] if self._is_int: v = int(v) if v == gxapi.iDUMMY: v = None elif np.isnan(v): v = None else: v = float(v) return x, y, z, v def gxpg(self, copy=False): """ Get a copy of the `geosoft.gxapi.GXPG` instance for the grid. :param copy: `True` to return a copy of the grids pager. The default is `False`, which returns the shared grid pager, such that changes to the pager change the grid and the pager is invalid when thr grid is closed or loses context. .. versionadded:: 9.1 .. 
versionchanged:: 9.4 added `copy` parameter """ if self._gxpg is None: self._gxpg = self._img.geth_pg() if copy: pg = gxapi.GXPG.create(self._gxpg.n_rows(), self._gxpg.n_cols(), self._gxpg.e_type()) pg.copy(self._gxpg) return pg return self._gxpg def get_value(self, x, y): """ Return a grid value at a point as a float. For scalar data the point value will be interpolated between neighbors. For color data the nearest value is returned as a color int. :param x: X location on the grid plane :param y: Y location on the grid plane :returns: grid value, or None if outside of grid area """ return gxu.dummy_none(self.gximg.get_z(x, y)) @classmethod def copy(cls, grd, file_name=None, dtype=None, overwrite=False, in_memory=False, mode=FILE_READWRITE): """ Create a new Grid instance as a copy of an existing grid. :param grd: :class:`Grid` instance to save as a new grid, or a grid file name :param file_name: name of the new grid (file with optional decorations). If not specified a temporary file is created. :param dtype: numpy data type, None to use type of the parent grid :param overwrite: True to overwrite if the file exists, False to not overwrite. :param in_memory: True to create a grin in memory. :param mode: `open` mode for working with the copy. .. 
versionadded:: 9.2 """ if not isinstance(grd, Grid): grd = cls.open(grd, mode=FILE_READ) close_grid = True else: close_grid = False p = grd.properties() if dtype is not None: p['dtype'] = dtype if not in_memory and file_name is not None: path0, base_file0, root0, ext0, dec0 = name_parts(grd.file_name_decorated) path1, base_file1, root1, ext1, dec1 = name_parts(file_name) if not ext1: ext1 = ext0 if (ext1 == ext0) and not dec1: dec1 = dec0 file_name = decorate_name(os.path.join(path1, root1) + ext1, dec1) copy = cls.new(file_name, p, overwrite=overwrite, in_memory=in_memory) if file_name is None: file_name = copy.file_name_decorated grd.gximg.copy(copy.gximg) if close_grid: grd.close() if in_memory: return copy copy.close() return cls.open(file_name, dtype=dtype, mode=mode) @classmethod def index_window(cls, grd, name=None, x0=0, y0=0, nx=None, ny=None, overwrite=False): """ Create a windowed instance of a grid. :param grd: :class:`Grid` instance :param name: name for the windowed_grid, default is constructed from input grid :param x0: integer index of the first X point :param y0: integer index of the first Y point :param nx: number of points in x :param ny: number of points in y :param overwrite: True to overwrite existing file, default is False .. versionadded:: 9.2 """ if not isinstance(grd, Grid): grd = Grid.open(grd) gnx = grd.nx gny = grd.ny if nx is None: nx = gnx - x0 if ny is None: ny = gny - y0 mx = x0 + nx my = y0 + ny if ((x0 >= gnx) or (y0 >= gny) or (x0 < 0) or (y0 < 0) or (nx <= 0) or (ny <= 0) or (mx > gnx) or (my > gny)): raise GridException(_t('Window x0,y0,mx,my({},{},{},{}) out of bounds ({},{})'). 
format(x0, y0, mx, my, gnx, gny)) if name is None: path, file_name, root, ext, dec = name_parts(grd.file_name_decorated) name = '{}_({},{})({},{}){}'.format(root, x0, y0, nx, ny, ext) name = decorate_name(name, dec) overwrite = True # create new grid p = grd.properties() p['nx'] = nx p['ny'] = ny if grd.rot == 0.0: p['x0'] = grd.x0 + grd.dx * x0 p['y0'] = grd.y0 + grd.dy * y0 else: dx = grd.dx * x0 dy = grd.dy * y0 cos, sin = grd.rotation_cos_sine p['x0'] = grd.x0 - dx * cos - dy * sin p['y0'] = grd.y0 - dy * cos + dx * sin window_grid = cls.new(name, p, overwrite=overwrite) source_pager = grd.gxpg(copy=False) window_pager = window_grid.gxpg(copy=False) window_pager.copy_subset(source_pager, 0, 0, y0, x0, ny, nx) return window_grid @classmethod def from_data_array(cls, data, file_name=None, properties=None, overwrite=False): """ Create grid from a 2D data array or `geosoft.gxapi.GXPG`. :param data: 2D numpy data array, a 2d list, ir a `geosoft.gxapi.GXPG`. :param file_name: name of the file, default creates a temporary file name :param properties: grid properties as a dictionary :param overwrite: `True` to overwrite existing grid. :returns: :class:`Grid` instance .. versionadded:: 9.1 .. versionchanged:: 9.4 - support for default temporary file name and creation from a GXPG. 
""" if isinstance(data, gxapi.GXPG): if data.n_slices() != 1: raise GridException(_t('Pager must be 2D')) nx = data.n_cols() ny = data.n_rows() dtype = gxu.dtype_gx(data.e_type()) else: if not isinstance(data, np.ndarray): data = np.array(data) ny, nx = data.shape dtype = data.dtype if properties is None: properties = {} properties['nx'] = nx properties['ny'] = ny properties['dtype'] = dtype if (file_name is None) or (len(file_name.strip()) == 0): file_name = gx.gx().temp_file('.grd(GRD)') grd = cls.new(file_name, properties=properties, overwrite=overwrite) grd.write_rows(data) return reopen(grd) @property def is_crooked_path(self): """True if this grid follows a crooked path section.""" return self.coordinate_system.gxipj.get_orientation() == gxapi.IPJ_ORIENT_SECTION_CROOKED def crooked_path(self): """ Return the `CrookedPath` instance for a crooked-path grid. .. versionadded::9.4 """ if not self.is_crooked_path: raise GridException(_t("This is not a crooked-path section grid.")) return gxview.CrookedPath(self.coordinate_system) @property def rotation_cos_sine(self): """ Returns grid rotation (cosine, sine). .. versionadded:: 9.3.1 """ return self._cos_rot, self._sin_rot def delete_files(self, delete=True): """ Delete the files associated with this grid when deleting the grid object. Note that files are not deleted until all references to this object are deleted and garbage collection is performed. :param delete: set to False to reverse a previous delete request .. versionadded:: 9.1 """ self._delete_files = delete @property def remove_on_close(self): """Remove files on close setting, can be set.""" return self._delete_files @remove_on_close.setter def remove_on_close(self, tf): self._delete_files = bool(tf) def close(self, discard=False): """ Close the grid and release all instance resources. :param discard: `True` to discard associated files on close .. 
versionchanged:: 9.4 added `discard` parameter """ if discard: self.delete_files() self._close() @property def dummy_value(self): """ Return the grid data dummy value.""" return self._dummy @property def gximg(self): """ The `geosoft.gxapi.GXIMG` instance handle.""" return self._img def _init_metadata(self): if not self._metadata: self._metadata = gxu.geosoft_metadata(self._file_name) self._metadata_root = tuple(self._metadata.items())[0][0] @property def metadata(self): """ Return the grid metadata as a dictionary. Can be set, in which case the dictionary items passed will be added to, or replace existing metadata. .. seealso:: `Geosoft metadata schema <https://geosoftgxdev.atlassian.net/wiki/display/GXDEV92/Geosoft+Metadata+Schema>`_ .. versionadded:: 9.2 """ self._init_metadata() return self._metadata[self._metadata_root] @metadata.setter def metadata(self, meta): self._init_metadata() self._metadata[self._metadata_root] = gxu.merge_dict(self._metadata[self._metadata_root], meta) self._metadata_changed = True @property def unit_of_measure(self): """ Units of measurement (a string) for the grid data, can be set. .. versionadded:: 9.2 """ try: uom = self.metadata['geosoft']['dataset']['geo:unitofmeasurement']['#text'] except (KeyError, TypeError): uom = '' return uom @unit_of_measure.setter def unit_of_measure(self, uom): self.metadata = {'geosoft': {'@xmlns': 'http://www.geosoft.com/schema/geo', 'dataset': {'geo:unitofmeasurement': {'@xmlns:geo': 'http://www.geosoft.com/schema/geo', '#text': str(uom)}}}} @property def dtype(self): """ numpy data type for the grid .. versionadded:: 9.2 """ return self._dtype @property def gxtype(self): """ Geosoft data type for the grid .. versionadded:: 9.2 """ return self._gxtype @property def is_int(self): """ returns True if base grid type is integer, which includes color integers""" return self._is_int @property def nx(self): """ grid x dimension (number of columns) .. 
versionadded:: 9.2 """ return self._img.nx() @property def ny(self): """ grid y dimension (number of rows) .. versionadded:: 9.2 """ return self._img.ny() @property def x0(self): """ grid origin x location in the plane coordinate system .. versionadded:: 9.2 """ return self._img.query_double(gxapi.IMG_QUERY_rXO) @property def y0(self): """ grid origin y location in the plane coordinate system .. versionadded:: 9.2 """ return self._img.query_double(gxapi.IMG_QUERY_rYO) @property def dx(self): """ separation between grid points in the grid x direction .. versionadded:: 9.2 """ return self._img.query_double(gxapi.IMG_QUERY_rDX) @property def dy(self): """ separation between grid points in the grid y direction .. versionadded:: 9.2 """ return self._img.query_double(gxapi.IMG_QUERY_rDY) @property def rot(self): """ grid rotation angle, degrees azimuth Note that grid rotations in the gxapi GXIMG are degrees clockwise, which is the opposite of degree azimuth, used here. All horizontal plane angles in the Python gxpy module are degrees azimuth for consistency. .. versionadded:: 9.2 """ return -self._img.query_double(gxapi.IMG_QUERY_rROT) @property def is_color(self): """ returns True if grid contains colors. is_int will also be True""" return bool(self._img.is_colour()) @property def file_name(self): """ grid file name without decorations .. versionadded:: 9.2 """ if self._hgd: return self._hgd_name return self._file_name @property def file_name_decorated(self): """ grid file name with decorations .. versionadded:: 9.2 """ if self._hgd: decor = 'HGD' else: decor = self._decoration return decorate_name(self.file_name, decor) @property def name(self): """ Grid name, usually the file name without path or extension. .. versionadded:: 9.2 """ if self._file_name is None: return 'None' basename = os.path.basename(self.file_name) return os.path.splitext(basename)[0] @property def gridtype(self): """ grid type (e.g. 'GRD', 'HGD' etc. 'MEMORY' for in-memory grid) .. 
versionadded:: 9.2 """ if self._file_name is None: return 'MEMORY' _, _, _, ext, dec = name_parts(self._file_name) if len(dec) > 0: return dec.split(';')[0] else: return ext[1:].upper() @property def decoration(self): """ grid descriptive decoration .. versionadded:: 9.2 """ return self._decoration @property def coordinate_system(self): """ grid coordinate system as a :class:`geosoft.gxpy.coordinate_system.Coordinate_system` instance. Can be set from any :class:`geosoft.gxpy.coordinate_system.Coordinate_system` constructor. .. versionadded:: 9.2 .. versionchanged:: 9.3 added ability to set directly """ if self._cs is None: ipj = gxapi.GXIPJ.create() self._img.get_ipj(ipj) self._cs = gxcs.Coordinate_system(ipj) return self._cs @coordinate_system.setter def coordinate_system(self, cs): self._cs = gxcs.Coordinate_system(cs) self._img.set_ipj(self._cs.gxipj) def properties(self): """ Get the grid properties dictionary :returns: dictionary of all grid properties .. versionadded:: 9.1 .. versionchanged:: 9.4 added 'unit_of_measure' """ properties = {'nx': self.nx, 'ny': self.ny, 'x0': self.x0, 'y0': self.y0, 'dx': self.dx, 'dy': self.dy, 'rot': self.rot, 'is_color': self.is_color, 'dtype': self.dtype, 'gridtype': self.gridtype, 'decoration': self._decoration, 'unit_of_measure': self.unit_of_measure, 'coordinate_system': self.coordinate_system} return properties def statistics(self, gxst=None): """ Calculate and return current grid data statistics as a dictionary. :param gxst: gxapi.GXST instance, to which stats will be accumulated, or None. 
:returns: dictionary of grid data statistics: =============== ============================ min minimum max maximum mean mean geometric_mean geometric mean variance variance sd standard deviation skew skew kurtosis kurtosis sum sum of all data sum_power_2 sum of data**2 sum_power_3 sum of data**3 sum_power_4 sum of data**4 num_data number of valid data values num_dummy number of dummy values =============== ============================ .. versionadded:: 9.4 """ def get_st(what): v = gxst.get_info(what) if v == gxapi.rDUMMY: return None return v if gxst is None: gxst = gxapi.GXST.create() vv = gxvv.GXvv() for iv in range(self.gximg.nv()): self.gximg.read_v(iv, 0, 0, vv.gxvv) gxst.data_vv(vv.gxvv) st = {'min': get_st(gxapi.ST_MIN), 'max': get_st(gxapi.ST_MAX), 'mean': get_st(gxapi.ST_MEAN), 'geometric_mean': get_st(gxapi.ST_GEOMEAN), 'variance': get_st(gxapi.ST_VARIANCE), 'sd': get_st(gxapi.ST_STDDEV), 'skew': get_st(gxapi.ST_SKEW), 'kurtosis': get_st(gxapi.ST_KURTOSIS), 'sum': get_st(gxapi.ST_SUM), 'sum_power_2': get_st(gxapi.ST_SUM2), 'sum_power_3': get_st(gxapi.ST_SUM3), 'sum_power_4': get_st(gxapi.ST_SUM4), 'num_data': get_st(gxapi.ST_ITEMS), 'num_dummy': get_st(gxapi.ST_DUMMIES) } return st @x0.setter def x0(self, v): self._img.set_info(self.dx, self.dy, v, self.y0, -self.rot) @y0.setter def y0(self, v): self._img.set_info(self.dx, self.dy, self.x0, v, -self.rot) @dx.setter def dx(self, v): self._img.set_info(v, self.dy, self.x0, self.y0, -self.rot) @dy.setter def dy(self, v): self._img.set_info(self.dx, v, self.x0, self.y0, -self.rot) @rot.setter def rot(self, v): self._img.set_info(self.dx, self.dy, self.x0, self.y0, -v) self._cos_rot = math.cos(math.radians(v)) self._sin_rot = math.sin(math.radians(v)) def set_properties(self, properties): """ Set grid properties from a properties dict. 
Settable property keys are: ==================== ============================================ 'x0' grid X origin location (default 0.0) 'y0' grid Y origin location (0.0) 'dx' grid X point separation (1.0) 'dy' grid Y point separation (1.0) 'rot' grid rotation angle in degrees azimuth (0.0) 'unit_of_measure' unit of measure for the grid data 'coordinate_system' coordinate system (unchanged) ==================== ============================================ Not all keys need be passed, though typically one will get the properties from the grid and modify those that need to change and pass the properties back. :param properties: properties dictionary .. versionadded:: 9.1 """ if self._readonly: raise GridException(_t('{} opened as read-only, cannot set properties.').format(self.file_name_decorated)) dx = properties.get('dx', 1.0) dy = properties.get('dy', dx) self._img.set_info(dx, dy, properties.get('x0', 0.0), properties.get('y0', 0.0), -properties.get('rot', 0.0)) self.rot = self.rot # calculates cos and sin uom = properties.get('unit_of_measure', None) if uom is not None: self.unit_of_measure = uom cs = properties.get('coordinate_system', None) if cs is not None: if not isinstance(cs, gxcs.Coordinate_system): cs = gxcs.Coordinate_system(cs) self._img.set_ipj(cs.gxipj) def write_rows(self, data, ix0=0, iy0=0, order=1): """ Write data to a grid by rows. :param data: array of data to write, numpy, list or `geosoft.gxapi.GXPG` :param ix0: grid X index of first point :param iy0: grid Y index of first point, top index if writing rows top to bottom :param order: 1: bottom to top; -1: top to bottom .. versionadded:: 9.1 .. 
versionchanged:: 9.4 accepts list or GXPG """ if isinstance(data, gxapi.GXPG): nx = data.n_cols() ny = data.n_rows() else: if not isinstance(data, np.ndarray): data = np.array(data) ny, nx = data.shape if ((nx - ix0) > self.nx) or ((ny - iy0) > self.ny): raise GridException(_t('Data size exceeds grid size.')) dvv = gxvv.GXvv(dtype=self.dtype) dvv.length = nx iy = iy0 for i in range(ny): if isinstance(data, gxapi.GXPG): data.read_row(i, 0, 0, dvv.gxvv) else: dvv.set_data(data[i, :]) self._img.write_y(iy, ix0, 0, dvv.gxvv) iy += order def read_row(self, row=None, start=0, length=None): """ :param row: row to read, if not specified the next row is read starting from row 0 :param start: the first point in the row, default is 0 :param length: number of points to read, the default is to the end of the row. :return: :class:`geosoft.gxvv.GXvv` instance .. versionadded:: 9.1 """ if row is None: row = self._next_row self._next_row = row + 1 if self._next_row == self.ny: self._next_row = 0 if row >= self.ny: raise GridException(_t('Attempt to read row {} past the last row {}'.format(row, self.ny))) vv = gxvv.GXvv(dtype=self.dtype) if length is None: length = 0 self._img.read_y(row, start, length, vv.gxvv) return vv def read_column(self, column=None, start=0, length=0): """ :param column: column to read, if not specified the next column is read starting from column 0 :param start: the first point in the column, default is 0 :param length: number of points to read, the default is to the end of the col. :return: :class:`geosoft.gxvv.GXvv` instance .. 
versionadded:: 9.1 """ if column is None: column = self._next_col if column >= self.nx: raise GridException(_t('Attempt to read column {} past the last column {}'.format(column, self.ny))) self._next_col = column + 1 if self._next_col == self.nx: self._next_col = 0 vv = gxvv.GXvv(dtype=self.dtype) self._img.read_x(column, start, length, vv.gxvv) return vv def write_row(self, data, row=None, start=0, length=None): """ :param data: data to write, `geosoft.gxpy.vv.GXvv` instance or an array :param row: row to write, if not specified the next row is written starting from row 0 :param start: the first point in the row, default is 0 :param length: number of points to read, the default is to the end of the row. .. versionadded:: 9.4 """ if not isinstance(data, gxvv.GXvv): data = gxvv.GXvv(data, dtype=self.dtype) if row is None: row = self._next_row self._next_row = row + 1 if self._next_row == self.ny: self._next_row = 0 if row >= self.ny: raise GridException(_t('Attempt to read row {} past the last row {}'.format(row, self.ny))) if length is None: length = 0 self._img.write_y(row, start, length, data.gxvv) def write_column(self, data, column=None, start=0, length=None): """ :param data: data to write, `geosoft.gxpy.vv.GXvv` instance or an array :param column: column to write, if not specified the next column is written starting from column 0 :param start: the first point in the column, default is 0 :param length: number of points to write, the default is to the end of the row. .. 
versionadded:: 9.4 """ if not isinstance(data, gxvv.GXvv): data = gxvv.GXvv(data, dtype=self.dtype) if column is None: column = self._next_col self._next_col = column + 1 if self._next_col == self.nx: self._next_col = 0 if column >= self.nx: raise GridException(_t('Attempt to read column {} past the last column {}'.format(column, self.nx))) if length is None: length = 0 self._img.write_x(column, start, length, data.gxvv) def reset_read_write(self): """ Reset the default read/write to the grid row 0, column 0. """ self._next = self._next_col = self._next_row = 0 @staticmethod def name_parts(name): """ .. deprecated:: 9.2 use gxpy.grid.name_parts() """ return name_parts(name) @staticmethod def decorate_name(name, decorations=''): """ .. deprecated:: 9.2 use gxpy.grid.name_parts() """ return decorate_name(name, decorations) def indexWindow(self, name, x0=0, y0=0, nx=None, ny=None): """ .. deprecated:: 9.2 gxpy.Grid.index_window() """ return self.index_window(self, name, x0, y0, nx, ny, overwrite=True) def xy_from_index(self, ix, iy): """ Return the rotated location of grid index ix, iy :param ix: grid index x :param iy: grid index y .. versionadded:: 9.4 """ def rotate(x, y): x -= self.x0 y -= self.y0 _x = x * self._cos_rot + y * self._sin_rot _y = -x * self._sin_rot + y * self._cos_rot return _x + self.x0, _y + self.y0 x = self.x0 + (ix * self.dx) y = self.y0 + (iy * self.dy) if self.rot != 0.: return rotate(x, y) return x, y def extent_2d(self): """ Return the 2D extent of the grid on the grid plane. Extent is to the outer edge of grid "cells", which extend half a cell beyond the edge points. :returns:(min_x, min_y, max_x, max_y) .. versionadded:: 9.2 .. versionchanged:: 9.4 - extent to the cell edges. 
""" x0, y0 = self.xy_from_index(-0.5, -0.5) x1, y1 = self.xy_from_index(self.nx - 0.5, self.ny - 0.5) if self.rot != 0.: xx0, yy0 = self.xy_from_index(self.nx - 0.5, -0.5) xx1, yy1 = self.xy_from_index(-0.5, self.ny - 0.5) min_x = min(x0, xx0, x1, xx1) min_y = min(y0, yy0, y1, yy1) max_x = max(x0, xx0, x1, xx1) max_y = max(y0, yy0, y1, yy1) return min_x, min_y, max_x, max_y return x0, y0, x1, y1 def extent_point_2d(self): """ Return the 2D extent of the grid point (cell centers) on the grid plane. :returns:(min_x, min_y, max_x, max_y) .. versionadded:: 9.4 """ x0, y0 = self.x0, self.y0 x1, y1 = self.xy_from_index(self.nx - 1, self.ny - 1) if self.rot != 0.: xx0, yy0 = self.xy_from_index(self.nx - 1, 0) xx1, yy1 = self.xy_from_index(0, self.ny - 1) min_x = min(x0, xx0, x1, xx1) min_y = min(y0, yy0, y1, yy1) max_x = max(x0, xx0, x1, xx1) max_y = max(y0, yy0, y1, yy1) return min_x, min_y, max_x, max_y return x0, y0, x1, y1 def extent_cell_2d(self): """ .. deprecated:: 9.4 - same as `extent_2d()` """ return self.extent_2d() def extent_3d(self): """ Return the 3D extent of the grid in the base coordinate system. :returns: (min_x, min_y, min_z, max_x, max_y, max_z) .. 
versionadded:: 9.2 """ cs = self.coordinate_system ex2d = self.extent_2d() if self.is_crooked_path: min_x, min_y, max_x, max_y = self.crooked_path().extent_xy min_z = cs.xyz_from_oriented((ex2d[0], ex2d[1], 0.0))[2] max_z = cs.xyz_from_oriented((ex2d[0], ex2d[3], 0.0))[2] else: xyz0 = cs.xyz_from_oriented((ex2d[0], ex2d[1], 0.0)) xyz1 = cs.xyz_from_oriented((ex2d[2], ex2d[1], 0.0)) xyz2 = cs.xyz_from_oriented((ex2d[2], ex2d[3], 0.0)) xyz3 = cs.xyz_from_oriented((ex2d[0], ex2d[3], 0.0)) min_x = min(xyz0[0], xyz1[0], xyz2[0], xyz3[0]) min_y = min(xyz0[1], xyz1[1], xyz2[1], xyz3[1]) min_z = min(xyz0[2], xyz1[2], xyz2[2], xyz3[2]) max_x = max(xyz0[0], xyz1[0], xyz2[0], xyz3[0]) max_y = max(xyz0[1], xyz1[1], xyz2[1], xyz3[1]) max_z = max(xyz0[2], xyz1[2], xyz2[2], xyz3[2]) return min_x, min_y, min_z, max_x, max_y, max_z def extent_cell_3d(self): """ .. deprecated:: 9.4 - same as `extent_3d()` """ return self.extent_3d() @property def extent(self): """ Grid cell extent as `geosoft.gxpy.geometry.Point2`. .. versionadded:: 9.3.1 """ return gxgm.Point2((self.extent_3d()), coordinate_system=self.coordinate_system) def np(self, dtype=None): """ Return a numpy array of grid values in the working dtype. :param dtype: desired data type, default is the work_dtype, ignored for color grids :returns: numpy array shape (nx, ny) or (nx, ny, 4) containing RGBA bytes in case of color grids .. versionadded:: 9.3.1 """ nx = self.nx ny = self.ny if self.is_color: data = np.zeros((ny, nx, 4), np.dtype(np.uint8)) else: if dtype is None: dtype = self.dtype data = np.zeros((ny, nx), dtype=dtype) if self.gximg.query_kx() == -1: for i in range(self.nx): column = self.read_column(i).np if self.is_color: column = _transform_color_int_to_rgba(column) data[:, i] = column else: for i in range(self.ny): row = self.read_row(i).np if self.is_color: row = _transform_color_int_to_rgba(row) data[i, :] = row return data def xyzv(self): """ Return a numpy float array of (x, y, z, v) grid points. 
x, y, z) is the location of each grid point in 3D space and v is the grid value at that location. Dummies will be numpy.nan. :returns: numpy array shape (nx, ny, 4) .. versionadded:: 9.2 """ nx = self.nx ny = self.ny dx = self.dx dy = self.dy cs = self.coordinate_system xyzv = np.zeros((ny, nx, 4)) xyzv[:, :, 0:2] = np.mgrid[0: (nx - 0.5) * dx: dx, 0: (ny - 0.5) * dy: dy].swapaxes(0, 2) if self.rot != 0.: x = xyzv[:, :, 0] cosx = x * self._cos_rot sinx = x * self._sin_rot y = xyzv[:, :, 1] cosy = y * self._cos_rot siny = y * self._sin_rot xyzv[:, :, 0] = cosx + siny xyzv[:, :, 1] = cosy - sinx xyzv += (self.x0, self.y0, 0, 0) if cs.is_oriented: xyzv[:, :, :3] = cs.xyz_from_oriented(xyzv[:, :, :3].reshape((-1, 3))).reshape((ny, nx, 3)) if self.gximg.query_kx() == -1: for i in range(self.nx): xyzv[:, i, 3] = self.read_column(i).np else: for i in range(self.ny): xyzv[i, :, 3] = self.read_row(i).np return xyzv def xyz(self, item): """ Returns the (x, y, z) location of an indexed point in the grid. :param item: tuple (ix, iy) grid point, or the point number counting by row :return: tuple (x, y, z) location .. versionadded:: 9.2.1 """ if isinstance(item, int): ix = item % self.nx iy = item // self.nx else: ix, iy = item if self._buffered_xy != iy: self._buffered_xy = iy self._buffer_x = np.arange(self.nx, dtype=np.float64) self._buffer_x *= self.dx self._buffer_y = np.zeros(self.nx, dtype=np.float64) self._buffer_y += iy * self.dy if self.rot != 0.: rx = self._buffer_x * self._cos_rot + self._buffer_y * self._sin_rot self._buffer_y *= self._buffer_y * self._cos_rot self._buffer_y -= self._buffer_x * self._sin_rot self._buffer_x = rx self._buffer_x += self.x0 self._buffer_y += self.y0 ggx = self._buffer_x[ix] ggy = self._buffer_y[ix] ggz = 0. 
if self.coordinate_system.is_oriented: ggx, ggy, ggz = self.coordinate_system.xyz_from_oriented((ggx, ggy, ggz)) return ggx, ggy, ggz def image_file(self, image_file_name=None, image_type=gxmap.RASTER_FORMAT_PNG, pix_width=None, shade=False, color_map=None, contour=None, display_area=None, pix_32_bit=False): """ Save as a georeferenced image file. :param image_file_name: image file name. The extension should be consistent with the image_type. If not specified a temporary PNG file is created. :param image_type: image type, one ot the RASTER_FORMAT constants in `geosoft.gxpy.map`. :param pix_width: desired image width in pixels, default is the width of the aggregate base layer :param shade: `True` to add shading effect :param color_map: `geosoft.gxpy.group.Color_map` instance, or a colour ramp file name, default is grid's default :param contour: colour contour interval if colours need to break at exact levels :param display_area: `geosoft.gxpy.geometry.Point2` instance, which defines the desired display area. The display area coordinate system can be different from the grid. :param pix_32_bit: make 32-bit image (with 8-bit alpha background) :return: image file name. .. seealso:: `geosoft.gxpy.grid.image_file`, which creates an image directly from a grid file. .. Note:: Unless read-only this method saves the grid as a temporary file from which an aggregate and image are created. If the grid already exists as a grid file it is more efficient to call `geosoft.gxpy.grid.image_file`. .. 
versionadded:: 9.3.1 """ temp_grid = gx.gx().temp_file('grd') try: if self._mode == FILE_READ and self._file_name is not None: grd_decorated = self.file_name_decorated else: with self.__class__.copy(self, temp_grid) as g: grd_decorated = g.file_name_decorated if color_map is None: color_map = self.get_default_color_map() imagefile = image_file(grd_decorated, image_file=image_file_name, image_type=image_type, pix_width=pix_width, shade=shade, color_map=color_map, contour=contour, display_area=display_area, pix_32_bit=pix_32_bit) finally: delete_files(temp_grid) return imagefile def generate_color_map(self, method=gxapi.ITR_ZONE_DEFAULT): """ Generate color map for grid based on statistics and method :param method: :ref:`ITR_ZONE` :return: A `geosoft.gxpy.group.Color_map` instance. .. versionadded:: 9.4.0 """ itr = gxapi.GXITR.create_img(self._img, "", method, gxapi.rDUMMY) return geosoft.gxpy.group.Color_map(itr) def get_default_color_map(self): """ Get default color map for grid :return: A `geosoft.gxpy.group.Color_map` instance. .. versionadded:: 9.4.0 """ itr = gxapi.GXITR.create() if 1 == self._img.get_def_itr(itr): return self.generate_color_map() return geosoft.gxpy.group.Color_map(itr) def mask(self, mask): """ Mask against blank areas in `mask` grid. Both grids must be same dimension. :param mask: reference mask grid, file of `Grid` instance. .. versionadded:: 9.4 """ if not isinstance(mask, Grid): mask = Grid.open(mask) if (self.nx != mask.nx or self.ny != mask.ny): raise GridException(_t('Grids dimensions do not match')) for row in range(self.ny): mr = self.read_row(row) mr.gxvv.mask(mask.read_row(row).gxvv) self.write_row(mr, row) # grid utilities def array_locations(properties): """ Create an array of (x,y,z) points for a grid defined by properties :param properties: grid properties :returns: array of points, shaped (ny, nx, 3) .. 
versionadded:: 9.1 """ with Grid.new(properties=properties) as g: return g.xyzv()[:, :, :3] def gridMosaic(*args, **kwargs): """ .. deprecated:: 9.2 use :py:method: grid_mosaic """ return grid_mosaic(*args, **kwargs) def grid_mosaic(*args, **kwargs): """ .. deprecated:: 9.4 use `geosoft.gxpy.grid_utility.grid_mosaic` """ def gridBool(*args, **kwargs): """ .. deprecated:: 9.2 use `grid_bool` """ return grid_bool(*args, **kwargs) def grid_bool(*args, **kwargs): """ .. deprecated:: 9.4 use `geosoft.gxpy.grid_utility.grid_bool` """ return gxgrdu.grid_bool(*args, **kwargs) def figure_map(grid_file, map_file=None, shade=True, color_map=None, contour=None, **kwargs): """ Create a map figure from a grid file. :param grid_file: grid file name :param map_file: name of the map file, if `None` a default map is created. :param shade: `True` to add shading effect :param color_map: `geosoft.gxpy.group.Color_map` instance, or a colour ramp file name, default is user's default :param contour: colour contour interval if colours need to break at exact levels :param kwargs: passed to `geosoft.gxpy.agg.Aggregate_image.figure_map` and `geosoft.gxpy.map.Map.new` :return: `geosoft.gxpy.map.Map` instance .. versionadded:: 9.3 """ with gxagg.Aggregate_image.new(grid_file, shade=shade, color_map=color_map, contour=contour) as agg: return agg.figure_map(file_name=map_file, **kwargs) def image_file(grid_file, image_file=None, image_type=gxmap.RASTER_FORMAT_PNG, pix_width=None, shade=True, color_map=None, contour=None, display_area=None, pix_32_bit=False): """ Save a grid file grid as a georeferenced image file. :param grid_file: grid file name :param image_file: image file name. The extension should be consistent with the image_type. If not specified a temporary PNG file is created. :param image_type: image type, one ot the RASTER_FORMAT constants in `geosoft.gxpy.map`. 
:param pix_width: desired image width in pixels, default is the width of the aggregate base layer :param shade: `True` to add shading effect :param color_map: `geosoft.gxpy.group.Color_map` instance, or a colour ramp file name, default is grid's default :param contour: colour contour interval if colours need to break at exact levels :param display_area: `geosoft.gxpy.geometry.Point2` instance, which defines the desired display area. The display area coordinate system can be different from the grid. :param pix_32_bit: make 32-bit image (with 8-bit alpha background) :return: image file name. .. versionadded:: 9.3.1 """ if color_map is None: with Grid.open(grid_file) as g: color_map = g.get_default_color_map() with gxagg.Aggregate_image.new(grid_file, shade=shade, color_map=color_map, contour=contour) as agg: return agg.image_file(image_file, image_type=image_type, pix_width=pix_width, display_area=display_area, pix_32_bit=pix_32_bit) <file_sep>/geosoft/gxapi/GXPG.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPG(gxapi_cy.WrapPG): """ GXPG class. Pager methods for large 2-D arrays This class handles very-large 2-D arrays in which efficient access is required along both rows and columns. **Note:** Typically a grid is accessed using the `GXIMG <geosoft.gxapi.GXIMG>` class, and a `GXPG <geosoft.gxapi.GXPG>` is obtained from the `GXIMG <geosoft.gxapi.GXIMG>` using the `GXIMG.get_pg <geosoft.gxapi.GXIMG.get_pg>` function. Following operations on the `GXPG <geosoft.gxapi.GXPG>`, it can be written back to the `GXIMG <geosoft.gxapi.GXIMG>` using `GXIMG.set_pg <geosoft.gxapi.GXIMG.set_pg>`. 
""" def __init__(self, handle=0): super(GXPG, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPG <geosoft.gxapi.GXPG>` :returns: A null `GXPG <geosoft.gxapi.GXPG>` :rtype: GXPG """ return GXPG() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous # 2D Methods def copy(self, pgs): """ Copy the data from one pager to another. :param pgs: Source `GXPG <geosoft.gxapi.GXPG>` object :type pgs: GXPG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(pgs) def copy_subset(self, pgs, y_row_d, x_col_d, y_row_s, x_col_s, ny, nx): """ Copy a subset of data from one pager to another. :param pgs: Source `GXPG <geosoft.gxapi.GXPG>` object :param y_row_d: Y (row) Origin on destination :param x_col_d: X (col) Origin on destination :param y_row_s: Y (row) Origin on source :param x_col_s: X (col) Origin on source :param ny: Number of Y (rows) to copy :param nx: Number of X (columns) to copy :type pgs: GXPG :type y_row_d: int :type x_col_d: int :type y_row_s: int :type x_col_s: int :type ny: int :type nx: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 2D Only """ self._copy_subset(pgs, y_row_d, x_col_d, y_row_s, x_col_s, ny, nx) @classmethod def create(cls, row, col, type): """ Creates a Pager object :param row: # elements in y (# of row) :param col: # elements in x (# of column) :param type: :ref:`GS_TYPES` :type row: int :type col: int :type type: int :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapPG._create(GXContext._get_tls_geo(), row, col, type) return GXPG(ret_val) @classmethod def create_s(cls, bf): """ Create a 2D `GXPG <geosoft.gxapi.GXPG>` from serialized source. :type bf: GXBF :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** For 3D pagers, use CreateBF_PG. """ ret_val = gxapi_cy.WrapPG._create_s(GXContext._get_tls_geo(), bf) return GXPG(ret_val) def dummy(self): """ Sets the Entire pager to dummy. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._dummy() def e_type(self): """ Gets the type of pager. :returns: :ref:`GS_TYPES` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._e_type() return ret_val def n_cols(self): """ Gets the # of columns in pager. :returns: # of columns. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._n_cols() return ret_val def n_rows(self): """ Gets the # of rows in pager. :returns: # of rows. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._n_rows() return ret_val def n_slices(self): """ Gets the # of slices (z) in pager. :returns: # of rows. :rtype: int .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._n_slices() return ret_val def range(self, min, max): """ Computes the range of the entire pager. :param min: Minimum Data (Dummy if no range) :param max: Maximum Data (Dummy if no range) :type min: float_ref :type max: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min.value, max.value = self._range(min.value, max.value) def get(self, col, row): """ Read a single value from a 2D `GXPG <geosoft.gxapi.GXPG>` :param col: iBx - element # in x (column #) :param row: iBy - element # in y (row #) :type col: int :type row: int :rtype: float .. versionadded:: 8.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is a low-performance method. """ ret_val = self._get(col, row) return ret_val def set(self, col, row, value): """ Write a single value to a 2D `GXPG <geosoft.gxapi.GXPG>` :param col: iBx - element # in x (column #) :param row: iBy - element # in y (row #) :param value: value to set :type col: int :type row: int :type value: float .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is a low-performance method. """ self._set(col, row, value) def read_col(self, col, o, n, vv): """ Read a set of elements in X (column) from pager into vv :param col: iBx - element # in x (column #) :param o: iBy - begining element # in y to read (0 is the first) :param n: iNy - # elements to read (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type col: int :type o: int :type n: int :type vv: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_col(col, o, n, vv) def read_row(self, row, o, n, vv): """ Read a set of elements in Y (row) from pager into vv :param row: iBy - element # in y (row #) :param o: iBx - begining element # in x to read (0 is the first) :param n: iNx - # elements to read (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type row: int :type o: int :type n: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_row(row, o, n, vv) def re_allocate(self, n_row, n_col): """ Changes the size of Pager :param n_row: Number of Y (rows) to reallocate :param n_col: Number of X (columns) to reallocate :type n_row: int :type n_col: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._re_allocate(n_row, n_col) def serial(self, bf): """ Serialize a 2D `GXPG <geosoft.gxapi.GXPG>` to a `GXBF <geosoft.gxapi.GXBF>`. :type bf: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** For 3D pagers, use `write_bf <geosoft.gxapi.GXPG.write_bf>`. """ self._serial(bf) def statistics(self, st): """ Compute the statistics of a pager object. :param st: hST - statistics object :type st: GXST .. 
versionadded:: 6.3.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._statistics(st) def write_col(self, col, o, n, vv): """ Write a set of elements in X (column) from vv into pager :param col: iBx - element # in x (column #) :param o: iBy - begining element # in y to write (0 is the first) :param n: iNy - # elements to write (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type col: int :type o: int :type n: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_col(col, o, n, vv) def write_row(self, row, o, n, vv): """ Write a set of elements in Y (row) from vv into pager :param row: iBy - element # in y (row #) :param o: iBx - begining element # in x to write (0 is the first) :param n: iNx - # elements to write (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type row: int :type o: int :type n: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_row(row, o, n, vv) # 3D Methods def copy_subset_3d(self, pgs, sliced, n, vv, slices, rows, cols, n_slice, n_row, n_col): """ Copy a subset of data from one pager to another. 
:param pgs: Source `GXPG <geosoft.gxapi.GXPG>` object :param sliced: Z (slice) Origin on destination :param n: Y (row) Origin on destination :param vv: X (col) Origin on destination :param slices: Z (slice) Origin on source :param rows: Y (row) Origin on source :param cols: X (col) Origin on source :param n_slice: Number of Z (slice) to copy :param n_row: Number of Y (rows) to copy :param n_col: Number of X (columns) to copy :type pgs: GXPG :type sliced: int :type n: int :type vv: int :type slices: int :type rows: int :type cols: int :type n_slice: int :type n_row: int :type n_col: int .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 2D Only """ self._copy_subset_3d(pgs, sliced, n, vv, slices, rows, cols, n_slice, n_row, n_col) @classmethod def create_3d(cls, slice, row, col, type): """ Creates a Pager object :param slice: # elements in z (# of slices) :param row: # elements in y (# of row) :param col: # elements in x (# of column) :param type: :ref:`GS_TYPES` :type slice: int :type row: int :type col: int :type type: int :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapPG._create_3d(GXContext._get_tls_geo(), slice, row, col, type) return GXPG(ret_val) def read_col_3d(self, slice, col, o, n, vv): """ Read a set of elements in X (column) from pager into vv :param slice: iBz - element # in z (slice #) :param col: iBx - element # in x (column #) :param o: iBy - begining element # in y to read (0 is the first) :param n: iNy - # elements to read (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type slice: int :type col: int :type o: int :type n: int :type vv: GXVV .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_col_3d(slice, col, o, n, vv) def read_row_3d(self, slice, row, o, n, vv): """ Read a set of elements in Y (row) from pager into vv :param slice: iBz - element # in z (slice #) :param row: iBy - element # in y (row #) :param o: iBx - begining element # in x to read (0 is the first) :param n: iNx - # elements to read (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type slice: int :type row: int :type o: int :type n: int :type vv: GXVV .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_row_3d(slice, row, o, n, vv) def read_trace_3d(self, col, row, o, n, vv): """ Read a set of elements in Z (trace) from pager into vv :param col: iBx - element # in x (column #) :param row: iBy - element # in y (row #) :param o: iBy - begining element # in z to read (0 is the first) :param n: iNy - # elements to read (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type col: int :type row: int :type o: int :type n: int :type vv: GXVV .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_trace_3d(col, row, o, n, vv) def re_allocate_3d(self, n_slice, n_row, n_col): """ Changes the size of 3D Pager :param n_slice: Number of Z (slices) to reallocate :param n_row: Number of Y (rows) to reallocate :param n_col: Number of X (columns) to reallocate :type n_slice: int :type n_row: int :type n_col: int .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._re_allocate_3d(n_slice, n_row, n_col) def write_col_3d(self, slice, col, o, n, vv): """ Write a set of elements in X (column) from vv into pager :param slice: iBz - element # in z (slice #) :param col: iBx - element # in x (column #) :param o: iBy - begining element # in y to write (0 is the first) :param n: iNy - # elements to write (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type slice: int :type col: int :type o: int :type n: int :type vv: GXVV .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_col_3d(slice, col, o, n, vv) def write_row_3d(self, slice, row, o, n, vv): """ Write a set of elements in Y (row) from vv into pager :param slice: iBz - element # in z (slice #) :param row: iBy - element # in y (row #) :param o: iBx - begining element # in x to write (0 is the first) :param n: iNx - # elements to write (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type slice: int :type row: int :type o: int :type n: int :type vv: GXVV .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_row_3d(slice, row, o, n, vv) def write_trace_3d(self, col, row, o, n, vv): """ Write a set of elements in Z (trace) from pager into vv :param col: iBx - element # in x (column #) :param row: iBy - element # in y (row #) :param o: iBy - begining element # in z to read (0 is the first) :param n: iNy - # elements to read (0 for whole vector) :param vv: hVV - `GXVV <geosoft.gxapi.GXVV>` handle :type col: int :type row: int :type o: int :type n: int :type vv: GXVV .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_trace_3d(col, row, o, n, vv) # Utility Methods def read_bf(self, bf, dir, conv, rev_x, rev_y, rev_z): """ Read the contents of a 2D or 3D pager to from a `GXBF <geosoft.gxapi.GXBF>`. :param bf: `GXBF <geosoft.gxapi.GXBF>` to read from :param dir: :ref:`PG_3D_DIR` :param conv: :ref:`PG_BF_CONV` :param rev_x: Reverse X :param rev_y: Reverse Y :param rev_z: Reverse Z :type bf: GXBF :type dir: int :type conv: int :type rev_x: int :type rev_y: int :type rev_z: int .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_bf(bf, dir, conv, rev_x, rev_y, rev_z) def read_ra(self, ra, dir, rev_x, rev_y, rev_z, dummy): """ Read the contents of a 2D or 3D pager to from an `GXRA <geosoft.gxapi.GXRA>`. :param ra: `GXRA <geosoft.gxapi.GXRA>` to read from :param dir: :ref:`PG_3D_DIR` :param rev_x: Reverse X :param rev_y: Reverse Y :param rev_z: Reverse Z :param dummy: Dummy :type ra: GXRA :type dir: int :type rev_x: int :type rev_y: int :type rev_z: int :type dummy: str .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Each line must hold only 1 value """ self._read_ra(ra, dir, rev_x, rev_y, rev_z, dummy.encode()) def write_bf(self, bf, dir, conv, rev_x, rev_y, rev_z): """ Write the contents of a 2D or 3D pager to a `GXBF <geosoft.gxapi.GXBF>`. :param bf: `GXBF <geosoft.gxapi.GXBF>` to write to :param dir: :ref:`PG_3D_DIR` :param conv: :ref:`PG_BF_CONV` :param rev_x: Reverse X :param rev_y: Reverse Y :param rev_z: Reverse Z :type bf: GXBF :type dir: int :type conv: int :type rev_x: int :type rev_y: int :type rev_z: int .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_bf(bf, dir, conv, rev_x, rev_y, rev_z) def write_bf_ex(self, bf, dir, conv, rev_x, rev_y, rev_z, p_dummy): """ Write the contents of a 2D or 3D pager to a `GXBF <geosoft.gxapi.GXBF>`. :param bf: `GXBF <geosoft.gxapi.GXBF>` to write to :param dir: :ref:`PG_3D_DIR` :param conv: :ref:`PG_BF_CONV` :param rev_x: Reverse X :param rev_y: Reverse Y :param rev_z: Reverse Z :param p_dummy: Dummy value :type bf: GXBF :type dir: int :type conv: int :type rev_x: int :type rev_y: int :type rev_z: int :type p_dummy: float .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_bf_ex(bf, dir, conv, rev_x, rev_y, rev_z, p_dummy) def write_wa(self, wa, dir, rev_x, rev_y, rev_z, dummy): """ Write the contents of a 2D or 3D pager to a `GXWA <geosoft.gxapi.GXWA>` :param wa: `GXWA <geosoft.gxapi.GXWA>` to write to :param dir: :ref:`PG_3D_DIR` :param rev_x: Reverse X :param rev_y: Reverse Y :param rev_z: Reverse Z :param dummy: Dummy :type wa: GXWA :type dir: int :type rev_x: int :type rev_y: int :type rev_z: int :type dummy: str .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Each line will hold only 1 value """ self._write_wa(wa, dir, rev_x, rev_y, rev_z, dummy.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXVA.rst .. _GXVA: GXVA class ================================== .. autoclass:: geosoft.gxapi.GXVA :members: .. 
_VA_AVERAGE:

VA_AVERAGE constants
-----------------------------------------------------------------------

   `GXVA <geosoft.gxapi.GXVA>` Object to average

.. autodata:: geosoft.gxapi.VA_AVERAGE_ROWS
   :annotation:

.. autoattribute:: geosoft.gxapi.VA_AVERAGE_ROWS

.. autodata:: geosoft.gxapi.VA_AVERAGE_COLUMNS
   :annotation:

.. autoattribute:: geosoft.gxapi.VA_AVERAGE_COLUMNS

.. _VA_OBJECT:

VA_OBJECT constants
-----------------------------------------------------------------------

   `GXVA <geosoft.gxapi.GXVA>` Object to select

.. autodata:: geosoft.gxapi.VA_ROW
   :annotation:

.. autoattribute:: geosoft.gxapi.VA_ROW

.. autodata:: geosoft.gxapi.VA_COL
   :annotation:

.. autoattribute:: geosoft.gxapi.VA_COL
<file_sep>/geosoft/gxpy/_jdcal/README.rst
jdcal
=====

.. _TPM: http://www.sal.wisc.edu/~jwp/astro/tpm/tpm.html
.. _<NAME>: http://www.sal.wisc.edu/~jwp/
.. _IAU SOFA: http://www.iausofa.org/
.. _pip: http://pypi.python.org/pypi/pip
.. _easy_install: http://packages.python.org/distribute/easy_install.html

.. image:: https://travis-ci.org/phn/jdcal.svg?branch=master
    :target: https://travis-ci.org/phn/jdcal

This module contains functions for converting between Julian dates and
calendar dates.

A function for converting Gregorian calendar dates to Julian dates, and
another function for converting Julian calendar dates to Julian dates
are defined. Two functions for the reverse calculations are also
defined.

Different regions of the world switched to Gregorian calendar from
Julian calendar on different dates. Having separate functions for Julian
and Gregorian calendars allow maximum flexibility in choosing the
relevant calendar.

Julian dates are stored in two floating point numbers (double). Julian
dates, and Modified Julian dates, are large numbers. If only one number
is used, then the precision of the time stored is limited. Using two
numbers, time can be split in a manner that will allow maximum
precision.
For example, the first number could be the Julian date for the beginning of a day and the second number could be the fractional day. Calculations that need the latter part can now work with maximum precision. All the above functions are "proleptic". This means that they work for dates on which the concerned calendar is not valid. For example, Gregorian calendar was not used prior to around October 1582. A function to test if a given Gregorian calendar year is a leap year is also defined. Zero point of Modified Julian Date (MJD) and the MJD of 2000/1/1 12:00:00 are also given as module level constants. Examples -------- Some examples are given below. For more information see http://oneau.wordpress.com/jdcal/. Gregorian calendar: .. code-block:: python >>> from jdcal import gcal2jd, jd2gcal >>> gcal2jd(2000,1,1) (2400000.5, 51544.0) >>> 2400000.5 + 51544.0 + 0.5 2451545.0 >>> gcal2jd(2000,2,30) (2400000.5, 51604.0) >>> gcal2jd(2000,3,1) (2400000.5, 51604.0) >>> gcal2jd(2001,2,30) (2400000.5, 51970.0) >>> gcal2jd(2001,3,2) (2400000.5, 51970.0) >>> jd2gcal(*gcal2jd(2000,1,1)) (2000, 1, 1, 0.0) >>> jd2gcal(*gcal2jd(1950,1,1)) (1950, 1, 1, 0.0) >>> gcal2jd(2000,1,1) (2400000.5, 51544.0) >>> jd2gcal(2400000.5, 51544.0) (2000, 1, 1, 0.0) >>> jd2gcal(2400000.5, 51544.5) (2000, 1, 1, 0.5) >>> jd2gcal(2400000.5, 51544.245) (2000, 1, 1, 0.24500000000261934) >>> jd2gcal(2400000.5, 51544.1) (2000, 1, 1, 0.099999999998544808) >>> jd2gcal(2400000.5, 51544.75) (2000, 1, 1, 0.75) Julian calendar: .. code-block:: python >>> jd2jcal(*jcal2jd(2000, 1, 1)) (2000, 1, 1, 0.0) >>> jd2jcal(*jcal2jd(-4000, 10, 11)) (-4000, 10, 11, 0.0) Gregorian leap year: .. code-block:: python >>> from jdcal import is_leap >>> is_leap(2000) True >>> is_leap(2100) False JD for zero point of MJD, and MJD for JD2000.0: .. 
code-block:: python >>> from jdcal import MJD_0, MJD_JD2000 >>> print MJD_0 2400000.5 >>> print MJD_JD2000 51544.5 Installation ------------ The module can be installed using `pip`_ or `easy_install`_:: $ pip install jdcal or, :: $ easy_install jdcal Tests are in ``test_jdcal.py``. Credits -------- 1. A good amount of the code is based on the excellent `TPM`_ C library by `<NAME>`_. 2. The inspiration to split Julian dates into two numbers came from the `IAU SOFA`_ C library. No code or algorithm from the SOFA library is used in `jdcal`. License ------- Released under BSD; see LICENSE.txt. For comments and suggestions, email to user `prasanthhn` in the `gmail.com` domain. <file_sep>/geosoft/gxapi/GXVA.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXVV import GXVV ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block import numpy as np from . import gxapi_cy_extend ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVA(gxapi_cy.WrapVA): """ GXVA class. The `GXVA <geosoft.gxapi.GXVA>` class is the 2-Dimensional analogue to the `GXVV <geosoft.gxapi.GXVV>` class. When displayed in a database, `GXVA <geosoft.gxapi.GXVA>` objects are displayed graphically as profiles, one to a cell, and can also be displayed one column of data at a time by specifying an index; e.g. CH[0]. A `GXVA <geosoft.gxapi.GXVA>` object is declared with a fixed number of columns, which cannot be altered. The number of rows, however can be changed, in the same way that the length of a `GXVV <geosoft.gxapi.GXVV>` can be changed. Data can be added or extracted using VVs, either by row or column. A `GXVA <geosoft.gxapi.GXVA>` is used to store an array of data in which each element may have multiple elements. 
For example, 256-channel radiometric data can be stored in a `GXVA <geosoft.gxapi.GXVA>` that is 256 elements wide. """ def __init__(self, handle=0): super(GXVA, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVA <geosoft.gxapi.GXVA>` :returns: A null `GXVA <geosoft.gxapi.GXVA>` :rtype: GXVA """ return GXVA() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def get_array(self, start_row, start_col, rows, cols, data, gs_type): """ Get an array of data from a `GXVA <geosoft.gxapi.GXVA>`. :param start_row: Starting Row :param start_col: Starting Column :param rows: # rows :param cols: # cols :param data: Data buffer to copy `GXVA <geosoft.gxapi.GXVA>` data into :param gs_type: :ref:`GS_TYPES` :type start_row: int :type start_col: int :type rows: int :type cols: int :type data: bytearray :type gs_type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_array(start_row, start_col, rows, cols, data, gs_type) def set_array(self, start_row, start_col, rows, cols, data, gs_type): """ Set a range of data in an array :param start_row: Starting Row :param start_col: Starting Column :param rows: # rows :param cols: # cols :param data: Data buffer to copy into `GXVA <geosoft.gxapi.GXVA>` :param gs_type: :ref:`GS_TYPES` :type start_row: int :type start_col: int :type rows: int :type cols: int :type data: bytearray :type gs_type: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_array(start_row, start_col, rows, cols, data, gs_type) def add_elevations_vv_to_depths(self, vv, negative_depths): """ Add one `GXVV <geosoft.gxapi.GXVV>` value to each row of the `GXVA <geosoft.gxapi.GXVA>`, output true elevation. :param vv: Elevations to add :param negative_depths: Use negative `GXVA <geosoft.gxapi.GXVA>` depths (0:No, 1:Yes)? :type vv: GXVV :type negative_depths: int .. versionadded:: 7.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Adds each value in an input elevation `GXVV <geosoft.gxapi.GXVV>` to all the values at the same fid in a depths `GXVA <geosoft.gxapi.GXVA>`. Includes an option for negative depths down (e.g. a relative level). """ self._add_elevations_vv_to_depths(vv, negative_depths) def append(self, v_aa): """ Appends VAs :param v_aa: `GXVA <geosoft.gxapi.GXVA>` to append :type v_aa: GXVA .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the VAs have different numbers of columns, the smaller number is used in the copy operation. """ self._append(v_aa) def average(self, vv, rc): """ Average elements in a `GXVA <geosoft.gxapi.GXVA>` by row or column :param vv: `GXVV <geosoft.gxapi.GXVV>` in which to place average results :param rc: :ref:`VA_AVERAGE` :type vv: GXVV :type rc: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The output `GXVV <geosoft.gxapi.GXVV>` will be dimensioned by the number of rows or columns in the input `GXVV <geosoft.gxapi.GXVV>` depending on the :ref:`VA_AVERAGE` setting. Dummies are not included in the average. 
""" self._average(vv, rc) def copy(self, v_as): """ Copy one `GXVA <geosoft.gxapi.GXVA>` to another. :param v_as: source :type v_as: GXVA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(v_as) def copy2(self, d_row, d_col, v_as, s_row, s_col, rows, cols): """ Copy part of a vector into part of another vector. :param d_row: Destination start row :param d_col: Destination start column :param v_as: Source `GXVA <geosoft.gxapi.GXVA>` (can be the same as Destination) :param s_row: Source start row :param s_col: Source start column :param rows: Number of rows :param cols: Number of columns :type d_row: int :type d_col: int :type v_as: GXVA :type s_row: int :type s_col: int :type rows: int :type cols: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 1. Unlike `copy <geosoft.gxapi.GXVA.copy>` destination `GXVA <geosoft.gxapi.GXVA>` is not reallocated, nor are the dimensions changed. The caller must make any desired changes. 2. All `GXVA <geosoft.gxapi.GXVA>` types are supported and will be converted using Convert_GS if necessary. """ self._copy2(d_row, d_col, v_as, s_row, s_col, rows, cols) @classmethod def create(cls, type, rows, cols): """ Create a `GXVA <geosoft.gxapi.GXVA>`. :param type: :ref:`GEO_VAR` :param rows: Maximum number of rows in the `GXVA <geosoft.gxapi.GXVA>`, >= 0 :param cols: Number of columns in the `GXVA <geosoft.gxapi.GXVA>`, > 0 :type type: int :type rows: int :type cols: int :returns: `GXVA <geosoft.gxapi.GXVA>` Object :rtype: GXVA .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapVA._create(GXContext._get_tls_geo(), type, rows, cols) return GXVA(ret_val) @classmethod def create_ext(cls, type, rows, cols): """ Create a `GXVA <geosoft.gxapi.GXVA>`, using one of the :ref:`GS_TYPES` special data types. :param type: :ref:`GS_TYPES` :param rows: Maximum number of rows in the `GXVA <geosoft.gxapi.GXVA>`, >= 0 :param cols: Number of columns in the `GXVA <geosoft.gxapi.GXVA>`, > 0 :type type: int :type rows: int :type cols: int :returns: `GXVA <geosoft.gxapi.GXVA>`, aborts if creation fails :rtype: GXVA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `GXVV.create <geosoft.gxapi.GXVV.create>` """ ret_val = gxapi_cy.WrapVA._create_ext(GXContext._get_tls_geo(), type, rows, cols) return GXVA(ret_val) @classmethod def create_vv(cls, vv, rows, columns): """ Create a `GXVA <geosoft.gxapi.GXVA>` using the data in a `GXVV <geosoft.gxapi.GXVV>`. :param vv: `GXVV <geosoft.gxapi.GXVV>` with the data :param rows: # of rows :param columns: # of columns :type vv: GXVV :type rows: int :type columns: int :returns: `GXVA <geosoft.gxapi.GXVA>`, aborts if creation fails :rtype: GXVA .. versionadded:: 7.2.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `GXVV.create <geosoft.gxapi.GXVV.create>` """ ret_val = gxapi_cy.WrapVA._create_vv(GXContext._get_tls_geo(), vv, rows, columns) return GXVA(ret_val) def get_full_vv(self): """ Get the full `GXVV <geosoft.gxapi.GXVV>` from the `GXVA <geosoft.gxapi.GXVA>`. :returns: `GXVV <geosoft.gxapi.GXVV>` Object :rtype: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** No data is copied, this is the handle to the data `GXVV <geosoft.gxapi.GXVV>` in the `GXVA <geosoft.gxapi.GXVA>`. The fid start/increment of the `GXVA <geosoft.gxapi.GXVA>` is passed to the `GXVV <geosoft.gxapi.GXVV>` at the time of the call. If a new `GXVA <geosoft.gxapi.GXVA>` is read, you must call GetFull_VV_VA to get the new fid in the `GXVV <geosoft.gxapi.GXVV>`. """ ret_val = self._get_full_vv() return GXVV(ret_val) def get_vv(self, no, row_col, vv): """ Get a row or column of data as a `GXVV <geosoft.gxapi.GXVV>` from an array. :param no: Row or Column # (0 is first) :param row_col: :ref:`VA_OBJECT` :param vv: `GXVV <geosoft.gxapi.GXVV>` in which to place data :type no: int :type row_col: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_vv(no, row_col, vv) def col(self): """ Return number of columns in `GXVA <geosoft.gxapi.GXVA>` :returns: Columns in `GXVA <geosoft.gxapi.GXVA>` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** `len <geosoft.gxapi.GXVA.len>` returns the number of rows. """ ret_val = self._col() return ret_val def get_int(self, row, col): """ Get an integer element from a `GXVA <geosoft.gxapi.GXVA>`. :param row: Row :param col: Column :type row: int :type col: int :returns: Element wanted, `rDUMMY <geosoft.gxapi.rDUMMY>`, `iDUMMY <geosoft.gxapi.iDUMMY>` or blank string if the value is dummy or outside of the range of data. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Type conversions are performed if necessary. 
Dummy values are converted to "*" string. """ ret_val = self._get_int(row, col) return ret_val def get_string(self, row, col, str_val): """ Get a string element from a `GXVA <geosoft.gxapi.GXVA>`. :param row: Row :param col: Column :param str_val: String in which to place element :type row: int :type col: int :type str_val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns element wanted, `rDUMMY <geosoft.gxapi.rDUMMY>`, `iDUMMY <geosoft.gxapi.iDUMMY>` or blank string if the value is dummy or outside of the range of data. Type conversions are performed if necessary. Dummy values are converted to "*" string. """ str_val.value = self._get_string(row, col, str_val.value.encode()) def len(self): """ Return length (number of rows) in a `GXVA <geosoft.gxapi.GXVA>`. :returns: Length of `GXVA <geosoft.gxapi.GXVA>` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** `col <geosoft.gxapi.GXVA.col>` returns the number of columns. """ ret_val = self._len() return ret_val @classmethod def index_order(cls, vv, va): """ Reorder a `GXVA <geosoft.gxapi.GXVA>` based on an index `GXVV <geosoft.gxapi.GXVV>` :param vv: Index `GXVV <geosoft.gxapi.GXVV>` of type INT :param va: `GXVA <geosoft.gxapi.GXVA>` to order :type vv: GXVV :type va: GXVA .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Given a row index `GXVV <geosoft.gxapi.GXVV>` (of type INT), this method reorders a `GXVA <geosoft.gxapi.GXVA>`. Please make sure that the index holds valid information. 
""" gxapi_cy.WrapVA._index_order(GXContext._get_tls_geo(), vv, va) def lookup_index(self, vvi, var): """ Lookup a `GXVA <geosoft.gxapi.GXVA>` from another `GXVA <geosoft.gxapi.GXVA>` using an index `GXVV <geosoft.gxapi.GXVV>`. :param vvi: Index `GXVV <geosoft.gxapi.GXVV>` of REAL :param var: `GXVA <geosoft.gxapi.GXVA>` to output results (same type as Data `GXVA <geosoft.gxapi.GXVA>`) :type vvi: GXVV :type var: GXVA .. versionadded:: 6.4.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Fractional values in the `GXVV <geosoft.gxapi.GXVV>` will interpolate between the value at the whole integer value and the next whole integer, dummy if outside the `GXVA <geosoft.gxapi.GXVA>`. """ self._lookup_index(vvi, var) def range(self, startRow, startCol, rows, columns, min, max): """ Computes the minimum and maximum range of the data, in doubles, in a vector while ignoring dummies, for a range of columns and rows. :param startRow: Starting row (0 to nRows-1) :param startCol: Starting column (0 to nColumns-1 :param rows: Number of rows (-1 for all from start) :param columns: Number of columns (-1 for all from start) :param min: Minimum value - returned :param max: Maximum value - returned :type startRow: int :type startCol: int :type rows: int :type columns: int :type min: float_ref :type max: float_ref .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min.value, max.value = self._range(startRow, startCol, rows, columns, min.value, max.value) def range_double(self, min, max): """ Computes the minimum and maximum range of the data, in doubles, in a vector while ignoring dummies. :param min: Minimum value - returned :param max: Maximum value - returned :type min: float_ref :type max: float_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min.value, max.value = self._range_double(min.value, max.value) def range_columns(self, startRow, startCol, rows, columns, minimums, maximums): """ Computes the minimum and maximum range of the data for individual columns, in doubles, for a range of columns and rows. :param startRow: Starting row (0 to nRows-1) :param startCol: Starting column (0 to nColumns-1 :param rows: Number of rows (-1 for all from start) :param columns: Number of columns (-1 for all from start) :param minimums: Minimum values returned:`VV` object - GS_REAL :param maximums: Maximum values returned:`VV` object - GS_REAL :type startRow: int :type startCol: int :type rows: int :type columns: int :type minimums: GXVV :type maximums: GXVV .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._range_columns(startRow, startCol, rows, columns, minimums, maximums) def re_fid(self, start, incr, length): """ Re-sample a `GXVA <geosoft.gxapi.GXVA>` to a new fid start/icrement :param start: New fid start :param incr: New fid increment :param length: New length :type start: float :type incr: float :type length: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._re_fid(start, incr, length) def reverse(self): """ Reverses the order of the rows in a `GXVA <geosoft.gxapi.GXVA>`. .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._reverse() def get_fid_incr(self): """ Gets the Fiducial increment from a `GXVA <geosoft.gxapi.GXVA>` :returns: Fiducial increment of the `GXVA <geosoft.gxapi.GXVA>`. :rtype: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_fid_incr() return ret_val def get_fid_start(self): """ Gets the Fiducial start from a `GXVA <geosoft.gxapi.GXVA>` :returns: Fiducial start of the `GXVA <geosoft.gxapi.GXVA>`. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_fid_start() return ret_val def get_double(self, row, col): """ Get a real element from a `GXVA <geosoft.gxapi.GXVA>`. :param row: Row :param col: Column :type row: int :type col: int :returns: Element wanted, `rDUMMY <geosoft.gxapi.rDUMMY>`, `iDUMMY <geosoft.gxapi.iDUMMY>` or blank string if the value is dummy or outside of the range of data. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Type conversions are performed if necessary. Dummy values are converted to "*" string. """ ret_val = self._get_double(row, col) return ret_val def set_fid_incr(self, incr): """ Sets the Fiducial increment of a `GXVA <geosoft.gxapi.GXVA>` :param incr: New increment :type incr: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_fid_incr(incr) def set_fid_start(self, start): """ Sets the Fiducial start of a `GXVA <geosoft.gxapi.GXVA>` :param start: New start :type start: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_fid_start(start) def set_int(self, row, col, value): """ Set an integer element in a `GXVA <geosoft.gxapi.GXVA>`. 
:param row: Row :param col: Column :param value: Value to set :type row: int :type col: int :type value: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. If the element is > current `GXVA <geosoft.gxapi.GXVA>` length, the `GXVA <geosoft.gxapi.GXVA>` length is increased. """ self._set_int(row, col, value) def set_ln(self, rows): """ Set the length (number of rows) of the `GXVA <geosoft.gxapi.GXVA>` :param rows: Length :type rows: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The number of columns in a `GXVA <geosoft.gxapi.GXVA>` is fixed, and cannot be altered once the `GXVA <geosoft.gxapi.GXVA>` is created. """ self._set_ln(rows) def set_double(self, row, col, value): """ Set a real element in a `GXVA <geosoft.gxapi.GXVA>`. :param row: Row :param col: Column :param value: Value to set :type row: int :type col: int :type value: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. If the element is > current `GXVA <geosoft.gxapi.GXVA>` length, the `GXVA <geosoft.gxapi.GXVA>` length is increased. """ self._set_double(row, col, value) def set_string(self, row, col, value): """ Set a string element in a `GXVA <geosoft.gxapi.GXVA>`. :param row: Row :param col: Column :param value: String to set :type row: int :type col: int :type value: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. If the element is > current `GXVA <geosoft.gxapi.GXVA>` length, the `GXVA <geosoft.gxapi.GXVA>` length is increased. 
""" self._set_string(row, col, value.encode()) def set_vv(self, no, row_col, vv): """ Set a row or column of data in an array from a `GXVV <geosoft.gxapi.GXVV>`. :param no: Row or Column # (0 is first) :param row_col: :ref:`VA_OBJECT` :param vv: `GXVV <geosoft.gxapi.GXVV>` from which to get data :type no: int :type row_col: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_vv(no, row_col, vv) def trans(self, base, mult): """ Translate (`GXVA <geosoft.gxapi.GXVA>` + base ) * mult :param base: Base value :param mult: Mult value :type base: float :type mult: float .. versionadded:: 7.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Supports all `GXVA <geosoft.gxapi.GXVA>` types using an internal double `GXVV <geosoft.gxapi.GXVV>`. """ self._trans(base, mult) def window(self, start, count, vv): """ Window a `GXVA <geosoft.gxapi.GXVA>` to a `GXVV <geosoft.gxapi.GXVV>` based in intergral frame :param start: First element in the window :param count: Number of elements in the window :param vv: `GXVV <geosoft.gxapi.GXVV>` in which to place results :type start: int :type count: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The defined window must be within the `GXVA <geosoft.gxapi.GXVA>` element dimensions. The windowed result will be the simple sum of all values in the window. If any values are dummy, the result will be dummy. 
""" self._window(start, count, vv) def window2(self, start, end, vv): """ Window a `GXVA <geosoft.gxapi.GXVA>` to a `GXVV <geosoft.gxapi.GXVV>` based on fractional frame :param start: Start point (from 0.0) :param end: End point (< `GXVA <geosoft.gxapi.GXVA>` elements - 1.0) :param vv: `GXVV <geosoft.gxapi.GXVV>` in which to place results :type start: float :type end: float :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The defined window must be within the `GXVA <geosoft.gxapi.GXVA>` element dimensions. The windowed result will be the simple sum of all values in the window. If any values are dummy, the result will be dummy. """ self._window2(start, end, vv) def check_for_repeating(self, vv_t, subtract_vv, vv_sub, tol): """ Window a `GXVA <geosoft.gxapi.GXVA>` to a `GXVV <geosoft.gxapi.GXVV>` based on fractional frame :param vv_t: Items to test for repeats (length equal to the number of columns in the `GXVA <geosoft.gxapi.GXVA>`) :param subtract_vv: If set to 1, subtract single values in the following `GXVV <geosoft.gxapi.GXVV>` from every array row item before testing (e.g. an elevation value) :param vv_sub: Values to subtract from each row before doing the comparison test (length equal to the length of the `GXVA <geosoft.gxapi.GXVA>`). Can be VV_NULL (-1) if above subtraction parameter is zero :param tol: Comparison tolerance - set to zero or dummy for exact match :type vv_t: GXVV :type subtract_vv: int :type vv_sub: GXVV :type tol: float :returns: 1 if rows repeat, 0 if not. :rtype: int .. versionadded:: 8.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Returns 1 if all rows contain values which match the input values. Optionally, row values can be offset by amounts specified with a secondary `GXVV <geosoft.gxapi.GXVV>`. 
This function was designed to detect "depth" array channels, including those which might have been offset with topography on each row. An absolute tolerance can be specified to ignore numerical noise. """ ret_val = self._check_for_repeating(vv_t, subtract_vv, vv_sub, tol) return ret_val def check_for_repeating2(self, vv_t, subtract_vv, vv_sub, tol, bad_row, bad_col): """ Window a `GXVA <geosoft.gxapi.GXVA>` to a `GXVV <geosoft.gxapi.GXVV>` based on fractional frame :param vv_t: Items to test for repeats (length equal to the number of columns in the `GXVA <geosoft.gxapi.GXVA>`) :param subtract_vv: If set to 1, subtract single values in the following `GXVV <geosoft.gxapi.GXVV>` from every array row item before testing (e.g. an elevation value) :param vv_sub: Values to subtract from each row before doing the comparison test (length equal to the length of the `GXVA <geosoft.gxapi.GXVA>`). Can be VV_NULL (-1) if above subtraction parameter is zero :param tol: Comparison tolerance - set to zero or dummy for exact match :param bad_row: Row index of first mismatch :param bad_col: Column index of first mismatch :type vv_t: GXVV :type subtract_vv: int :type vv_sub: GXVV :type tol: float :type bad_row: int_ref :type bad_col: int_ref :returns: 1 if rows repeat, 0 if not. :rtype: int .. versionadded:: 8.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Returns 1 if all rows contain values which match the input values. Optionally, row values can be offset by amounts specified with a secondary `GXVV <geosoft.gxapi.GXVV>`. This function was designed to detect "depth" array channels, including those which might have been offset with topography on each row. An absolute tolerance can be specified to ignore numerical noise. This version returns the row and column index of first mismatch. 
""" ret_val, bad_row.value, bad_col.value = self._check_for_repeating2(vv_t, subtract_vv, vv_sub, tol, bad_row.value, bad_col.value) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block def get_array_np(self, start_row: int, start_col: int, rows: int, cols: int, np_dtype: type(np.dtype)): from .GXNumpy import gs_from_np gs_type = gs_from_np(np_dtype) return np.asarray(self.get_data_array(start_row, start_col, rows, cols, gs_type)) def set_array_np(self, start_row: int, start_col: int, np_array: type(np.ndarray)): from .GXNumpy import gs_from_np gs_type = gs_from_np(np_array.dtype) if np_array.ndim != 2: raise GXAPIError("Only 2D Numpy arrays supported for this method"); rows = np_array.shape[0]; columns = np_array.shape[1]; if not np_array.flags['C_CONTIGUOUS']: np_array = np.ascontiguousarray(np_array) self.set_array(start_row, start_col, rows, columns, np_array.data.tobytes(), gs_type) def get_data_array(self, start_row: int, start_col: int, rows: int, cols: int, gs_type: int): return gxapi_cy_extend.GXMemMethods.get_array_data_va(GXContext._internal_p(), self._internal_handle(), start_row, start_col, rows, cols, gs_type) ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXDATAMINE.rst .. _GXDATAMINE: GXDATAMINE class ================================== .. autoclass:: geosoft.gxapi.GXDATAMINE :members: .. _GIS_DMTYPE: GIS_DMTYPE constants ----------------------------------------------------------------------- Datamine file types .. autodata:: geosoft.gxapi.GIS_DMTYPE_STRING :annotation: .. autoattribute:: geosoft.gxapi.GIS_DMTYPE_STRING .. autodata:: geosoft.gxapi.GIS_DMTYPE_WIREFRAME_TR :annotation: .. autoattribute:: geosoft.gxapi.GIS_DMTYPE_WIREFRAME_TR .. autodata:: geosoft.gxapi.GIS_DMTYPE_DTM :annotation: .. autoattribute:: geosoft.gxapi.GIS_DMTYPE_DTM .. 
autodata:: geosoft.gxapi.GIS_DMTYPE_BLOCKMODEL :annotation: .. autoattribute:: geosoft.gxapi.GIS_DMTYPE_BLOCKMODEL .. autodata:: geosoft.gxapi.GIS_DMTYPE_WIREFRAME_PT :annotation: .. autoattribute:: geosoft.gxapi.GIS_DMTYPE_WIREFRAME_PT .. autodata:: geosoft.gxapi.GIS_DMTYPE_POINTDATA :annotation: .. autoattribute:: geosoft.gxapi.GIS_DMTYPE_POINTDATA <file_sep>/docs/GXTRANSFORMLAYER.rst .. _GXTRANSFORMLAYER: GXTRANSFORMLAYER class ================================== .. autoclass:: geosoft.gxapi.GXTRANSFORMLAYER :members: <file_sep>/docs/GXST2.rst .. _GXST2: GXST2 class ================================== .. autoclass:: geosoft.gxapi.GXST2 :members: .. _ST2_CORRELATION: ST2_CORRELATION constants ----------------------------------------------------------------------- Correlation style .. autodata:: geosoft.gxapi.ST2_CORR :annotation: .. autoattribute:: geosoft.gxapi.ST2_CORR .. autodata:: geosoft.gxapi.ST2_PCORR :annotation: .. autoattribute:: geosoft.gxapi.ST2_PCORR <file_sep>/examples/tutorial/Grids and Images/grid_convert_format_with_coordinate_system.py import geosoft.gxpy.gx as gx import geosoft.gxpy.grid as gxgrid # create context gxc = gx.GXpy() # open grid grid_surfer = gxgrid.Grid.open('elevation_surfer.grd(SRF;VER=V7)', mode=gxgrid.FILE_READWRITE) # define the coordinate system grid_surfer.coordinate_system = 'GDA94 / UTM zone 54S' # copy the grid to an ER Mapper format grid file grid_erm = gxgrid.Grid.copy(grid_surfer, 'elevation.ers(ERM)', overwrite=True) print(str(grid_erm.coordinate_system)) exit() <file_sep>/examples/tutorial/Geosoft Databases/modify_channel_data.py import geosoft.gxpy as gxpy import geosoft.gxpy.gdb as gxdb gxc = gxpy.gx.GXpy() # open the database, best practice is to use a 'with ...' 
construct with gxdb.Geosoft_gdb.open('mag_data_split') as gdb: # make a new channel for the output, duplicate properties of 'mag' channel new_mag_channel = gxdb.Channel.new(gdb, 'mag_base', dup='mag', replace=True) # work through each line for line in gdb.list_lines(): print ('processing line {}'.format(line)) # read data from the line. # The read_channel method returns the data as a numpy array, together with the fiducial mag_data, fid = gdb.read_channel(line, 'mag') # use simple numpy math to subtract 5000, then save to the new_mag_channel mag_data = mag_data - 5000 gdb.write_channel(line, new_mag_channel, mag_data, fid) exit() <file_sep>/geosoft/gxapi/GXDB.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXLST import GXLST ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDB(gxapi_cy.WrapDB): """ GXDB class. The `GXDB <geosoft.gxapi.GXDB>` class is used to create, open and work with databases and database symbols. Database symbols are objects inside databases, such as lines, channels and blobs **Note:** The following defines are not used by any methods but are used by GX's: `DB_ACTIVITY_BLOB <geosoft.gxapi.DB_ACTIVITY_BLOB>` """ def __init__(self, handle=0): super(GXDB, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDB <geosoft.gxapi.GXDB>` :returns: A null `GXDB <geosoft.gxapi.GXDB>` :rtype: GXDB """ return GXDB() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Channel def create_dup(self, file): """ This method makes a brand new database identical to the input Database in-size. The database is opened in ReadWrite Mode. :param file: Name of the Database File to Create :type file: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._create_dup(file.encode()) def create_dup_comp(self, file, level): """ This method makes a brand new database identical to the input Database in-size except it changes the compression. The database is opened in ReadWrite Mode. :param file: Name of the Database File to Create :param level: :ref:`DB_COMP` :type file: str :type level: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._create_dup_comp(file.encode(), level) def dup_symb_across(self, dbo, symb): """ Create a new Symbol by duplicating an existing symbol. exactly the same type but in output database. The symbol must not already exist in the output database. :param dbo: Database output :param symb: Symbol Handle to duplicate :type dbo: GXDB :type symb: int :returns: New Symbol Handle :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._dup_symb_across(dbo, symb) return ret_val def easy_maker_symb(self, symb, name, groups): """ Adds a Maker to the database symbol based on current GX :param symb: Symbol to create maker for :param name: Maker name, used in menu prompt :param groups: INI groups (terminate each with a ";") :type symb: int :type name: str :type groups: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._easy_maker_symb(symb, name.encode(), groups.encode()) def get_chan_str(self, line, chan, ind, str_val): """ Get individual elements in a channel. :param line: Line :param chan: Channel :param ind: Index :param str_val: String :type line: int :type chan: int :type ind: int :type str_val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** These methods are slow and should only be used when performance is not an issue. """ str_val.value = self._get_chan_str(line, chan, ind, str_val.value.encode()) def get_chan_vv(self, line, chan, vv): """ Place the contents of a channel in a `GXVV <geosoft.gxapi.GXVV>`. :param line: Line :param chan: Channel :param vv: `GXVV <geosoft.gxapi.GXVV>` in which to place the data :type line: int :type chan: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a `GXVA <geosoft.gxapi.GXVA>` channel is specified, then element [0] of this `GXVA <geosoft.gxapi.GXVA>` channel is used to populated the `GXVV <geosoft.gxapi.GXVV>`. .. seealso:: `GXVV <geosoft.gxapi.GXVV>` class. """ self._get_chan_vv(line, chan, vv) def get_chan_vv_expanded(self, line, chan, vv): """ Read a channel into a `GXVV <geosoft.gxapi.GXVV>`. If the channel is a `GXVA <geosoft.gxapi.GXVA>` channel it is treaded as a `GXVV <geosoft.gxapi.GXVV>` channel with multiple values per fid and the FID expation is set to the array size. :param line: Line :param chan: Channel :param vv: `GXVV <geosoft.gxapi.GXVV>` in which to place the data :type line: int :type chan: int :type vv: GXVV .. 
versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method is to be used in conjunction with the `GXVV.re_fid_vv <geosoft.gxapi.GXVV.re_fid_vv>` method that will honor the FID Expansion setting. .. seealso:: `GXVV <geosoft.gxapi.GXVV>` class. """ self._get_chan_vv_expanded(line, chan, vv) def get_ipj(self, ch, ipj): """ Get georeference information in an `GXIPJ <geosoft.gxapi.GXIPJ>`. :param ch: Symbol :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` to fill in :type ch: int :type ipj: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the channel does not have an `GXIPJ <geosoft.gxapi.GXIPJ>`, the `GXIPJ <geosoft.gxapi.GXIPJ>` that is returned will have an unknown projection. """ self._get_ipj(ch, ipj) def get_itr(self, ch, itr): """ Get `GXITR <geosoft.gxapi.GXITR>` for a channel. :param ch: Channel :param itr: `GXITR <geosoft.gxapi.GXITR>` to fill in :type ch: int :type itr: GXITR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a channel does not have an `GXITR <geosoft.gxapi.GXITR>`, `get_itr <geosoft.gxapi.GXDB.get_itr>` will not change the passed `GXITR <geosoft.gxapi.GXITR>`. Channel must be locked for READONLY or READWRITE. """ self._get_itr(ch, itr) def get_reg_symb(self, symb, reg): """ Get a `GXREG <geosoft.gxapi.GXREG>` object from a symbol :param symb: Symbol, `NULLSYMB <geosoft.gxapi.NULLSYMB>` for the database `GXREG <geosoft.gxapi.GXREG>` :param reg: `GXREG <geosoft.gxapi.GXREG>` to copy data into :type symb: int :type reg: GXREG .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_reg_symb(symb, reg) def get_reg_symb_setting(self, symb, name, setting): """ Get a `GXREG <geosoft.gxapi.GXREG>` string setting from a symbol reg :param symb: Symbol, `NULLSYMB <geosoft.gxapi.NULLSYMB>` for the database `GXREG <geosoft.gxapi.GXREG>` :param name: `GXREG <geosoft.gxapi.GXREG>` entry name :param setting: Returned setting :type symb: int :type name: str :type setting: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The symbol `GXREG <geosoft.gxapi.GXREG>` is used to store a variety of attribute about the symbol. Following a conventionally used entries: UNITS - channel units CLASS - symbol class name (i.e. "Assay") _PJ_ipj - projection blob name _PJ_x - projection coordinate pair _PJ_y _PJ_name - projection GXF-style info _PJ_ellipsoid _PJ_projection _PJ_units _PJ_datum_transform This is a convenient but low-performance way to get/set `GXREG <geosoft.gxapi.GXREG>` settings. If performance is an issue, and more than one setting is to be Get and or Set, use the `GXREG <geosoft.gxapi.GXREG>` directly. """ setting.value = self._get_reg_symb_setting(symb, name.encode(), setting.value.encode()) def get_va_chan_vv(self, line, chan, vv, offset, items): """ Place the contents of a specific part of a channel in a `GXVV <geosoft.gxapi.GXVV>`. :param line: Line :param chan: Channel :param vv: `GXVV <geosoft.gxapi.GXVV>` in which to place the data :param offset: Offset :param items: Number to Write :type line: int :type chan: int :type vv: GXVV :type offset: int :type items: int .. 
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a `GXVA <geosoft.gxapi.GXVA>` channel is specified, then element [0] of this `GXVA <geosoft.gxapi.GXVA>` channel is used to populated the `GXVV <geosoft.gxapi.GXVV>`. .. seealso:: `GXVV <geosoft.gxapi.GXVV>` class. """ self._get_va_chan_vv(line, chan, vv, offset, items) def blobs_max(self): """ Gets Maximum Number of Blobs in the Database :returns: Maximum Number of Blobs in the Database :rtype: int .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._blobs_max() return ret_val def chans_max(self): """ Gets Maximum Number of Channels in the Database :returns: Maximum Number of Channels in the Database :rtype: int .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._chans_max() return ret_val def format_chan(self, chan, val, str_val): """ Format a real value based on a channel format. :param chan: Channel handle :param val: Value to format :param str_val: String :type chan: int :type val: float :type str_val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the passed string is too short, the result will be "**". """ str_val.value = self._format_chan(chan, val, str_val.value.encode()) def get_chan_array_size(self, chan): """ This method Gets a channel's array size for a given channel handle. :param chan: Channel handle :type chan: int :returns: Channel type :rtype: int .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_chan_array_size(chan) return ret_val def get_chan_class(self, chan, cl): """ This method gets a channel's label :param chan: Channel handle :param cl: Returned class into :type chan: int :type cl: str_ref .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel label is stored in the "CLASS" parameter of the channel reg. If no class is defined, then an empty string is returned. The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ cl.value = self._get_chan_class(chan, cl.value.encode()) def get_chan_decimal(self, chan): """ This method gets a channel's number of digits displayed to the right of the decimal point. :param chan: Channel handle :type chan: int :returns: Number of digits displayed to right of decimal :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ ret_val = self._get_chan_decimal(chan) return ret_val def get_chan_format(self, chan): """ This method Gets a channel's display format for a given channel handle. :param chan: Channel handle :type chan: int :returns: Channel display format :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The returned format is one of the :ref:`DB_CHAN_FORMAT`. 
The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ ret_val = self._get_chan_format(chan) return ret_val def get_chan_int(self, line, chan, ind): """ Get individual elements in a channel. :param line: Line :param chan: Channel :param ind: Index :type line: int :type chan: int :type ind: int :returns: Value, or dummy if out of range. For settings, terminates if error. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** These methods are slow and should only be used when performance is not an issue. """ ret_val = self._get_chan_int(line, chan, ind) return ret_val def get_chan_label(self, chan, label): """ This method gets a channel's label :param chan: Channel handle :param label: Returned label into :type chan: int :type label: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel label is stored in the "LABEL" parameter of the channel reg. If the setting is empty, the channel name is returned. The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ label.value = self._get_chan_label(chan, label.value.encode()) def get_chan_name(self, chan, name): """ This method Gets a channel's name for a given channel handle. :param chan: Channel handle :param name: String to place name into :type chan: int :type name: str_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ name.value = self._get_chan_name(chan, name.value.encode()) def get_chan_protect(self, chan): """ This method gets a channel's read-only protection status. :param chan: Channel handle :type chan: int :returns: :ref:`DB_CHAN_PROTECTION` :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ ret_val = self._get_chan_protect(chan) return ret_val def get_chan_type(self, chan): """ This method Gets a channel's type for a given channel handle. :param chan: Channel handle :type chan: int :returns: Channel type :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The type returned is one of the :ref:`DB_CATEGORY_CHAN`. Use the GS_SIMPLE_TYPE() macro to convert to INT,REAL or string types. The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ ret_val = self._get_chan_type(chan) return ret_val def get_chan_unit(self, chan, unit): """ This method Gets a channel's unit :param chan: Channel handle :param unit: String to place unit into :type chan: int :type unit: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The unit label is stored in the "UNITS" parameter of the channel reg. 
The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ unit.value = self._get_chan_unit(chan, unit.value.encode()) def get_chan_width(self, chan): """ This method gets a channel's display width for a given channel handle. :param chan: Channel handle :type chan: int :returns: Channel display width :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ ret_val = self._get_chan_width(chan) return ret_val def get_name(self, name, psz_name): """ Gets a name from the database. :param name: :ref:`DB_NAME` :param psz_name: Name returned :type name: int :type psz_name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ psz_name.value = self._get_name(name, psz_name.value.encode()) def get_modification_count(self): """ Gets the modification count from the database. :rtype: int .. versionadded:: 2023.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A running count of the times a database has been modified between commits. Useful, for instance to be able to discard changes made by a GX on a database that are required for the operation of the GX but which the user does not want to persist, and to avoid the Save changes question when you go to change the database. So if the count is 0 when you start, it is safe to call Discard_DB on exit without worrying about throwing away changes made previously. 
""" ret_val = self._get_modification_count() return ret_val def get_reg_symb_setting_int(self, symb, name): """ Get an integer-valued `GXREG <geosoft.gxapi.GXREG>` setting from a symbol reg :param symb: Symbol, `NULLSYMB <geosoft.gxapi.NULLSYMB>` for the database `GXREG <geosoft.gxapi.GXREG>` :param name: `GXREG <geosoft.gxapi.GXREG>` entry name :type symb: int :type name: str :returns: The setting, or `iDUMMY <geosoft.gxapi.iDUMMY>` if not found or not convertable. :rtype: int .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Same as `get_reg_symb_setting <geosoft.gxapi.GXDB.get_reg_symb_setting>`, but converts the setting automatically to an integer value. This is a convenient but low-performance way to get/set `GXREG <geosoft.gxapi.GXREG>` settings. If performance is an issue, and more than one setting is to be Get and or Set, use the `GXREG <geosoft.gxapi.GXREG>` directly. """ ret_val = self._get_reg_symb_setting_int(symb, name.encode()) return ret_val def get_symb_name(self, symb, name): """ This method gets a symbol's name :param symb: Symbol handle :param name: String to place name into :type symb: int :type name: str_ref .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See GetChanName_DB for more information The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ name.value = self._get_symb_name(symb, name.value.encode()) def have_itr(self, ch): """ Returns TRUE if channel has an `GXITR <geosoft.gxapi.GXITR>`. :param ch: Channel :type ch: int :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a channel has an `GXITR <geosoft.gxapi.GXITR>`, the `GXITR <geosoft.gxapi.GXITR>` colors are used to display channel values in the spreadsheet. If a channel does not have an `GXITR <geosoft.gxapi.GXITR>`, `get_itr <geosoft.gxapi.GXDB.get_itr>` will not change the passed `GXITR <geosoft.gxapi.GXITR>`. """ ret_val = self._have_itr(ch) return ret_val def coord_pair(self, chan, pair): """ Get the matching coordinate pair of a channel. :param chan: Channel name :param pair: String in which to place paired channel name :type chan: str :type pair: str_ref :returns: :ref:`DB_COORDPAIR` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the channel does not have a matching coordinate pair, or of the channel does not exist, the returned string will be empty. """ ret_val, pair.value = self._coord_pair(chan.encode(), pair.value.encode()) return ret_val def lines_max(self): """ Gets Maximum number of lines in the database :returns: Maximum number of lines in the database :rtype: int .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._lines_max() return ret_val def users_max(self): """ Gets Maximum number of Users :returns: Maximum number of Users :rtype: int .. 
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._users_max() return ret_val def maker_symb(self, symb, prog, name, groups): """ Adds a Maker to the database symbol :param symb: Symbol to create maker for :param prog: Name of program :param name: Maker name, used in menu prompt :param groups: INI groups (terminate each with a ";") :type symb: int :type prog: str :type name: str :type groups: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._maker_symb(symb, prog.encode(), name.encode(), groups.encode()) def put_chan_vv(self, line, chan, vv): """ Place the contents of a `GXVV <geosoft.gxapi.GXVV>` in a channel. :param line: Line :param chan: Channel :param vv: `GXVV <geosoft.gxapi.GXVV>` from which to get the data :type line: int :type chan: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a `GXVA <geosoft.gxapi.GXVA>` channel is specified, then element [0] of this `GXVA <geosoft.gxapi.GXVA>` channel will be populated with the `GXVV <geosoft.gxapi.GXVV>`. There is a limit of 2000 elements for non-licensed users. .. seealso:: `GXVV <geosoft.gxapi.GXVV>` class. """ self._put_chan_vv(line, chan, vv) def put_va_chan_vv(self, line, chan, vv, offset, items): """ Place the contents of a `GXVV <geosoft.gxapi.GXVV>` at a specific part of a channel. :param line: Line :param chan: Channel :param vv: `GXVV <geosoft.gxapi.GXVV>` from which to get the data :param offset: Offset :param items: Number to Write :type line: int :type chan: int :type vv: GXVV :type offset: int :type items: int .. 
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a `GXVA <geosoft.gxapi.GXVA>` channel is specified, then element [0] of this `GXVA <geosoft.gxapi.GXVA>` channel will be populated with the `GXVV <geosoft.gxapi.GXVV>`. .. seealso:: `GXVV <geosoft.gxapi.GXVV>` class. """ self._put_va_chan_vv(line, chan, vv, offset, items) def read_blob_bf(self, symb, bf): """ Read a blob from a database into a file. :param symb: Blob (`DB_SYMB_BLOB <geosoft.gxapi.DB_SYMB_BLOB>`) to read into `GXBF <geosoft.gxapi.GXBF>` from database :param bf: File to read blob from :type symb: int :type bf: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_blob_bf(symb, bf) def get_chan_double(self, line, chan, ind): """ Get individual elements in a channel. :param line: Line :param chan: Channel :param ind: Index :type line: int :type chan: int :type ind: int :returns: Value, or dummy if out of range. For settings, terminates if error. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** These methods are slow and should only be used when performance is not an issue. """ ret_val = self._get_chan_double(line, chan, ind) return ret_val def get_reg_symb_setting_double(self, symb, name): """ Get a real-valued `GXREG <geosoft.gxapi.GXREG>` setting from a symbol reg :param symb: Symbol, `NULLSYMB <geosoft.gxapi.NULLSYMB>` for the database `GXREG <geosoft.gxapi.GXREG>` :param name: `GXREG <geosoft.gxapi.GXREG>` entry name :type symb: int :type name: str :returns: The setting, or `rDUMMY <geosoft.gxapi.rDUMMY>` if not found or not convertable. :rtype: float .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Same as `get_reg_symb_setting <geosoft.gxapi.GXDB.get_reg_symb_setting>`, but converts the setting automatically to a real value. This is a convenient but low-performance way to get/set `GXREG <geosoft.gxapi.GXREG>` settings. If performance is an issue, and more than one setting is to be Get and or Set, use the `GXREG <geosoft.gxapi.GXREG>` directly. """ ret_val = self._get_reg_symb_setting_double(symb, name.encode()) return ret_val def set_all_chan_protect(self, prot): """ This method sets all the channels' read-only protection status. :param prot: :ref:`DB_CHAN_PROTECTION` :type prot: int .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Value to set must be either `DB_CHAN_PROTECTED <geosoft.gxapi.DB_CHAN_PROTECTED>` or `DB_CHAN_UNPROTECTED <geosoft.gxapi.DB_CHAN_UNPROTECTED>` This method does its own channel locking/unlocking. Channels already lock `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` are ignored. """ self._set_all_chan_protect(prot) def set_chan_class(self, chan, cl): """ Set a channel class :param chan: Channel handle :param cl: Class :type chan: int :type cl: str .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel class is stored in the "CLASS" parameter of the channel reg. The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ self._set_chan_class(chan, cl.encode()) def set_chan_decimal(self, chan, dec): """ This method sets a channel's number of digits displayed to the right of the decimal point. :param chan: Channel handle :param dec: Number of digits to display right of the decimal :type chan: int :type dec: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The number of display digits must be from 0 to 50. The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ self._set_chan_decimal(chan, dec) def set_chan_format(self, chan, format): """ This method sets a channel's display format. :param chan: Channel handle :param format: :ref:`DB_CHAN_FORMAT` :type chan: int :type format: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ self._set_chan_format(chan, format) def set_chan_int(self, line, chan, ind, val): """ Set individual elements in a channel. :param line: Line :param chan: Channel :param ind: Index :param val: Value :type line: int :type chan: int :type ind: int :type val: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** These methods are slow and should only be used when performance is not an issue. """ self._set_chan_int(line, chan, ind, val) def set_chan_label(self, chan, label): """ Set a channel label :param chan: Channel handle :param label: Label :type chan: int :type label: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel label is stored in the "LABEL" parameter of the channel reg. The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ self._set_chan_label(chan, label.encode()) def set_chan_name(self, chan, name): """ This method sets a channel's name. :param chan: Channel handle :param name: String to set channel name to :type chan: int :type name: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ self._set_chan_name(chan, name.encode()) def set_chan_protect(self, chan, prot): """ This method sets a channel's read-only protection status. :param chan: Channel handle :param prot: :ref:`DB_CHAN_PROTECTION` :type chan: int :type prot: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Value to set must be either `DB_CHAN_PROTECTED <geosoft.gxapi.DB_CHAN_PROTECTED>` or `DB_CHAN_UNPROTECTED <geosoft.gxapi.DB_CHAN_UNPROTECTED>` The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ self._set_chan_protect(chan, prot) def set_chan_double(self, line, chan, ind, val): """ Set individual elements in a channel. :param line: Line :param chan: Channel :param ind: Index :param val: Value :type line: int :type chan: int :type ind: int :type val: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** These methods are slow and should only be used when performance is not an issue. """ self._set_chan_double(line, chan, ind, val) def set_chan_str(self, line, chan, ind, str_val): """ Set individual elements in a channel. :param line: Line :param chan: Channel :param ind: Index :param str_val: String :type line: int :type chan: int :type ind: int :type str_val: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** These methods are slow and should only be used when performance is not an issue. 
""" self._set_chan_str(line, chan, ind, str_val.encode()) def set_chan_unit(self, chan, unit): """ This method sets a channel's unit for a given channel handle. :param chan: Channel handle :param unit: String to put channel unit :type chan: int :type unit: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ self._set_chan_unit(chan, unit.encode()) def set_chan_width(self, chan, width): """ This method sets a channel's display width :param chan: Channel handle :param width: Display width :type chan: int :type width: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The number of display digits must be from 0 to 50. The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` """ self._set_chan_width(chan, width) def set_ipj(self, ch1, ch2, ipj): """ Set an `GXIPJ <geosoft.gxapi.GXIPJ>` object into a pair of (X, Y) channels :param ch1: X channel :param ch2: Y channel :type ch1: int :type ch2: int :type ipj: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_ipj(ch1, ch2, ipj) def set_itr(self, ch, itr): """ Set `GXITR <geosoft.gxapi.GXITR>` for a channel. :param ch: Channel :param itr: `GXITR <geosoft.gxapi.GXITR>` to fill in :type ch: int :type itr: GXITR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use `ITR_NULL <geosoft.gxapi.ITR_NULL>` to clear the channel `GXITR <geosoft.gxapi.GXITR>`. Channel must be locked for READONLY or READWRITE. 
""" self._set_itr(ch, itr) def set_reg_symb(self, symb, reg): """ Set a `GXREG <geosoft.gxapi.GXREG>` object into a symbol :param symb: Symbol, `NULLSYMB <geosoft.gxapi.NULLSYMB>` for the database `GXREG <geosoft.gxapi.GXREG>` :param reg: `GXREG <geosoft.gxapi.GXREG>` to set into Blob :type symb: int :type reg: GXREG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_reg_symb(symb, reg) def set_reg_symb_setting(self, symb, name, setting): """ Set a `GXREG <geosoft.gxapi.GXREG>` string setting in a symbol reg :param symb: Symbol, `NULLSYMB <geosoft.gxapi.NULLSYMB>` for the database `GXREG <geosoft.gxapi.GXREG>` :param name: `GXREG <geosoft.gxapi.GXREG>` entry name :param setting: Setting :type symb: int :type name: str :type setting: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The symbol `GXREG <geosoft.gxapi.GXREG>` is used to store a variety of attribute about the symbol. Following a conventionally used entries: UNITS - channel units CLASS - symbol class name (i.e. "Assay") _PJ_ipj - projection blob name _PJ_x - projection coordinate pair _PJ_y _PJ_name - projection GXF-style info _PJ_ellipsoid _PJ_projection _PJ_units _PJ_datum_transform This is a convenient but low-performance way to get/set `GXREG <geosoft.gxapi.GXREG>` settings. If performance is an issue, and more than one setting is to be Get and or Set, use the `GXREG <geosoft.gxapi.GXREG>` directly. """ self._set_reg_symb_setting(symb, name.encode(), setting.encode()) def write_blob_bf(self, symb, bf): """ Write a blob from a file into a database. :param symb: Blob (`DB_SYMB_BLOB <geosoft.gxapi.DB_SYMB_BLOB>`) to write into database from file :param bf: File to write blob into :type symb: int :type bf: GXBF .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_blob_bf(symb, bf) # Control def commit(self): """ This method forces all changes to the database to be saved. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._commit() def compact(self): """ Removes any extra space from the database. This will reduce the database to its smallest size. .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._compact() @classmethod def create(cls, file, lines, chans, blobs, users, cache, super, password): """ This method makes a brand new database of the specified size. The database is opened in ReadWrite Mode. :param file: Name of the Database File to Create :param lines: Max Lines in the Database (200) :param chans: Max Channels in the Database (50) :param blobs: Max Blobs in the Database (Channels+Lines+20) :param users: Max Users in the Database (10) :param cache: Number of Erase Caches (100) :param super: Name of the Super User "SUPER" :param password: <PASSWORD> "" :type file: str :type lines: int :type chans: int :type blobs: int :type users: int :type cache: int :type super: str :type password: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDB._create(GXContext._get_tls_geo(), file.encode(), lines, chans, blobs, users, cache, super.encode(), password.encode()) @classmethod def create_comp(cls, file, lines, chans, blobs, users, cache, super, password, page, level): """ This method makes a brand new database of the specified size. The database is opened in ReadWrite Mode. Also allows you to set paging size and the Compression Level. 
:param file: Name of the Database File to Create :param lines: Max Lines in the Database (200) :param chans: Max Channels in the Database (50) :param blobs: Max Blobs in the Database (Channels+Lines+20) :param users: Max Users in the Database (10) :param cache: Number of Erase Caches (100) :param super: Name of the Super User "SUPER" :param password: <PASSWORD> "" :param page: Page Size Must be (64,128,256,512,1024,2048,4096) normally 1024 :param level: :ref:`DB_COMP` :type file: str :type lines: int :type chans: int :type blobs: int :type users: int :type cache: int :type super: str :type password: str :type page: int :type level: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDB._create_comp(GXContext._get_tls_geo(), file.encode(), lines, chans, blobs, users, cache, super.encode(), password.encode(), page, level) @classmethod def create_ex(cls, file, lines, chans, blobs, users, cache, super, password, page): """ This method makes a brand new database of the specified size. The database is opened in ReadWrite Mode. Also allows you to set paging size. :param file: Name of the Database File to Create :param lines: Max Lines in the Database (200) :param chans: Max Channels in the Database (50) :param blobs: Max Blobs in the Database (Channels+Lines+20) :param users: Max Users in the Database (10) :param cache: Number of Erase Caches (100) :param super: Name of the Super User "SUPER" :param password: <PASSWORD> "" :param page: Page Size Must be (64,128,256,512,1024,2048,4096) normally 1024 :type file: str :type lines: int :type chans: int :type blobs: int :type users: int :type cache: int :type super: str :type password: str :type page: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDB._create_ex(GXContext._get_tls_geo(), file.encode(), lines, chans, blobs, users, cache, super.encode(), password.encode(), page) def del_line0(self): """ Delete Empty Line 0. .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A new database is created with a single, empty line L0, but many processes create databases then create their own lines, so the empty line L0 may remain after the process finishes. This function will delete a line L0 a) If it exists and is empty b) It is not the only line in the database. .. seealso:: `GXEDB.del_line0 <geosoft.gxapi.GXEDB.del_line0>` - deletes an empty line 0 from the currently edited database. """ self._del_line0() def discard(self): """ This method discards all changes made to the database since the last commit or opening. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._discard() @classmethod def grow(cls, file, lines, chans, blobs, users, cache): """ Enlarges the database. :param file: Name of the Database File to Create :param lines: Max Lines in the Database (200) :param chans: Max Channels in the Database (50) :param blobs: Max Blobs in the Database (Channels+Lines+20) :param users: Max Users in the Database (10) :param cache: Number of Erase Caches (100) :type file: str :type lines: int :type chans: int :type blobs: int :type users: int :type cache: int .. 
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDB._grow(GXContext._get_tls_geo(), file.encode(), lines, chans, blobs, users, cache) @classmethod def can_open(cls, file, user, password): """ This method checks whether it is possible to open a database. :param file: Name of the Database File to Open :param user: Name of the user ("SUPER" normally) :param password: <PASSWORD> ("" normally) :type file: str :type user: str :type password: str :rtype: bool .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method is useful to determine if another session already locked a database. By using this method before an `open <geosoft.gxapi.GXDB.open>` a GX may handle errors like this more gracefully. .. seealso:: `open <geosoft.gxapi.GXDB.open>`, `open_read_only <geosoft.gxapi.GXDB.open_read_only>`, `can_open_read_only <geosoft.gxapi.GXDB.can_open_read_only>` """ ret_val = gxapi_cy.WrapDB._can_open(GXContext._get_tls_geo(), file.encode(), user.encode(), password.encode()) return ret_val @classmethod def can_open_read_only(cls, file, user, password): """ This method checks whether it is possible to open a database in read-only mode. :param file: Name of the Database File to Open :param user: Name of the user ("SUPER" normally) :param password: Password of the user ("" normally) :type file: str :type user: str :type password: str :rtype: bool .. versionadded:: 6.4.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method is useful to determine if another session already locked a database. By using this method before an `open_read_only <geosoft.gxapi.GXDB.open_read_only>` a GX may handle errors like this more gracefully. .. 
       seealso:: `open <geosoft.gxapi.GXDB.open>`, `open_read_only <geosoft.gxapi.GXDB.open_read_only>`, `can_open <geosoft.gxapi.GXDB.can_open>`
        """
        ret_val = gxapi_cy.WrapDB._can_open_read_only(GXContext._get_tls_geo(), file.encode(), user.encode(), password.encode())
        return ret_val



    def check(self):
        """
        Does an integrity check of the data in the database to
        ensure it is valid.

        :returns:    0 - Ok
                     1 - Invalid Blocks in the Database
        :rtype:      int

        .. versionadded:: 6.0.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._check()
        return ret_val



    def is_empty(self):
        """
        See if a database contains only empty lines.

        :returns:    1 if the database contains only empty lines.
        :rtype:      int

        .. versionadded:: 6.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This function does not check for other information or blobs,
        it merely looks at all lines in the database to see if they
        are empty. If all are empty, it returns 1.
        """
        ret_val = self._is_empty()
        return ret_val



    def is_line_empty(self, symb):
        """
        See if a specific line in the database is empty.

        :param symb:  Line symbol
        :type  symb:  int

        :returns:     1 if the line is empty.
        :rtype:       int

        .. versionadded:: 6.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_line_empty(symb)
        return ret_val



    @classmethod
    def open(cls, file, user, password):
        """
        This method opens a database.

        :param file:      Name of the Database File to Open
        :param user:      Name of the user ("SUPER" normally)
        :param password:  Password of the user ("" normally)
        :type  file:      str
        :type  user:      str
        :type  password:  str

        :returns:         `GXDB <geosoft.gxapi.GXDB>` Object
        :rtype:           GXDB

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        .. seealso::

            `open_read_only <geosoft.gxapi.GXDB.open_read_only>`, `can_open <geosoft.gxapi.GXDB.can_open>`, `can_open_read_only <geosoft.gxapi.GXDB.can_open_read_only>`
        """
        ret_val = gxapi_cy.WrapDB._open(GXContext._get_tls_geo(), file.encode(), user.encode(), password.encode())
        return GXDB(ret_val)



    @classmethod
    def open_read_only(cls, file, user, password):
        """
        This method opens a database.

        :param file:      Name of the Database File to Open
        :param user:      Name of the user ("SUPER" normally)
        :param password:  Password of the user ("" normally)
        :type  file:      str
        :type  user:      str
        :type  password:  str

        :returns:         `GXDB <geosoft.gxapi.GXDB>` Object
        :rtype:           GXDB

        .. versionadded:: 6.4.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This method is useful to open multiple reader instances on the same database.
        This call will fail if a `GXDB <geosoft.gxapi.GXDB>` has already been opened with `open <geosoft.gxapi.GXDB.open>` or locked
        in the application with `GXEDB.lock <geosoft.gxapi.GXEDB.lock>`.

        .. seealso::

            `open <geosoft.gxapi.GXDB.open>`, `can_open <geosoft.gxapi.GXDB.can_open>`, `can_open_read_only <geosoft.gxapi.GXDB.can_open_read_only>`
        """
        ret_val = gxapi_cy.WrapDB._open_read_only(GXContext._get_tls_geo(), file.encode(), user.encode(), password.encode())
        return GXDB(ret_val)



    @classmethod
    def repair(cls, file):
        """
        Cleans the database by removing invalid blocks

        :param file:  Name of the Database File to Create
        :type  file:  str

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapDB._repair(GXContext._get_tls_geo(), file.encode())



    def sync(self):
        """
        Synchronize the Metadata from this database to the XML

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._sync()




# Data



    def copy_data(self, line, i_chan, o_chan):
        """
        This method copies the data from one channel to another on
        the specified line. The data is converted if such
        conversion is necessary.

        :param line:    Line
        :param i_chan:  Channel to Copy Data From
        :param o_chan:  Channel to Copy Data To
        :type  line:    int
        :type  i_chan:  int
        :type  o_chan:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** All the data in the destination channel is destroyed along
        with the fiducial start and increment.
        """
        self._copy_data(line, i_chan, o_chan)



    def get_col_va(self, chan):
        """
        Returns the # of columns in a `GXVA <geosoft.gxapi.GXVA>` channel.

        :param chan:  Channel (read locked)
        :type  chan:  int

        :returns:     # of columns
                      0 if error
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If the channel is `GXVV <geosoft.gxapi.GXVV>`, this function returns 1.
        """
        ret_val = self._get_col_va(chan)
        return ret_val



    def get_channel_length(self, line, chan):
        """
        Returns the # of elements in a channel.

        :param line:  Line (read or write locked)
        :param chan:  Channel (read or write locked)
        :type  line:  int
        :type  chan:  int

        :returns:     # of elements
        :rtype:       int

        .. versionadded:: 8.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Returns the actual number of data items (rows) in a channel. For `GXVA <geosoft.gxapi.GXVA>` channels no correction is
        necessary for the number of columns.
        """
        ret_val = self._get_channel_length(line, chan)
        return ret_val



    def get_fid_incr(self, line, chan):
        """
        This method returns the fiducial increment value of a
        specified Channel.

        :param line:  Line (read or write locked)
        :param chan:  Channel (read locked)
        :type  line:  int
        :type  chan:  int

        :returns:     Fiducial increment.
        :rtype:       float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_fid_incr(line, chan)
        return ret_val



    def get_fid_start(self, line, chan):
        """
        This method returns the fiducial start value of a
        specified Channel.

        :param line:  Line (read or write locked)
        :param chan:  Channel (read locked)
        :type  line:  int
        :type  chan:  int

        :returns:     Fiducial Start.
        :rtype:       float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_fid_start(line, chan)
        return ret_val



    def set_fid(self, line, chan, start, incr):
        """
        This method allows the user to set the fiducial start and
        increment of a channel. The Increment should never be 0.

        :param line:   Line (read or write locked)
        :param chan:   Channel to set fiducial (write locked)
        :param start:  Start Fiducial Value
        :param incr:   Increment Fiducial Value
        :type  line:   int
        :type  chan:   int
        :type  start:  float
        :type  incr:   float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_fid(line, chan, start, incr)



    def window_va_ch(self, line, i_ch, o_ch, col_s, col_e):
        """
        Copy a window of data in a channel into a new channel

        :param line:   Line symbol
        :param i_ch:   Original channel
        :param o_ch:   Output channel
        :param col_s:  Start column number to copy, 0 is first column
        :param col_e:  End column number to copy
        :type  line:   int
        :type  i_ch:   int
        :type  o_ch:   int
        :type  col_s:  int
        :type  col_e:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This function normally used for `GXVA <geosoft.gxapi.GXVA>` channels. A copy of the
        original channel will be made if start and end column
        numbers to copy are dummies.
        All the columns including start and end columns will be copied
        """
        self._window_va_ch(line, i_ch, o_ch, col_s, col_e)



    def window_va_ch2(self, line, i_ch, o_ch, gvv):
        """
        Copy a windowed version of data in a channel into a new channel

        :param line:  Line symbol
        :param i_ch:  Original channel
        :param o_ch:  Output channel
        :param gvv:   `GXVV <geosoft.gxapi.GXVV>` containing 0/1 values for each channel.
        :type  line:  int
        :type  i_ch:  int
        :type  o_ch:  int
        :type  gvv:   GXVV

        .. versionadded:: 5.0.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Similar to `window_va_ch <geosoft.gxapi.GXDB.window_va_ch>`, but the input and output channels must
        contain the same number of columns. The input `GXVV <geosoft.gxapi.GXVV>` tells which columns
        to copy over; 0 values indicate that the output column is to be
        dummied, and non-zero values indicate the column is to be copied.
        The `GXVV <geosoft.gxapi.GXVV>` length must be the same as the number of columns.
        """
        self._window_va_ch2(line, i_ch, o_ch, gvv)




# Line



    def set_line_selection(self, line, mode):
        """
        Set the selection status for a line.

        :param line:  Handle of line to select/deselect
        :param mode:  :ref:`DB_LINE_SELECT`
        :type  line:  int
        :type  mode:  int

        .. versionadded:: 9.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_line_selection(line, mode)



    def get_line_selection(self, line):
        """
        Get the selection status for a line.

        :param line:  Line handle
        :type  line:  int

        :returns:     One of :ref:`DB_LINE_SELECT`
        :rtype:       int

        .. versionadded:: 9.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_line_selection(line)
        return ret_val



    def first_sel_line(self):
        """
        This method will return a handle to the first selected
        line in the database.

        :returns:    Line Handle (use iIsLineValid method to see if valid)
        :rtype:      int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._first_sel_line()
        return ret_val



    def get_line_map_fid(self, line, start, end):
        """
        This method gets a line map clip fiducial.

        :param line:   Line handle to look at
        :param start:  Start Fid
        :param end:    End Fid
        :type  line:   int
        :type  start:  float_ref
        :type  end:    float_ref

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        start.value, end.value = self._get_line_map_fid(line, start.value, end.value)



    def get_select(self):
        """
        Gets the Line Selections.

        :returns:    Selections Object.
        :rtype:      int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_select()
        return ret_val



    def count_sel_lines(self):
        """
        This method counts the number of selected lines in
        the database.

        :returns:    x - Number of selected lines in the database
        :rtype:      int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._count_sel_lines()
        return ret_val



    @classmethod
    def is_chan_name(cls, chan):
        """
        Is this a valid channel name?

        :param chan:  Name to test
        :type  chan:  str

        :returns:     1 if it is a valid channel name
                      0 if it is not a valid channel name
        :rtype:       int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Channel names must only contain alpha-numeric characters or the
        underscore character "_", and the first letter must be a letter
        or an underscore.
        """
        ret_val = gxapi_cy.WrapDB._is_chan_name(GXContext._get_tls_geo(), chan.encode())
        return ret_val



    def is_chan_valid(self, chan):
        """
        This method checks to see if the channel handle is a
        valid channel.

        :param chan:  Channel handle to check
        :type  chan:  int

        :returns:     0 - Not a valid channel
                      1 - Valid
        :rtype:       int

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_chan_valid(chan)
        return ret_val



    @classmethod
    def is_line_name(cls, line):
        """
        Is this a valid line name.

        :param line:  Name to test
        :type  line:  str

        :returns:     1 if it is a valid line name
                      0 if it is not a valid line name
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapDB._is_line_name(GXContext._get_tls_geo(), line.encode())
        return ret_val



    def is_line_valid(self, line):
        """
        This method checks to see if the line handle returned by
        the Line methods is a valid line.

        :param line:  Line handle to check
        :type  line:  int

        :returns:     0 - Not a valid line
                      1 - Valid
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_line_valid(line)
        return ret_val



    def line_category(self, line):
        """
        This method returns the category (group, line) of a line.

        :param line:  Line handle to look at
        :type  line:  int

        :returns:     :ref:`DB_CATEGORY_LINE` or `iDUMMY <geosoft.gxapi.iDUMMY>`.
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        ret_val = self._line_category(line)
        return ret_val



    def line_flight(self, line):
        """
        This method returns the flight number of a line.

        :param line:  Line handle to look at
        :type  line:  int

        :returns:     Line Flight Number.
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        ret_val = self._line_flight(line)
        return ret_val



    def line_label(self, line, label, format):
        """
        Create a line label

        :param line:    Line symbol
        :param label:   String in which to place label
        :param format:  :ref:`DB_LINE_LABEL_FORMAT`
        :type  line:    int
        :type  label:   str_ref
        :type  format:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Label formats.

        example full format is "L1023.4 13"
        type "L"
        number "1023"
        version "4"
        flight "13"

        formats can be added to get combined format

        Use LINK format to create a database link label.
        """
        label.value = self._line_label(line, label.value.encode(), format)



    def line_number(self, line):
        """
        This method returns the number of a line.

        :param line:  Line handle to look at
        :type  line:  int

        :returns:     Line Number.
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The line must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        ret_val = self._line_number(line)
        return ret_val



    def line_number2(self, line, line_number):
        """
        Returns the string form of the line number (can be alphanumeric)

        :param line:         Line handle to look at
        :param line_number:  Line number
        :type  line:         int
        :type  line_number:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The line must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        line_number.value = self._line_number2(line, line_number.value.encode())



    def line_type(self, line):
        """
        This method returns the type of a line.

        :param line:  Line handle to look at
        :type  line:  int

        :returns:     :ref:`DB_LINE_TYPE`
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The line must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        ret_val = self._line_type(line)
        return ret_val



    def line_version(self, line):
        """
        This method returns the version number of a line.

        :param line:  Line handle to look at
        :type  line:  int

        :returns:     Line Number.
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The line must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        ret_val = self._line_version(line)
        return ret_val



    @classmethod
    def set_line_name(cls, num, type, ver, name):
        """
        This method sets up a line name given the line's number,
        type, and version.

        :param num:   Line number
        :param type:  Line type
        :param ver:   Line version
        :param name:  String to set line name to
        :type  num:   int
        :type  type:  int
        :type  ver:   int
        :type  name:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This MUST be called to generate a line name when calls
        are made to `exist_symb <geosoft.gxapi.GXDB.exist_symb>`, `create_symb <geosoft.gxapi.GXDB.create_symb>` or `delete_symb <geosoft.gxapi.GXDB.delete_symb>`
        for an operation on a line.
        See also SetLineName2_DB.
        """
        name.value = gxapi_cy.WrapDB._set_line_name(GXContext._get_tls_geo(), num, type, ver, name.value.encode())



    @classmethod
    def set_line_name2(cls, al_num, type, ver, name):
        """
        Like SetLineName_DB, but can use alphanumeric for line number

        :param al_num:  Line number (alphanumeric)
        :param type:    Line type
        :param ver:     Line version
        :param name:    String to set line name to
        :type  al_num:  str
        :type  type:    int
        :type  ver:     int
        :type  name:    str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This MUST be called to generate a line name when calls
        are made to `exist_symb <geosoft.gxapi.GXDB.exist_symb>`, `create_symb <geosoft.gxapi.GXDB.create_symb>` or `delete_symb <geosoft.gxapi.GXDB.delete_symb>`
        for an operation on a line.
        The line number can be any combination of letters and numbers,
        i.e. XU324, 98765, A, 23NGV etc.
        """
        name.value = gxapi_cy.WrapDB._set_line_name2(GXContext._get_tls_geo(), al_num.encode(), type, ver, name.value.encode())



    def rename_line(self, line, al_num, type, ver):
        """
        Change the name for a line.

        :param line:    Line handle of line to modify (READWRITE locked)
        :param al_num:  Line number (alphanumeric)
        :param type:    Line type
        :param ver:     Line version
        :type  line:    int
        :type  al_num:  str
        :type  type:    int
        :type  ver:     int

        .. versionadded:: 9.9

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The line must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`.
        Sets/resets the name of an existing line directly from line type, number, and version.
        The line number can be any combination of letters and numbers,
        i.e. XU324, 98765, A, 23NGV etc.
        Returns an error if the renamed line already exists.
        """
        self._rename_line(line, al_num.encode(), type, ver)



    def load_select(self, file):
        """
        Load selections from a file.

        :param file:  File Name
        :type  file:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._load_select(file.encode())



    def next_sel_line(self, prev):
        """
        This method will advance to the next selected line based
        on the currently selected line handle.

        :param prev:  Previous Line
        :type  prev:  int

        :returns:     Line Handle (use iIsLineValid method to see if valid).
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._next_sel_line(prev)
        return ret_val



    def line_bearing(self, line):
        """
        This method returns the bearing of a line.

        :param line:  Line handle to look at
        :type  line:  int

        :returns:     Bearing value, `rDUMMY <geosoft.gxapi.rDUMMY>` if not set.
        :rtype:       float

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`

        This function simply returns a value set using the `set_line_bearing <geosoft.gxapi.GXDB.set_line_bearing>`
        function. It returns `rDUMMY <geosoft.gxapi.rDUMMY>` for line categories other than
        `DB_CATEGORY_LINE_FLIGHT <geosoft.gxapi.DB_CATEGORY_LINE_FLIGHT>`.

        To calculate the line azimuth based on the first and
        last non-dummy locations, use the `GXDU.direction <geosoft.gxapi.GXDU.direction>` function.

        .. seealso::

            `set_line_bearing <geosoft.gxapi.GXDB.set_line_bearing>`, `GXDU.direction <geosoft.gxapi.GXDU.direction>`
        """
        ret_val = self._line_bearing(line)
        return ret_val



    def line_date(self, line):
        """
        This method returns the date of a line.

        :param line:  Line handle to look at
        :type  line:  int

        :returns:     Date value.
        :rtype:       float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` or `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        ret_val = self._line_date(line)
        return ret_val



    def save_select(self, file):
        """
        Saves current selections to a file.

        :param file:  File Name
        :type  file:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._save_select(file.encode())



    def select(self, select, mode):
        """
        Select/deselect lines based on selection string

        :param select:  Selection
        :param mode:    :ref:`DB_LINE_SELECT`
        :type  select:  str
        :type  mode:    int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Selections/deselections are cumulative. If lines had already
        been selected, then any further selection/deselection will
        affect that set of selections.

        E.g. "L99:800" is the string to select all normal lines from
        99 to 800. If `select <geosoft.gxapi.GXDB.select>` is called again to select "L1000",
        then lines 99 to 800 and 1000 would all be selected.

        Use a "T" prefix for Tie lines.
        Use an "F" prefix to specify lines of a specific flight.
        E.g. "F10" would select all lines of flight 10.

        Use an empty string ("") to select/deselect ALL lines.
        """
        self._select(select.encode(), mode)



    def set_line_bearing(self, line, bearing):
        """
        Sets a line's bearing.

        :param line:     Line handle
        :param bearing:  Value to set bearing to
        :type  line:     int
        :type  bearing:  float

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`

        This function simply sets a value in the line's metadata
        that is retrieved using the `line_bearing <geosoft.gxapi.GXDB.line_bearing>`
        function. It terminates for line categories other than
        `DB_CATEGORY_LINE_FLIGHT <geosoft.gxapi.DB_CATEGORY_LINE_FLIGHT>`.

        .. seealso::

            `line_bearing <geosoft.gxapi.GXDB.line_bearing>`, `GXDU.direction <geosoft.gxapi.GXDU.direction>`
        """
        self._set_line_bearing(line, bearing)



    def set_line_date(self, line, date):
        """
        This method sets a line's date.

        :param line:  Line handle
        :param date:  Value to set date to
        :type  line:  int
        :type  date:  float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        self._set_line_date(line, date)



    def set_line_flight(self, line, fl):
        """
        This method sets a line's flight.

        :param line:  Line handle
        :param fl:    Value to set line flight to
        :type  line:  int
        :type  fl:    int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        self._set_line_flight(line, fl)



    def set_line_map_fid(self, line, start, end):
        """
        This method changes a line map clip fiducial.

        :param line:   Line handle to look at
        :param start:  Start Fid
        :param end:    End Fid
        :type  line:   int
        :type  start:  float
        :type  end:    float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** for full range, set Start Fid to `rMIN <geosoft.gxapi.rMIN>` and End Fid to `rMAX <geosoft.gxapi.rMAX>`.
        for no data, set Start and End Fids to `rDUMMY <geosoft.gxapi.rDUMMY>`.
        """
        self._set_line_map_fid(line, start, end)



    def set_line_num(self, line, num):
        """
        This method sets a line's number.

        :param line:  Line handle
        :param num:   Value to set line number to
        :type  line:  int
        :type  num:   int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        self._set_line_num(line, num)



    def set_line_type(self, line, type):
        """
        This method sets a line's type.

        :param line:  Line handle
        :param type:  :ref:`DB_LINE_TYPE`
        :type  line:  int
        :type  type:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        self._set_line_type(line, type)



    def set_line_ver(self, line, ver):
        """
        This method sets a line's version.

        :param line:  Line handle
        :param ver:   Value to set line version to
        :type  line:  int
        :type  ver:   int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The channel must be locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`
        """
        self._set_line_ver(line, ver)



    def set_select(self, sel):
        """
        Sets the Line Selections.

        :param sel:  Selections
        :type  sel:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This method also destroys the DB_SELECT object.
        """
        self._set_select(sel)




# META



    def get_meta(self, meta):
        """
        Get the metadata of a database.

        :param meta:  Meta object to fill with database's meta
        :type  meta:  GXMETA

        .. versionadded:: 5.1.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_meta(meta)



    def set_meta(self, meta):
        """
        Set the metadata of a database.

        :param meta:  Meta object to add to database's meta
        :type  meta:  GXMETA

        .. versionadded:: 5.1.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_meta(meta)




# Symbols



    @classmethod
    def create_symb_lst(cls):
        """
        Create a `GXLST <geosoft.gxapi.GXLST>` object large enough to contain channel names and symbols numbers.

        :rtype:      GXLST

        .. versionadded:: 9.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapDB._create_symb_lst(GXContext._get_tls_geo())
        return GXLST(ret_val)



    def array_lst(self, lst):
        """
        Load a `GXLST <geosoft.gxapi.GXLST>` object with array (`GXVA <geosoft.gxapi.GXVA>`) channel symbols.

        :param lst:  List to Populate (construct with CreateSymbLST_DB)
        :type  lst:  GXLST

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._array_lst(lst)



    def array_size_lst(self, columns, lst):
        """
        Load a `GXLST <geosoft.gxapi.GXLST>` object with array (`GXVA <geosoft.gxapi.GXVA>`) channel symbols
        with a particular number of columns.

        :param columns:  Number of columns in array ( > 1 )
        :param lst:      List to Populate (construct with CreateSymbLST_DB)
        :type  columns:  int
        :type  lst:      GXLST

        .. versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._array_size_lst(columns, lst)



    def chan_lst(self, lst):
        """
        Load a `GXLST <geosoft.gxapi.GXLST>` with database channels.

        :param lst:  List to Populate (construct with CreateSymbLST_DB)
        :type  lst:  GXLST

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Populates a `GXLST <geosoft.gxapi.GXLST>` with channel symbols.
        The name is put into the "Name" part of the `GXLST <geosoft.gxapi.GXLST>` (0),
        and the handle, an integer value written as a string, is
        placed in the value part of the `GXLST <geosoft.gxapi.GXLST>` (1).
        Array channels are included, as well as virtual channels (array
        channel single columns loaded in the database like \\"Chan[1]\\".

        The `GXLST <geosoft.gxapi.GXLST>` is cleared first, and the items are sorted by name.
        """
        self._chan_lst(lst)



    def normal_chan_lst(self, lst):
        """
        Load a `GXLST <geosoft.gxapi.GXLST>` with non-array database channels.

        :param lst:  List to Populate (construct with CreateSymbLST_DB)
        :type  lst:  GXLST

        .. versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Like `chan_lst <geosoft.gxapi.GXDB.chan_lst>`, but does not include array channels or virtual channels.
        """
        self._normal_chan_lst(lst)



    def non_string_and_non_array_chan_lst(self, lst):
        """
        Load a `GXLST <geosoft.gxapi.GXLST>` with non-string and non-array database channels.

        :param lst:  List to Populate (construct with CreateSymbLST_DB)
        :type  lst:  GXLST

        .. versionadded:: 9.9

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Like `chan_lst <geosoft.gxapi.GXDB.chan_lst>`, but does not include array channels, virtual channels or string channels.
        """
        self._non_string_and_non_array_chan_lst(lst)



    def class_chan_lst(self, lst, cl):
        """
        Load a `GXLST <geosoft.gxapi.GXLST>` with channels in a particular class.

        :param lst:  `GXLST <geosoft.gxapi.GXLST>` object to populate (construct with CreateSymbLST_DB)
        :param cl:   CLASS name for the channel ("" for all)
        :type  lst:  GXLST
        :type  cl:   str

        .. versionadded:: 5.0.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The Name of the symbol is placed in the item name and the item value is set to the symbol handle.
        Only channels with the given class name are included,
        e.g. use "ASSAY" for assay channels in `GXCHIMERA <geosoft.gxapi.GXCHIMERA>`.

        The `GXLST <geosoft.gxapi.GXLST>` is cleared first, and the items are sorted by name.
        """
        self._class_chan_lst(lst, cl.encode())



    def class_group_lst(self, lst, cl):
        """
        Load a `GXLST <geosoft.gxapi.GXLST>` with group lines in a particular class.

        :param lst:  `GXLST <geosoft.gxapi.GXLST>` object to populate (construct with CreateSymbLST_DB)
        :param cl:   CLASS name for the group ("" for all)
        :type  lst:  GXLST
        :type  cl:   str

        .. versionadded:: 5.0.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The Name of the symbol is placed in the item name and the item value is set to the symbol handle.
        Only group lines with the given class name are included,
        e.g. use "TARGETS" for UX-Detect Target groups.

        The `GXLST <geosoft.gxapi.GXLST>` is cleared first, and the items are sorted by name.
        """
        self._class_group_lst(lst, cl.encode())



    def create_symb(self, name, symb, owner, category):
        """
        Create a new Symbol.

        :param name:      Symbol Name
        :param symb:      :ref:`DB_SYMB_TYPE`
        :param owner:     :ref:`DB_OWN`
        :param category:  :ref:`DB_CATEGORY_USER`, :ref:`DB_CATEGORY_LINE`, :ref:`DB_CATEGORY_CHAN`, :ref:`DB_CATEGORY_BLOB`
        :type  name:      str
        :type  symb:      int
        :type  owner:     int
        :type  category:  int

        :returns:         DB_SYMB Object
        :rtype:           int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If symbol already exists, and it is the same type
        simply returns a handle to the existing symbol.

        This method simply calls `create_symb_ex <geosoft.gxapi.GXDB.create_symb_ex>` with the
        extra info set to 1.

        STRING-type channels: To create a string-type channel,
        enter a negative number for the channel category below.
        For instance, "-32" will create a string channel with
        32 characters per item.

        BLOBS: Blobs (Binary Large Objects) can be used for storing
        miscellaneous data which does not fit well into any of the
        other various storage objects, such as a `GXREG <geosoft.gxapi.GXREG>`. Generally,
        a single blob is used for a single purpose.
        One or more objects is serialized to a `GXBF <geosoft.gxapi.GXBF>`
        object, which is then written to the blob using
        the sWriteBlobBF_DB() function.
        Retrieval is done in the reverse order, using sReadBlobBF_DB()
        first, then extracting the objects from the `GXBF <geosoft.gxapi.GXBF>` object.

        To avoid namespace problems, Geosoft reserves the following
        name prefixes:

        OE.  (Core functions)
        GS.  (Applications)
        CS.  (Custom Solutions applications)

        Programmers should avoid using the above prefixes as the starting
        letters of their blob names to avoid any possible conflicts.
        """
        ret_val = self._create_symb(name.encode(), symb, owner, category)
        return ret_val



    def create_symb_ex(self, name, symb, owner, category, extra):
        """
        Create a new Symbol.

        :param name:      Symbol Name
        :param symb:      :ref:`DB_SYMB_TYPE`
        :param owner:     :ref:`DB_OWN`
        :param category:  :ref:`DB_CATEGORY_USER`, :ref:`DB_CATEGORY_LINE`, :ref:`DB_CATEGORY_CHAN`, :ref:`DB_CATEGORY_BLOB`
        :param extra:     Extra info, which depends on :ref:`DB_SYMB_TYPE`
                          `DB_SYMB_CHAN <geosoft.gxapi.DB_SYMB_CHAN>` - element width for a `GXVA <geosoft.gxapi.GXVA>` channel
                          ignored for all other :ref:`DB_SYMB_TYPE` types
        :type  name:      str
        :type  symb:      int
        :type  owner:     int
        :type  category:  int
        :type  extra:     int

        :returns:         DB_SYMB handle.
        :rtype:           int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If symbol already exists it is returned.

        STRING-type channels: To create a string-type channel,
        enter a negative number for the channel category below.
        For instance, "-32" will create a string channel with
        32 characters per item.

        Symbol name for `DB_CATEGORY_LINE_FLIGHT <geosoft.gxapi.DB_CATEGORY_LINE_FLIGHT>` must conform to
        the following line naming syntax:

        [type][number.version:flight]

        Type can be:

        L - normal line
        B - base line
        T - tie line
        R - trend line
        S - test line
        P - special line

        Examples: L100,
        T100.1:16

        Note the "Flight" is any whole number that may be useful
        to differentiate processing tasks.

        The ability to create a `GXVA <geosoft.gxapi.GXVA>` channel is not available in the
        free interface and requires a Montaj license.
        """
        ret_val = self._create_symb_ex(name.encode(), symb, owner, category, extra)
        return ret_val



    def csv_chan_lst(self, lst, channels):
        """
        Load a `GXLST <geosoft.gxapi.GXLST>` with channels in a comma-separated list.

        :param lst:       `GXLST <geosoft.gxapi.GXLST>` object to populate (construct with CreateSymbLST_DB)
        :param channels:  Comma-separated list of channels
        :type  lst:       GXLST
        :type  channels:  str

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The Name of the symbol is placed in the item name and the item value is set to the symbol handle.
        Only channels in the list which are present in the database are included.

        The `GXLST <geosoft.gxapi.GXLST>` is cleared first.
        """
        self._csv_chan_lst(lst, channels.encode())



    def delete_symb(self, symb):
        """
        This method destroys a symbol in the database and all
        the data associated with it. The symbol's lock is
        automatically removed.

        :param symb:  Symbol to Delete (must be READWRITE locked)
        :type  symb:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._delete_symb(symb)



    def dup_line_symb(self, symb, new_name):
        """
        Duplicate a line symbol from a group or line symbol.
        The new name must not already exist in the database.

        :param symb:      Symbol Handle to duplicate
        :param new_name:  Name of the New Symbol
        :type  symb:      int
        :type  new_name:  str

        :returns:         New Symbol Handle
        :rtype:           int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._dup_line_symb(symb, new_name.encode())
        return ret_val



    def dup_symb(self, symb, new_name):
        """
        New Symbol by duplicating an existing symbol, LOCKED

        :param symb:      Symbol Handle to duplicate
        :param new_name:  Name of the New Symbol
        :type  symb:      int
        :type  new_name:  str

        :returns:         New Symbol Handle
        :rtype:           int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The symbol will be locked READWRITE.
        The new name must not already exist in the database.

        .. seealso::

            `dup_symb_no_lock <geosoft.gxapi.GXDB.dup_symb_no_lock>`
        """
        ret_val = self._dup_symb(symb, new_name.encode())
        return ret_val



    def dup_symb_no_lock(self, symb, new_name):
        """
        New Symbol by duplicating an existing symbol, NO LOCK.

        :param symb:      Symbol Handle to duplicate
        :param new_name:  Name of the New Symbol
        :type  symb:      int
        :type  new_name:  str

        :returns:         New Symbol Handle
        :rtype:           int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The symbol will be NOT be locked.
        The new name must not already exist in the database.

        .. seealso::

            `dup_symb <geosoft.gxapi.GXDB.dup_symb>`
        """
        ret_val = self._dup_symb_no_lock(symb, new_name.encode())
        return ret_val



    def find_chan(self, chan):
        """
        Get handle to the specified channel.

        :param chan:  Name of channel
        :type  chan:  str

        :returns:     Channel Handle, `NULLSYMB <geosoft.gxapi.NULLSYMB>` if not defined
        :rtype:       int

        ..
versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To work with a specific column from a `GXVA <geosoft.gxapi.GXVA>` channel, specify the `GXVA <geosoft.gxapi.GXVA>` element number in square brackets as part of the `GXVA <geosoft.gxapi.GXVA>` channel name (e.g. "EM[3]" will treat the fourth column of the `GXVA <geosoft.gxapi.GXVA>` channel as a `GXVV <geosoft.gxapi.GXVV>`). See notes for `find_symb <geosoft.gxapi.GXDB.find_symb>`. Introduced in v5.1.3. The new `find_chan <geosoft.gxapi.GXDB.find_chan>` searches using the exact channel name. """ ret_val = self._find_chan(chan.encode()) return ret_val def find_symb(self, symb, type): """ Get handle to the specified symbol. :param symb: Name of symbol :param type: :ref:`DB_SYMB_TYPE` :type symb: str :type type: int :returns: Symbol Handle, `NULLSYMB <geosoft.gxapi.NULLSYMB>` if not defined :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To work with a specific column from a `GXVA <geosoft.gxapi.GXVA>` channel, specify the `GXVA <geosoft.gxapi.GXVA>` element number in square brackets as part of the `GXVA <geosoft.gxapi.GXVA>` channel name (e.g. "EM[3]" will treat the fourth column of the `GXVA <geosoft.gxapi.GXVA>` channel as a `GXVV <geosoft.gxapi.GXVV>`). For backward compatibility with GXs not employing the `get_xyz_chan_symb <geosoft.gxapi.GXDB.get_xyz_chan_symb>` function, the following behaviour has been introduced as of v5.1.3: (also true for "Y"). `find_symb <geosoft.gxapi.GXDB.find_symb>`(hDB, "X", `DB_SYMB_CHAN <geosoft.gxapi.DB_SYMB_CHAN>`) is now equivalent to: `get_xyz_chan_symb <geosoft.gxapi.GXDB.get_xyz_chan_symb>`(hDB, `DB_CHAN_X <geosoft.gxapi.DB_CHAN_X>`); In other words, the current X or Y is searched for, not necessarily the literal "X" or "Y". 
This ensures that newer databases, which might have "Easting" and "Northing" (or other similar names) instead of "X" and "Y" will still work with older GXs expecting "X" and "Y". The new `find_chan <geosoft.gxapi.GXDB.find_chan>` searches using the exact channel name. """ ret_val = self._find_symb(symb.encode(), type) return ret_val def get_chan_order_lst(self, lst): """ This method gets the channel display order for a database. The list will be stored in an `GXLST <geosoft.gxapi.GXLST>` object. In order to modify this displayed channels list, call `set_chan_order_lst <geosoft.gxapi.GXDB.set_chan_order_lst>` after. :param lst: `GXLST <geosoft.gxapi.GXLST>` object to populate (construct with CreateSymbLST_DB) :type lst: GXLST .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_chan_order_lst(lst) def get_xyz_chan_symb(self, chan): """ Searches for current X, Y or Z channel symbol :param chan: :ref:`DB_CHAN_SYMBOL` :type chan: int :returns: x - Symbol Handle `NULLSYMB <geosoft.gxapi.NULLSYMB>` - Symbol not found searches for the "current" X, Y or Z channel. If none is defined, then looks for "X", "Y" or "Z" channel If the channel is defined, but not present, returns `NULLSYMB <geosoft.gxapi.NULLSYMB>`. :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_xyz_chan_symb(chan) return ret_val def class_chan_list(self, vv, cl): """ Place a list of channels for a given class in a `GXVV <geosoft.gxapi.GXVV>`. :param vv: `GXVV <geosoft.gxapi.GXVV>` to populate, must be type INT. :param cl: Class name to match ("" for all) :type vv: GXVV :type cl: str :returns: Number of symbols. :rtype: int .. 
versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method generates a list of symbols in the database and places their handles into a `GXVV <geosoft.gxapi.GXVV>`. The list is not sorted. Only channels with the given class name are included, e.g. use "ASSAY" for assay channels used in `GXCHIMERA <geosoft.gxapi.GXCHIMERA>`. """ ret_val = self._class_chan_list(vv, cl.encode()) return ret_val def exist_chan(self, chan): """ See if the specified channel exists in the database. :param chan: Name of Channel :type chan: str :returns: 0 - Symbol does not exist in the database 1 - Symbol Exists :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See notes for `exist_symb <geosoft.gxapi.GXDB.exist_symb>`. Introduced in v5.1.3. `exist_chan <geosoft.gxapi.GXDB.exist_chan>` searches using the exact channel name. """ ret_val = self._exist_chan(chan.encode()) return ret_val def exist_symb(self, symb, type): """ This method checks to see if the specified symbol exists in the database. :param symb: Name of Symbol :param type: :ref:`DB_SYMB_TYPE` :type symb: str :type type: int :returns: 0 - Symbol does not exist in the database 1 - Symbol Exists :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** For backward compatibility with GXs not employing the GetXYZChan_DB function, the following behaviour has been introduced as of v5.1.3: (also true for "Y"). 
`exist_symb <geosoft.gxapi.GXDB.exist_symb>`(hDB, "X", `DB_SYMB_CHAN <geosoft.gxapi.DB_SYMB_CHAN>`) is now equivalent to: GetXYZChan_DB(hDB, `DB_CHAN_X <geosoft.gxapi.DB_CHAN_X>`, sXCh); `exist_symb <geosoft.gxapi.GXDB.exist_symb>`(hDB, sXCh, `DB_SYMB_CHAN <geosoft.gxapi.DB_SYMB_CHAN>`); In other words, the current X or Y is searched for, not necessarily the literal "X" or "Y". This ensures that newer databases, which might have "Easting" and "Northing" (or other similar names) instead of "X" and "Y" will still work with older GXs expecting "X" and "Y". The new `exist_chan <geosoft.gxapi.GXDB.exist_chan>` searches using the exact channel name. """ ret_val = self._exist_symb(symb.encode(), type) return ret_val def valid_symb(self, symb, type): """ This method checks to see if the specified symbol is a valid symbol in the database. :param symb: Symbol to check :param type: :ref:`DB_SYMB_TYPE` :type symb: int :type type: int :returns: 0 - Invalid symbol 1 - Symbol is valid :rtype: int .. versionadded:: 9.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._valid_symb(symb, type) return ret_val def get_symb_lock(self, symb): """ Determines if a symbol is locked :param symb: Symbol to Lock :type symb: int :returns: :ref:`DB_LOCK` :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_symb_lock(symb) return ret_val def get_xyz_chan(self, chan_symb, chan): """ Gets current X, Y or Z channel name :param chan_symb: :ref:`DB_CHAN_SYMBOL` :param chan: Returned name :type chan_symb: int :type chan: str_ref .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** searches for the "current" X, Y or Z channel. 
If none is defined, then returns "X", "Y" or "Z". """ chan.value = self._get_xyz_chan(chan_symb, chan.value.encode()) def symb_list(self, vv, symb): """ Place a list of symbols in a `GXVV <geosoft.gxapi.GXVV>`. :param vv: `GXVV <geosoft.gxapi.GXVV>` to populate, must be type INT. :param symb: :ref:`DB_SYMB_TYPE` :type vv: GXVV :type symb: int :returns: Number of symbols. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._symb_list(vv, symb) return ret_val def line_lst(self, lst): """ Load a `GXLST <geosoft.gxapi.GXLST>` with database lines. :param lst: List to Populate (construct with CreateSymbLST_DB) :type lst: GXLST .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Populates a `GXLST <geosoft.gxapi.GXLST>` with channel symbols. The name is put into the "Name" part of the `GXLST <geosoft.gxapi.GXLST>` (0), and the handle, an integer value written as a string, is placed in the value part of the `GXLST <geosoft.gxapi.GXLST>` (1). The `GXLST <geosoft.gxapi.GXLST>` is cleared first, and the items are sorted in logical line order. """ self._line_lst(lst) def lock_symb(self, symb, lock, wait): """ Locks a symbol for READONLY or READWRITE. :param symb: Symbol to Lock :param lock: :ref:`DB_LOCK` :param wait: :ref:`DB_WAIT` :type symb: int :type lock: int :type wait: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._lock_symb(symb, lock, wait) def mask_chan_lst(self, lst): """ Load a `GXLST <geosoft.gxapi.GXLST>` with mask channels. :param lst: `GXLST <geosoft.gxapi.GXLST>` object to populate :type lst: GXLST .. 
versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Loads a `GXLST <geosoft.gxapi.GXLST>` with all channels with CLASS "MASK", as well as all channels containing the string "MASK", as long as the CLASS for these channels is not set to something other than "" or "MASK". This function is a duplicate of the `GXCHIMERA.mask_chan_lst <geosoft.gxapi.GXCHIMERA.mask_chan_lst>` function, and can be used if `GXCHIMERA <geosoft.gxapi.GXCHIMERA>` is not installed. The `GXLST <geosoft.gxapi.GXLST>` is cleared first, and the items are sorted by name. "None" is added at the end, with a handle value of "-1". """ self._mask_chan_lst(lst) def selected_line_lst(self, lst): """ Load a `GXLST <geosoft.gxapi.GXLST>` with the selected lines. :param lst: List to Populate (construct with CreateSymbLST_DB) :type lst: GXLST .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method populates a `GXLST <geosoft.gxapi.GXLST>` object with all of the symbols of the selected lines in the database. The name is put into the "Name" part of the `GXLST <geosoft.gxapi.GXLST>` (0), and the handle, an integer value written as a string, is placed in the value part of the `GXLST <geosoft.gxapi.GXLST>` (1). Symbols are automatically sorted in logical line order. """ self._selected_line_lst(lst) def set_chan_order_lst(self, lst): """ This method sets the channel display order for a database. The list to modify will be stored in an `GXLST <geosoft.gxapi.GXLST>` object. Call `get_chan_order_lst <geosoft.gxapi.GXDB.get_chan_order_lst>` to populate the `GXLST <geosoft.gxapi.GXLST>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` object to modify :type lst: GXLST .. 
versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_chan_order_lst(lst) def set_xyz_chan(self, chan_symb, chan): """ Sets current X, Y or Z channel name :param chan_symb: :ref:`DB_CHAN_SYMBOL` :param chan: Channel name :type chan_symb: int :type chan: str .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the value specified is "", the internally stored value is cleared, and GetXYZChan_DB will return "X", "Y" or "Z" This can be used, for instance, to make "Easting" and "Northing" the current X and Y channels, and have GXs using the `get_xyz_chan_symb <geosoft.gxapi.GXDB.get_xyz_chan_symb>` function to load "X" and "Y" work as desired. """ self._set_xyz_chan(chan_symb, chan.encode()) def string_chan_lst(self, lst): """ Load a `GXLST <geosoft.gxapi.GXLST>` with string-type channels. :param lst: `GXLST <geosoft.gxapi.GXLST>` object to populate :type lst: GXLST .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Name of the symbol is placed in the item name and the item value is set to the symbol handle. Only channels with the string-type data (sChanType_DB < 0) are included. The `GXLST <geosoft.gxapi.GXLST>` is cleared first, and the items are sorted by name. """ self._string_chan_lst(lst) def symb_lst(self, lst, type): """ Populate a `GXLST <geosoft.gxapi.GXLST>` with database symbols. :param lst: List to Populate (construct with CreateSymbLST_DB) :param type: :ref:`DB_SYMB_TYPE` :type lst: GXLST :type type: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Populates a `GXLST <geosoft.gxapi.GXLST>` with channel, line, blob or user symbols. The name is put into the "Name" part of the `GXLST <geosoft.gxapi.GXLST>` (0), and the handle, an integer value written as a string, is placed in the value part of the `GXLST <geosoft.gxapi.GXLST>` (1). Line symbols are automatically sorted in logical line order. NOTE: The `GXLST <geosoft.gxapi.GXLST>` is NOT cleared before being filled. If you want to clear the `GXLST <geosoft.gxapi.GXLST>` and get sorted values, use the `chan_lst <geosoft.gxapi.GXDB.chan_lst>` and `line_lst <geosoft.gxapi.GXDB.line_lst>` functions. .. seealso:: `chan_lst <geosoft.gxapi.GXDB.chan_lst>`, `line_lst <geosoft.gxapi.GXDB.line_lst>`, `selected_line_lst <geosoft.gxapi.GXDB.selected_line_lst>` """ self._symb_lst(lst, type) def un_lock_all_symb(self): """ UnLocks all symbols. .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._un_lock_all_symb() def un_lock_symb(self, symb): """ UnLocks a symbol. :param symb: Symbol to Lock :type symb: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._un_lock_symb(symb) # VA Channels def add_associated_load(self, group, chan): """ Add this channel to the auto-load feature of the group. :param group: Line :param chan: Channel :type group: int :type chan: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the channel is not yet associated, it is first associated. If the channel is already in the associated-load list, this does nothing. 
As of v6.0, the load-status of channels is no longer stored in the database, but in the project, so this function is equivalent to calling `associate <geosoft.gxapi.GXDB.associate>`. """ self._add_associated_load(group, chan) def add_comment(self, comment, str_val, indent): """ Add a comment with a string to the activity log of the database. :param comment: Comment :param str_val: String :param indent: Indent comment one tab? (TRUE or FALSE) :type comment: str :type str_val: str :type indent: int .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The comment is written in the form: Comment: String2 and is followed by a carriage return. The activity log is created automatically if it does not exist. """ self._add_comment(comment.encode(), str_val.encode(), indent) def add_int_comment(self, comment, val, indent): """ Add a comment with an integer to the activity log of the database. :param comment: Comment :param val: Value :param indent: Indent comment one tab? :type comment: str :type val: int :type indent: bool .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The comment is written in the form: Comment: Value and is followed by a carriage return. The activity log is created automatically if it does not exist. See Notes in `add_comment <geosoft.gxapi.GXDB.add_comment>`. """ self._add_int_comment(comment.encode(), val, indent) def add_double_comment(self, comment, val, indent): """ Add a comment with a float value to the activity log of the database. :param comment: Comment :param val: Value :param indent: Indent comment one tab? :type comment: str :type val: float :type indent: bool .. 
versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The comment is written in the form: Comment: Value and if followed by a carriage return. The activity log is created automatically if it does not exist. See Notes in `add_comment <geosoft.gxapi.GXDB.add_comment>`. """ self._add_double_comment(comment.encode(), val, indent) def add_time_comment(self, comment, indent): """ Add a comment with the date and time to the activity log of the database. :param comment: Comment :param indent: Indent comment one tab? :type comment: str :type indent: bool .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The comment is written in the form: Comment: 2001/12/31 23:59:59 and is followed by a carriage return. The activity log is created automatically if it does not exist. See Notes in `add_comment <geosoft.gxapi.GXDB.add_comment>`. """ self._add_time_comment(comment.encode(), indent) def associate(self, group, chan): """ Associate a channel with a group. :param group: Group line :param chan: Channel :type group: int :type chan: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If it is already associated, or if the group has no defined group class, does nothing. As of v6.3, if a group line has no class defined, then ALL channels are assumed to be associated with it. This means that you need to associate a new channel with a group only in those cases where the group class is defined. If this function is used on a group with a group class, then the channel is added to class's association list, and the channel will be recognized as being associated with all groups of that class. 
""" self._associate(group, chan) def associate_all(self, group): """ Associate all channels with a group. :param group: Group line :type group: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** As of v6.3, if a group line has no class defined, then ALL channels are already assumed to be associated with it, and this function does nothing. """ self._associate_all(group) def associate_class(self, chan, cl): """ Associate a channel with all groups of a specific class. :param chan: Channel :param cl: Class name of groups to associate the channel with. (Must be defined). :type chan: int :type cl: str .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** As of v6.3, if a group line has no class defined, then ALL channels are automatically assumed to be associated with it. """ self._associate_class(chan, cl.encode()) @classmethod def gen_valid_chan_symb(cls, str_in, str_out): """ Generate a valid channel name from a name candidate :param str_in: Input string :param str_out: Outout string :type str_in: str :type str_out: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ str_out.value = gxapi_cy.WrapDB._gen_valid_chan_symb(GXContext._get_tls_geo(), str_in.encode(), str_out.value.encode()) @classmethod def gen_valid_line_symb(cls, str_in, str_out): """ Generate a valid line symb name string from given string. :param str_in: Input string :param str_out: Outout string :type str_in: str :type str_out: str_ref .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The returned name is either the same size as the input or shorter. 
Escapes, leading and trailing spaces are removed, then all illegal characters are replaced with an underscore. """ str_out.value = gxapi_cy.WrapDB._gen_valid_line_symb(GXContext._get_tls_geo(), str_in.encode(), str_out.value.encode()) def get_chan_va(self, line, chan, va): """ Place the contents of a channel in a `GXVA <geosoft.gxapi.GXVA>`. :param line: Line :param chan: Channel :param va: `GXVA <geosoft.gxapi.GXVA>` in which to place the data :type line: int :type chan: int :type va: GXVA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `GXVA <geosoft.gxapi.GXVA>` class. """ self._get_chan_va(line, chan, va) def get_va_scaling(self, ch, base, range): """ Get base and range for `GXVA <geosoft.gxapi.GXVA>` channel cell display. :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param base: Base value (rDummy for none) :param range: Range value (rDummy for none) :type ch: int :type base: float_ref :type range: float_ref .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `set_va_scaling <geosoft.gxapi.GXDB.set_va_scaling>`. """ base.value, range.value = self._get_va_scaling(ch, base.value, range.value) def get_va_windows(self, ch, min_w, max_w): """ Get the range of windows displayed for a `GXVA <geosoft.gxapi.GXVA>` channel. :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param min_w: First window (0 to N-2, iDummy for default) :param max_w: Last window (1 to N-1, iDummy for default) :type ch: int :type min_w: int_ref :type max_w: int_ref .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `set_va_windows <geosoft.gxapi.GXDB.set_va_windows>`. 
""" min_w.value, max_w.value = self._get_va_windows(ch, min_w.value, max_w.value) def set_va_base_coordinate_info(self, ch, domain, base, vv, units, allow_changes): """ Set the array channel base coordinate type, offset and values. :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param domain: :ref:`DB_ARRAY_BASETYPE` :param base: Optional offset or base frequency :param vv: Values (one per array channel column) (REAL) :param units: Units :param allow_changes: Allow changes to existing values? :type ch: int :type domain: int :type base: float :type vv: GXVV :type units: str :type allow_changes: bool .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `get_va_base_coordinate_info <geosoft.gxapi.GXDB.get_va_base_coordinate_info>`. """ self._set_va_base_coordinate_info(ch, domain, base, vv, units.encode(), allow_changes) def get_va_base_coordinate_info(self, ch, domain, base, vv, units): """ Set the array channel base coordinate type, offset and values. :param ch: Channel (Locked `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`) :param domain: :ref:`DB_ARRAY_BASETYPE` :param base: Optional offset or base frequency :param vv: Values (one per array channel column) (REAL) :param units: Units :type ch: int :type domain: int_ref :type base: float_ref :type vv: GXVV :type units: str_ref .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `set_va_base_coordinate_info <geosoft.gxapi.GXDB.set_va_base_coordinate_info>`. """ domain.value, base.value, units.value = self._get_va_base_coordinate_info(ch, domain.value, base.value, vv, units.value.encode()) def get_group_class(self, symb, cl): """ Set the Class name for a group line. 
:param symb: Group line - `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` or `DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>` :param cl: Returned class name - max size = `DB_GROUP_CLASS_SIZE <geosoft.gxapi.DB_GROUP_CLASS_SIZE>` - 1 :type symb: int :type cl: str_ref .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method fails if the line is not a group line. Group classes are used to identify group lines used for special purposes, e.g.: "COLLAR" for the Wholeplot collar table, or "TARGETS" for the UX-Detect Targets list. .. seealso:: `line_category <geosoft.gxapi.GXDB.line_category>` - to see if a line is a group line. """ cl.value = self._get_group_class(symb, cl.value.encode()) def get_info(self, item): """ Get information about the database. :param item: :ref:`DB_INFO` :type item: int :returns: x - Return Value :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_info(item) return ret_val def get_va_prof_color_file(self, ch, file): """ Get colors for a `GXVA <geosoft.gxapi.GXVA>` channel when displayed in the profile window. :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param file: Zone file name, "" to clear. :type ch: int :type file: str_ref .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `set_va_prof_color_file <geosoft.gxapi.GXDB.set_va_prof_color_file>`. 
""" file.value = self._get_va_prof_color_file(ch, file.value.encode()) def get_va_prof_sect_option(self, ch, option): """ Get the display options of `GXVA <geosoft.gxapi.GXVA>` channels :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param option: Option "Profile", "Section" or "Section and Profile" :type ch: int :type option: str_ref .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ option.value = self._get_va_prof_sect_option(ch, option.value.encode()) def get_va_sect_color_file(self, ch, file): """ Get colors for a `GXVA <geosoft.gxapi.GXVA>` channel when displayed section in the profile window. :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param file: Zone file name :type ch: int :type file: str_ref .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Fails in the channel is not an array channel """ file.value = self._get_va_sect_color_file(ch, file.value.encode()) def is_associated(self, group, chan): """ Check to see if a channel is associated with group. :param group: Line :param chan: Channel :type group: int :type chan: int :returns: 0 if not a group line, or if the channel is not associated. As of v6.3, if a group line has no class defined, then ALL channels are automatically assumed to be associated with it. :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_associated(group, chan) return ret_val def is_wholeplot(self): """ Is this a Wholeplot database? :returns: 1 if it is a Wholeplot database 0 if it is not. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Currently checks to see if the DH_COLLAR line exists. """ ret_val = self._is_wholeplot() return ret_val def put_chan_va(self, line, chan, va): """ Place the contents of a `GXVA <geosoft.gxapi.GXVA>` in a channel. :param line: Line :param chan: Channel :param va: `GXVA <geosoft.gxapi.GXVA>` from which to get the data :type line: int :type chan: int :type va: GXVA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `GXVA <geosoft.gxapi.GXVA>` class. """ self._put_chan_va(line, chan, va) def set_group_class(self, symb, cl): """ Set the Class name for a group line. :param symb: Group line - `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>` :param cl: `DB_GROUP_CLASS_SIZE <geosoft.gxapi.DB_GROUP_CLASS_SIZE>` :type symb: int :type cl: str .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method fails if the line is not a group line. Group classes are used to identify group lines used for special purposes. All group lines with the same class share the same list of associated channels. As of v6.3, if a group line has no class defined, then ALL channels are assumed to be associated with it. This means that a group class should only be defined when you wish to associate a subset of the available channels to group line. .. seealso:: `line_category <geosoft.gxapi.GXDB.line_category>` - to see if a line is a group line. `associate <geosoft.gxapi.GXDB.associate>` - Associate a channel with a group. """ self._set_group_class(symb, cl.encode()) def set_va_prof_color_file(self, ch, file): """ Set colors for a `GXVA <geosoft.gxapi.GXVA>` channel when displayed in the profile window. 
:param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param file: Zone file name, "" to clear. :type ch: int :type file: str .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Fails in the channel is not an array channel, if the file does not exist, or if it is not a valid color zone file. The individual columns in the array channel are displayed using the input zone file colors. A linear `GXITR <geosoft.gxapi.GXITR>` from 0 to 1 is created on the color zones to map to individual channel indices (expressed as a fraction as shown below). For instance, for a file with 8 colors the ranges are as follows: Color Range Color 1 0 > value >= 0.125 Color 2 0.125 > value >= 0.25 Color 3 0.25 > value >= 0.375 Color 4 0.375 > value >= 0.5 Color 5 0.5 > value >= 0.625 Color 6 0.625 > value >= 0.75 Color 7 0.75 > value >= 0.875 Color 8 0.875 > value >= 1.0 When an array channel is displayed, the index of each element (column) is mapped into the corresponding range above using the following formula: value = (column index) / (# of columns - 1) For an array with 8 columns, you get the following values: Column Value Color 0 0 1 1 0.14 2 2 0.28 3 3 0.43 4 4 0.57 5 5 0.71 6 6 0.86 7 7 1.0 8 The color file search path is: Local directory, then oasismontaj\\tbl. """ self._set_va_prof_color_file(ch, file.encode()) def set_va_prof_sect_option(self, ch, option): """ Set the display options of `GXVA <geosoft.gxapi.GXVA>` channels :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param option: Option "Profile", "Section" or "Section and Profile" :type ch: int :type option: str .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_va_prof_sect_option(ch, option.encode()) def set_va_scaling(self, ch, base, range): """ Set base and range for `GXVA <geosoft.gxapi.GXVA>` channel cell display. :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param base: Base value (rDummy for none) :param range: Range value (rDummy for none) :type ch: int :type base: float :type range: float .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** By default, `GXVA <geosoft.gxapi.GXVA>` profiles autoscale to fit in the database cell. This lets the user set a single base and range for all cells. If either input is a dummy, both are set as dummies, and autoscaling is used. """ self._set_va_scaling(ch, base, range) def set_va_sect_color_file(self, ch, file): """ Set colors for a `GXVA <geosoft.gxapi.GXVA>` channel when displayed section in the profile window. :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param file: Zone file name :type ch: int :type file: str .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Fails if the channel is not an array channel, if the file does not exist, or if it is not a valid color zone file. """ self._set_va_sect_color_file(ch, file.encode()) def set_va_windows(self, ch, min_w, max_w): """ Set the range of windows to display for a `GXVA <geosoft.gxapi.GXVA>` channel. :param ch: Channel (Locked `DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`) :param min_w: First window (0 to N-1, iDummy for default) :param max_w: Last window (0 to N-1, iDummy for default) :type ch: int :type min_w: int :type max_w: int ..
versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use to display a subset of the `GXVA <geosoft.gxapi.GXVA>` channel windows in the GDB. Windows index from 0. """ self._set_va_windows(ch, min_w, max_w) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/om-extensions/hello_world.py # Oasis montaj Python extension to say Hello. # To run this extension, select "Settings / Run GX or Python...", # then browse to this script file. import geosoft.gxpy as gxpy # a python script must have a rungx() def rungx(): gxpy.utility.check_version('9.2') # Get the current gx context # This is normally not required but in this example we want the gid. with gxpy.gx.gx() as gxp: # say hello to the user identified by gxp.gid. gxpy.utility.display_message("GX Python", "Hello {}".format(gxp.gid)) <file_sep>/geosoft/gxapi/GXSBF.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSBF(gxapi_cy.WrapSBF): """ GXSBF class. The `GXSBF <geosoft.gxapi.GXSBF>` class provides a means of storing data in a file-type directory structure within a workspace, database or map. 
Each of these three objects contains its own `GXSBF <geosoft.gxapi.GXSBF>` object, which may be accessed using the `h_get_sys <geosoft.gxapi.GXSBF.h_get_sys>`, `h_get_db <geosoft.gxapi.GXSBF.h_get_db>` and `h_get_map <geosoft.gxapi.GXSBF.h_get_map>` functions. To access data in a file, or create a new file in the `GXSBF <geosoft.gxapi.GXSBF>` object, call the CreateSBF_BF function (see `GXBF <geosoft.gxapi.GXBF>`), which will return a `GXBF <geosoft.gxapi.GXBF>` object to use. """ def __init__(self, handle=0): super(GXSBF, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSBF <geosoft.gxapi.GXSBF>` :returns: A null `GXSBF <geosoft.gxapi.GXSBF>` :rtype: GXSBF """ return GXSBF() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def create(self, file, status): """ Create a child `GXSBF <geosoft.gxapi.GXSBF>` object inside an `GXSBF <geosoft.gxapi.GXSBF>`. :param file: Directory name to open / create :param status: :ref:`SBF_OPEN` :type file: str :type status: int :returns: `GXSBF <geosoft.gxapi.GXSBF>` object, terminates if fails. :rtype: GXSBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._create(file.encode(), status) return GXSBF(ret_val) def create_obj_list(self, lst, type): """ Fills an `GXLST <geosoft.gxapi.GXLST>` with embedded storage names of an `GXSBF <geosoft.gxapi.GXSBF>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` handle :param type: :ref:`SBF_TYPE` :type lst: GXLST :type type: int ..
versionadded:: 5.0.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Function will populate an `GXLST <geosoft.gxapi.GXLST>` object with embedded files (`SBF_TYPE_FILES <geosoft.gxapi.SBF_TYPE_FILES>`), directories (`SBF_TYPE_DIRS <geosoft.gxapi.SBF_TYPE_DIRS>`), or both (pass `SBF_TYPE_BOTH <geosoft.gxapi.SBF_TYPE_BOTH>`) in an `GXSBF <geosoft.gxapi.GXSBF>`. Along with the Name of the file or directory, a constant "dir" or "file" string is written to the `GXLST <geosoft.gxapi.GXLST>` also. """ self._create_obj_list(lst, type) def del_dir(self, dir): """ Delete a directory (storage) from this storage. :param dir: Dir/Storage Name :type dir: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._del_dir(dir.encode()) def del_file(self, file): """ Delete a file from this storage. :param file: File Name :type file: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._del_file(file.encode()) @classmethod def h_get_db(cls, db): """ Get the embedded file storage from a database. :param db: Database :type db: GXDB :returns: `GXSBF <geosoft.gxapi.GXSBF>` Object :rtype: GXSBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSBF._h_get_db(GXContext._get_tls_geo(), db) return GXSBF(ret_val) @classmethod def h_get_map(cls, map): """ Get the embedded file storage from a map. :param map: `GXMAP <geosoft.gxapi.GXMAP>` object :type map: GXMAP :returns: `GXSBF <geosoft.gxapi.GXSBF>` Object :rtype: GXSBF .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSBF._h_get_map(GXContext._get_tls_geo(), map) return GXSBF(ret_val) @classmethod def h_get_sys(cls): """ Get the main embedded file storage (in workspace). :returns: `GXSBF <geosoft.gxapi.GXSBF>` Object :rtype: GXSBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSBF._h_get_sys(GXContext._get_tls_geo()) return GXSBF(ret_val) def exist_dir(self, dir): """ Check to see if a directory (storage) exists inside this storage. :param dir: Dir/Storage Name :type dir: str :returns: 0 - Does not exist 1 - Exists :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._exist_dir(dir.encode()) return ret_val def exist_file(self, file): """ Check to see if a file exists inside this storage. :param file: File Name :type file: str :returns: 0 - Does not exist 1 - Exists :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._exist_file(file.encode()) return ret_val def save_log(self, dir, file, file_save, p5): """ Save an embedded file to an ASCII file. :param dir: Directory name in the Parent `GXSBF <geosoft.gxapi.GXSBF>` :param file: File name in the directory :param file_save: File to save as (as an ASCII file) :param p5: Append Mode: 0 - New file, 1 - Append file :type dir: str :type file: str :type file_save: str :type p5: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save_log(dir.encode(), file.encode(), file_save.encode(), p5) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/Geosoft modules - gxapi and gxpy/compression_ratio.py import geosoft.gxpy as gxpy import geosoft.gxapi as gxapi gxc = gxpy.gx.GXpy() grid = gxpy.grid.Grid.open('test.grd(GRD)') cr = grid.gximg.query_double(gxapi.IMG_QUERY_rCOMPRESSION_RATIO) print('compression ratio: {}'.format(cr))<file_sep>/docs/GXIMU.rst .. _GXIMU: GXIMU class ================================== .. autoclass:: geosoft.gxapi.GXIMU :members: .. _IMU_BOOL_OLAP: IMU_BOOL_OLAP constants ----------------------------------------------------------------------- Overlapping area option .. autodata:: geosoft.gxapi.IMU_BOOL_OLAP_AVE :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_OLAP_AVE .. autodata:: geosoft.gxapi.IMU_BOOL_OLAP_1 :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_OLAP_1 .. autodata:: geosoft.gxapi.IMU_BOOL_OLAP_2 :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_OLAP_2 .. autodata:: geosoft.gxapi.IMU_BOOL_OLAP_MINUS :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_OLAP_MINUS .. _IMU_BOOL_OPT: IMU_BOOL_OPT constants ----------------------------------------------------------------------- Boolean logic option .. autodata:: geosoft.gxapi.IMU_BOOL_OPT_AND :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_OPT_AND .. autodata:: geosoft.gxapi.IMU_BOOL_OPT_OR :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_OPT_OR .. autodata:: geosoft.gxapi.IMU_BOOL_OPT_XOR :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_OPT_XOR .. 
_IMU_BOOL_SIZING: IMU_BOOL_SIZING constants ----------------------------------------------------------------------- Sizing option .. autodata:: geosoft.gxapi.IMU_BOOL_SIZING_MIN :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_SIZING_MIN .. autodata:: geosoft.gxapi.IMU_BOOL_SIZING_0 :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_SIZING_0 .. autodata:: geosoft.gxapi.IMU_BOOL_SIZING_1 :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_SIZING_1 .. autodata:: geosoft.gxapi.IMU_BOOL_SIZING_MAX :annotation: .. autoattribute:: geosoft.gxapi.IMU_BOOL_SIZING_MAX .. _IMU_DOUBLE_CRC_BITS: IMU_DOUBLE_CRC_BITS constants ----------------------------------------------------------------------- Bits to use in double CRC's .. autodata:: geosoft.gxapi.IMU_DOUBLE_CRC_BITS_EXACT :annotation: .. autoattribute:: geosoft.gxapi.IMU_DOUBLE_CRC_BITS_EXACT .. autodata:: geosoft.gxapi.IMU_DOUBLE_CRC_BITS_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.IMU_DOUBLE_CRC_BITS_DEFAULT .. autodata:: geosoft.gxapi.IMU_DOUBLE_CRC_BITS_MAX :annotation: .. autoattribute:: geosoft.gxapi.IMU_DOUBLE_CRC_BITS_MAX .. _IMU_EXPAND_SHAPE: IMU_EXPAND_SHAPE constants ----------------------------------------------------------------------- Shape of output grid .. autodata:: geosoft.gxapi.IMU_EXPAND_SHAPE_RECTANGLE :annotation: .. autoattribute:: geosoft.gxapi.IMU_EXPAND_SHAPE_RECTANGLE .. autodata:: geosoft.gxapi.IMU_EXPAND_SHAPE_SQUARE :annotation: .. autoattribute:: geosoft.gxapi.IMU_EXPAND_SHAPE_SQUARE .. _IMU_FILL_ROLLOPT: IMU_FILL_ROLLOPT constants ----------------------------------------------------------------------- Defines for Grid Filling Method Options .. autodata:: geosoft.gxapi.IMU_FILL_ROLLOPT_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILL_ROLLOPT_LINEAR .. autodata:: geosoft.gxapi.IMU_FILL_ROLLOPT_SQUARE :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILL_ROLLOPT_SQUARE .. 
_IMU_FILT_DUMMY: IMU_FILT_DUMMY constants ----------------------------------------------------------------------- Settings for placing dummy values in grid if any of filter values are dummy .. autodata:: geosoft.gxapi.IMU_FILT_DUMMY_NO :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILT_DUMMY_NO .. autodata:: geosoft.gxapi.IMU_FILT_DUMMY_YES :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILT_DUMMY_YES .. _IMU_FILT_FILE: IMU_FILT_FILE constants ----------------------------------------------------------------------- Flags which indicate if a file is to be used to read the filter values .. autodata:: geosoft.gxapi.IMU_FILT_FILE_NO :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILT_FILE_NO .. autodata:: geosoft.gxapi.IMU_FILT_FILE_YES :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILT_FILE_YES .. _IMU_FILT_HZDRV: IMU_FILT_HZDRV constants ----------------------------------------------------------------------- Flags which indicate which type of horizontal derivative is being applied (X direction, Y direction, none at all) .. autodata:: geosoft.gxapi.IMU_FILT_HZDRV_NO :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILT_HZDRV_NO .. autodata:: geosoft.gxapi.IMU_FILT_HZDRV_X :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILT_HZDRV_X .. autodata:: geosoft.gxapi.IMU_FILT_HZDRV_Y :annotation: .. autoattribute:: geosoft.gxapi.IMU_FILT_HZDRV_Y .. _IMU_FLOAT_CRC_BITS: IMU_FLOAT_CRC_BITS constants ----------------------------------------------------------------------- Bits to use in float CRC's .. autodata:: geosoft.gxapi.IMU_FLOAT_CRC_BITS_EXACT :annotation: .. autoattribute:: geosoft.gxapi.IMU_FLOAT_CRC_BITS_EXACT .. autodata:: geosoft.gxapi.IMU_FLOAT_CRC_BITS_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.IMU_FLOAT_CRC_BITS_DEFAULT .. autodata:: geosoft.gxapi.IMU_FLOAT_CRC_BITS_MAX :annotation: .. autoattribute:: geosoft.gxapi.IMU_FLOAT_CRC_BITS_MAX .. 
_IMU_MASK: IMU_MASK constants ----------------------------------------------------------------------- Defined options for masking grids .. autodata:: geosoft.gxapi.IMU_MASK_INSIDE :annotation: .. autoattribute:: geosoft.gxapi.IMU_MASK_INSIDE .. autodata:: geosoft.gxapi.IMU_MASK_OUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.IMU_MASK_OUTSIDE .. _IMU_STAT_FORCED: IMU_STAT_FORCED constants ----------------------------------------------------------------------- Defined options for forcing recalculating the grid values .. autodata:: geosoft.gxapi.IMU_STAT_FORCED_NO :annotation: .. autoattribute:: geosoft.gxapi.IMU_STAT_FORCED_NO .. autodata:: geosoft.gxapi.IMU_STAT_FORCED_YES :annotation: .. autoattribute:: geosoft.gxapi.IMU_STAT_FORCED_YES .. _IMU_TRANS: IMU_TRANS constants ----------------------------------------------------------------------- Transpose Options available for `grid_trns <geosoft.gxapi.GXIMU.grid_trns>` implies original grid lines: .. autodata:: geosoft.gxapi.IMU_TRANS_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.IMU_TRANS_DEFAULT .. autodata:: geosoft.gxapi.IMU_TRANS_Y :annotation: .. autoattribute:: geosoft.gxapi.IMU_TRANS_Y .. autodata:: geosoft.gxapi.IMU_TRANS_X :annotation: .. autoattribute:: geosoft.gxapi.IMU_TRANS_X .. _IMU_TREND: IMU_TREND constants ----------------------------------------------------------------------- Points in grid to use .. autodata:: geosoft.gxapi.IMU_TREND_ALL :annotation: .. autoattribute:: geosoft.gxapi.IMU_TREND_ALL .. autodata:: geosoft.gxapi.IMU_TREND_EDGE :annotation: .. autoattribute:: geosoft.gxapi.IMU_TREND_EDGE .. _IMU_WIND_COORD: IMU_WIND_COORD constants ----------------------------------------------------------------------- Output grid coordinate units .. autodata:: geosoft.gxapi.IMU_WIND_GRID :annotation: .. autoattribute:: geosoft.gxapi.IMU_WIND_GRID .. autodata:: geosoft.gxapi.IMU_WIND_GROUND :annotation: .. autoattribute:: geosoft.gxapi.IMU_WIND_GROUND .. 
_IMU_WIND_DUMMIES: IMU_WIND_DUMMIES constants ----------------------------------------------------------------------- Option for handling out-of-range Z values .. autodata:: geosoft.gxapi.IMU_WIND_DUMMY :annotation: .. autoattribute:: geosoft.gxapi.IMU_WIND_DUMMY .. autodata:: geosoft.gxapi.IMU_WIND_CLIP :annotation: .. autoattribute:: geosoft.gxapi.IMU_WIND_CLIP .. _IMU_XYZ_INDEX: IMU_XYZ_INDEX constants ----------------------------------------------------------------------- Flags whether to use grid index numbers as station numbers. .. autodata:: geosoft.gxapi.IMU_XYZ_INDEX_NO :annotation: .. autoattribute:: geosoft.gxapi.IMU_XYZ_INDEX_NO .. autodata:: geosoft.gxapi.IMU_XYZ_INDEX_YES :annotation: .. autoattribute:: geosoft.gxapi.IMU_XYZ_INDEX_YES .. _IMU_XYZ_LABEL: IMU_XYZ_LABEL constants ----------------------------------------------------------------------- XYZ Label Flags .. autodata:: geosoft.gxapi.IMU_XYZ_LABEL_NO :annotation: .. autoattribute:: geosoft.gxapi.IMU_XYZ_LABEL_NO .. autodata:: geosoft.gxapi.IMU_XYZ_LABEL_YES :annotation: .. autoattribute:: geosoft.gxapi.IMU_XYZ_LABEL_YES <file_sep>/docs/GXMXD.rst .. _GXMXD: GXMXD class ================================== .. autoclass:: geosoft.gxapi.GXMXD :members: <file_sep>/docs/GXLMSG.rst .. _GXLMSG: GXLMSG class ================================== .. autoclass:: geosoft.gxapi.GXLMSG :members: <file_sep>/docs/templates/geosoft.gxpy.rst .. _gxpy: gxpy modules reference ====================== The :mod:`geosoft.gxpy` modules provide a Python-oriented API that takes advantage of Python to simplify much of the complexity of the `low-level GX API <geosoft.gxapi.html>`__. Submodules ---------- The following sub-modules organize functions into logical groups. .. currentmodule:: geosoft.gxpy .. autosummary:: :toctree: {% for module in modules %} geosoft.gxpy.{{ module[0] }} {% endfor %} See https://github.com/GeosoftInc/gxpy/tree/master/examples for example scripts that use the Python modules. 
See https://github.com/GeosoftInc/gxpy/tree/master/geosoft/gxpy/tests for tests that exercise the gxpy modules. .. automodule:: geosoft.gxpy :members: :undoc-members: :show-inheritance: <file_sep>/geosoft/gxapi/GXVOX.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXPG import GXPG from .GXST import GXST ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVOX(gxapi_cy.WrapVOX): """ GXVOX class. High Performance 3D Grid. Designed for accessing 3D grids quickly using slices. It is designed around non-uniform multi-resolution compressed storage. To sample a voxel at specific locations, use `GXVOXE <geosoft.gxapi.GXVOXE>`. """ def __init__(self, handle=0): super(GXVOX, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVOX <geosoft.gxapi.GXVOX>` :returns: A null `GXVOX <geosoft.gxapi.GXVOX>` :rtype: GXVOX """ return GXVOX() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def calc_stats(self, st): """ Calculate Statistics :param st: `GXST <geosoft.gxapi.GXST>` Object :type st: GXST .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._calc_stats(st) @classmethod def create(cls, name): """ Create a handle to an `GXVOX <geosoft.gxapi.GXVOX>` object :param name: File Name :type name: str :returns: `GXVOX <geosoft.gxapi.GXVOX>` handle, terminates if creation fails :rtype: GXVOX ..
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapVOX._create(GXContext._get_tls_geo(), name.encode()) return GXVOX(ret_val) def create_pg(self): """ Create a 3D `GXPG <geosoft.gxapi.GXPG>` from a `GXVOX <geosoft.gxapi.GXVOX>` object :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._create_pg() return GXPG(ret_val) def create_type_pg(self, type): """ Create a 3D `GXPG <geosoft.gxapi.GXPG>` from a `GXVOX <geosoft.gxapi.GXVOX>` object with a specific Type :param type: :ref:`GS_TYPES` :type type: int :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._create_type_pg(type) return GXPG(ret_val) def dump(self, name): """ Export all layers of this `GXVOX <geosoft.gxapi.GXVOX>` in all directions. :param name: Name of grids (each layers adds _Dir_Z to the name) :type name: str .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._dump(name.encode()) def export_img(self, name, dir): """ Export all layers of this `GXVOX <geosoft.gxapi.GXVOX>` into grid files. :param name: Name of grids (each layers adds _Number to the name) :param dir: :ref:`VOX_DIR` :type name: str :type dir: int .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_img(name.encode(), dir) def export_to_grids(self, name, dir, start, incr, num, cell_size, interp): """ Export all layers of this `GXVOX <geosoft.gxapi.GXVOX>` into grid files, with optional cell size. :param name: Name of grids (each layers adds _Number to the name) :param dir: :ref:`VOX_DIR` :param start: Starting index :param incr: Increment in index :param num: Total number of grids (-1 or `iDUMMY <geosoft.gxapi.iDUMMY>` for all) :param cell_size: Cell size (can be `GS_R8DM <geosoft.gxapi.GS_R8DM>`) :param interp: :ref:`VOX_SLICE_MODE` :type name: str :type dir: int :type start: int :type incr: int :type num: int :type cell_size: float :type interp: int .. versionadded:: 7.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the cell size is not specified, then: 1. If the cell sizes are uniform in a given direction, that size is used 2. If the cell sizes are variable in a given direction, then the smallest size is used """ self._export_to_grids(name.encode(), dir, start, incr, num, cell_size, interp) @classmethod def export_xml(cls, voxel, crc, file): """ Export a `GXVOX <geosoft.gxapi.GXVOX>` to a compressed XML file :param voxel: Voxel file name :param crc: CRC returned - not implemented - always returns 0. :param file: Output XML file :type voxel: str :type crc: int_ref :type file: str .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ crc.value = gxapi_cy.WrapVOX._export_xml(GXContext._get_tls_geo(), voxel.encode(), crc.value, file.encode()) def export_seg_y(self, output_segy_filename, sample_interval): """ Export a voxel to a depth SEG-Y file :param output_segy_filename: SEG-Y filename to create :param sample_interval: Sampling interval (can be `GS_R8DM <geosoft.gxapi.GS_R8DM>` if input voxel has constant Z cell size) :type output_segy_filename: str :type sample_interval: float .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_seg_y(output_segy_filename.encode(), sample_interval) @classmethod def export_ji_gs_xml(cls, voxel, file): """ Export a `GXVOX <geosoft.gxapi.GXVOX>` to a compressed XML file. Verbose version. :param voxel: Voxel file name :param file: Output XML file :type voxel: str :type file: str .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapVOX._export_ji_gs_xml(GXContext._get_tls_geo(), voxel.encode(), file.encode()) def export_xyz(self, xyz, dir, rev_x, rev_y, rev_z, dummies): """ Export a Voxel to an XYZ File :param xyz: File Name :param dir: :ref:`VOX_DIRECTION` :param rev_x: Reverse X ? (0/1) :param rev_y: Reverse Y ? (0/1) :param rev_z: Reverse Z ? (0/1) :param dummies: Write Dummies? (0/1) :type xyz: str :type dir: int :type rev_x: int :type rev_y: int :type rev_z: int :type dummies: int .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_xyz(xyz.encode(), dir, rev_x, rev_y, rev_z, dummies) def filter(self, filter, filter_file, n_passes, interpolate_dummies, output_vox): """ Apply a 3D filter to a voxel. :param filter: :ref:`VOX_FILTER3D` :param filter_file: Filter file, if filter is `VOX_FILTER3D_FILE <geosoft.gxapi.VOX_FILTER3D_FILE>` :param n_passes: Number of filter passes :param interpolate_dummies: (1: interpolate dummies) :param output_vox: Output voxel file name. :type filter: int :type filter_file: str :type n_passes: int :type interpolate_dummies: int :type output_vox: str .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._filter(filter, filter_file.encode(), n_passes, interpolate_dummies, output_vox.encode()) @classmethod def generate_db(cls, voxel_file, db, symb): """ Generate a `GXVOX <geosoft.gxapi.GXVOX>` from a Database :param voxel_file: Voxel Name :param db: `GXDB <geosoft.gxapi.GXDB>` To import from :param symb: Symbol to import data from :type voxel_file: str :type db: GXDB :type symb: int .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapVOX._generate_db(GXContext._get_tls_geo(), voxel_file.encode(), db, symb) @classmethod def generate_vector_voxel_from_db(cls, voxel_file, db, type, symb_x, symb_y, symb_z, inc, dec): """ Generate a vector voxel `GXVOX <geosoft.gxapi.GXVOX>` from a Database :param voxel_file: Voxel Name :param db: `GXDB <geosoft.gxapi.GXDB>` To import from :param type: VOX_VECTORVOX_IMPORTImport XYZ, UVW or Amplitude/Inclination/Declination channels :param symb_x: Symbol to import X, U or Amplitude data from :param symb_y: Symbol to import Y, V or Inclination data from :param symb_z: Symbol to import Z, W or Declination data from :param inc: Inclination value for `VOX_VECTORVOX_UVW <geosoft.gxapi.VOX_VECTORVOX_UVW>` (-90° to 90°) :param dec: Declination value for `VOX_VECTORVOX_UVW <geosoft.gxapi.VOX_VECTORVOX_UVW>` (-180° to 180°) :type voxel_file: str :type db: GXDB :type type: int :type symb_x: int :type symb_y: int :type symb_z: int :type inc: float :type dec: float .. 
versionadded:: 8.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapVOX._generate_vector_voxel_from_db(GXContext._get_tls_geo(), voxel_file.encode(), db, type, symb_x, symb_y, symb_z, inc, dec)



    @classmethod
    def generate_pg(cls, name, pg, ox, oy, oz, cx, cy, cz, ipj, meta):
        """
        Generate a `GXVOX <geosoft.gxapi.GXVOX>` from a 3D Pager

        :param name:  Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param pg:    Pager with the Voxel Data
        :param ox:    Origin X
        :param oy:    Origin Y
        :param oz:    Origin Z
        :param cx:    Cell Size X
        :param cy:    Cell Size Y
        :param cz:    Cell Size Z
        :param ipj:   Projection
        :param meta:  Metadata
        :type  name:  str
        :type  pg:    GXPG
        :type  ox:    float
        :type  oy:    float
        :type  oz:    float
        :type  cx:    float
        :type  cy:    float
        :type  cz:    float
        :type  ipj:   GXIPJ
        :type  meta:  GXMETA

        :returns:     `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:       GXVOX

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapVOX._generate_pg(GXContext._get_tls_geo(), name.encode(), pg, ox, oy, oz, cx, cy, cz, ipj, meta)
        return GXVOX(ret_val)



    @classmethod
    def generate_pgvv(cls, name, pg, ox, oy, oz, cx, cy, cz, ipj, meta):
        """
        Generate a `GXVOX <geosoft.gxapi.GXVOX>` from a 3D Pager, cells sizes passed in VVs.

        :param name:  Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param pg:    Pager with the Voxel Data
        :param ox:    Origin X
        :param oy:    Origin Y
        :param oz:    Origin Z
        :param cx:    Cell Sizes X
        :param cy:    Cell Sizes Y
        :param cz:    Cell Sizes Z
        :param ipj:   Projection
        :param meta:  Metadata
        :type  name:  str
        :type  pg:    GXPG
        :type  ox:    float
        :type  oy:    float
        :type  oz:    float
        :type  cx:    GXVV
        :type  cy:    GXVV
        :type  cz:    GXVV
        :type  ipj:   GXIPJ
        :type  meta:  GXMETA

        :returns:     `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:       GXVOX

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The input cell size VVs' lengths must match the input `GXPG <geosoft.gxapi.GXPG>` dimensions.
        """
        ret_val = gxapi_cy.WrapVOX._generate_pgvv(GXContext._get_tls_geo(), name.encode(), pg, ox, oy, oz, cx, cy, cz, ipj, meta)
        return GXVOX(ret_val)



    @classmethod
    def init_generate_by_subset_pg(cls, data_type, nx, ny, nz):
        """
        Initialize the generation of a `GXVOX <geosoft.gxapi.GXVOX>` from a series of 3D subset pagers

        :param data_type:  :ref:`GS_TYPES`
        :param nx:         Points in X
        :param ny:         Points in Y
        :param nz:         Points in Z
        :type  data_type:  int
        :type  nx:         int
        :type  ny:         int
        :type  nz:         int

        :returns:          `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:            GXVOX

        .. versionadded:: 8.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Call `init_generate_by_subset_pg <geosoft.gxapi.GXVOX.init_generate_by_subset_pg>` first, then add a series
        of subset PGs using `add_generate_by_subset_pg <geosoft.gxapi.GXVOX.add_generate_by_subset_pg>`, and finally
        serialize using `end_generate_by_subset_pg <geosoft.gxapi.GXVOX.end_generate_by_subset_pg>`
        """
        ret_val = gxapi_cy.WrapVOX._init_generate_by_subset_pg(GXContext._get_tls_geo(), data_type, nx, ny, nz)
        return GXVOX(ret_val)



    def add_generate_by_subset_pg(self, pg, dir, offset):
        """
        Add a subset 3D pagers. These should be "slabs", 16 wide in the input direction, and the size of
        the full voxel in the other two directions.

        :param pg:      Subset pager with the Voxel Data
        :param dir:     Subset orientation - the "16" (thin) dimension is in the other axis.:ref:`VOX_DIR`
        :param offset:  Offset of the subset `GXPG <geosoft.gxapi.GXPG>` corner, along the "thin" dimension.
        :type  pg:      GXPG
        :type  dir:     int
        :type  offset:  int

        .. versionadded:: 8.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** See `init_generate_by_subset_pg <geosoft.gxapi.GXVOX.init_generate_by_subset_pg>` and
        `end_generate_by_subset_pg <geosoft.gxapi.GXVOX.end_generate_by_subset_pg>`.
        """
        self._add_generate_by_subset_pg(pg, dir, offset)



    def end_generate_by_subset_pg(self, name, ox, oy, oz, cx, cy, cz, ipj, meta):
        """
        Output the voxel, after adding all the subset PGs.

        :param name:  Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param ox:    Origin X
        :param oy:    Origin Y
        :param oz:    Origin Z
        :param cx:    Cell Size X
        :param cy:    Cell Size Y
        :param cz:    Cell Size Z
        :param ipj:   Projection
        :param meta:  Metadata
        :type  name:  str
        :type  ox:    float
        :type  oy:    float
        :type  oz:    float
        :type  cx:    float
        :type  cy:    float
        :type  cz:    float
        :type  ipj:   GXIPJ
        :type  meta:  GXMETA

        .. versionadded:: 8.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** You must begin by calling `init_generate_by_subset_pg <geosoft.gxapi.GXVOX.init_generate_by_subset_pg>`
        and add data using `add_generate_by_subset_pg <geosoft.gxapi.GXVOX.add_generate_by_subset_pg>`.
        """
        self._end_generate_by_subset_pg(name.encode(), ox, oy, oz, cx, cy, cz, ipj, meta)



    def get_area(self, min_x, min_y, min_z, max_x, max_y, max_z):
        """
        Get the area of the voxel.

        :param min_x:  Min X
        :param min_y:  Min Y
        :param min_z:  Min Z
        :param max_x:  Max X
        :param max_y:  Max Y
        :param max_z:  Max Z
        :type  min_x:  float_ref
        :type  min_y:  float_ref
        :type  min_z:  float_ref
        :type  max_x:  float_ref
        :type  max_y:  float_ref
        :type  max_z:  float_ref

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        # Reference-parameter pattern: outputs come back as a tuple and are
        # written into the callers' *_ref wrappers via .value.
        min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_area(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value)



    def get_gocad_location(self, origin_x, origin_y, origin_z, vect_xx, vect_xy, vect_xz, vect_yx, vect_yy, vect_yz, vect_zx, vect_zy, vect_zz):
        """
        Get the location of a voxel with origin and scaled xyz vectors for use with GOCAD.

        :param origin_x:  Origin X
        :param origin_y:  Origin Y
        :param origin_z:  Origin Z
        :param vect_xx:   VectX X
        :param vect_xy:   VectX Y
        :param vect_xz:   VectX Z
        :param vect_yx:   VectY X
        :param vect_yy:   VectY Y
        :param vect_yz:   VectY Z
        :param vect_zx:   VectZ X
        :param vect_zy:   VectZ Y
        :param vect_zz:   VectZ Z
        :type  origin_x:  float_ref
        :type  origin_y:  float_ref
        :type  origin_z:  float_ref
        :type  vect_xx:   float_ref
        :type  vect_xy:   float_ref
        :type  vect_xz:   float_ref
        :type  vect_yx:   float_ref
        :type  vect_yy:   float_ref
        :type  vect_yz:   float_ref
        :type  vect_zx:   float_ref
        :type  vect_zy:   float_ref
        :type  vect_zz:   float_ref

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This is used for GOCAD voxel calculations, and begins with the origin at (0,0,0),
        not the actual location of the corner point.
        """
        origin_x.value, origin_y.value, origin_z.value, vect_xx.value, vect_xy.value, vect_xz.value, vect_yx.value, vect_yy.value, vect_yz.value, vect_zx.value, vect_zy.value, vect_zz.value = self._get_gocad_location(origin_x.value, origin_y.value, origin_z.value, vect_xx.value, vect_xy.value, vect_xz.value, vect_yx.value, vect_yy.value, vect_yz.value, vect_zx.value, vect_zy.value, vect_zz.value)



    def get_grid_section_cell_sizes(self, az, cell_size_x, cell_size_y):
        """
        Get default cell sizes in X and Y for a section grid.

        :param az:           Input section azimuth (degrees CCW from North)
        :param cell_size_x:  Returned X cell size (horizontal) in m
        :param cell_size_y:  Returned Y cell size (vertical) in m
        :type  az:           float
        :type  cell_size_x:  float_ref
        :type  cell_size_y:  float_ref

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This function determines default cell sizes for a vertical grid
        slicing a voxel. It tries to match the "X" and "Y" sizes (in the grid
        coordinates) with the projection of the voxel's cells onto the grid
        plane. It uses a few simple rules:

        If the voxel is rotated about a horizontal axis (i.e. if its own "Z" axis
        is not vertical, then both cell sizes are set to the smallest voxel
        dimension (a single volume pixel) in X, Y and Z.

        If the voxel is "horizontal", then the angle between the section azimuth
        and the voxel's own X and Y axes is used to calculate a value which
        varies between the minimum X size and the minimum Y size, and this is
        used for the grid's "X" cell size. (in other words, if the section is
        parallel to the voxel "X" axis, then the returned "X" cells size is equal
        to the voxel's minimum "Y" cell size.
        The grid's "Y" cell size is set to the voxel's minimum "Z" cell size.
        """
        cell_size_x.value, cell_size_y.value = self._get_grid_section_cell_sizes(az, cell_size_x.value, cell_size_y.value)



    def get_info(self, type, array, x, y, z):
        """
        Get information about a voxel.

        :param type:   Data Type
        :param array:  Array Size
        :param x:      Elements in X
        :param y:      Elements in Y
        :param z:      Elements in Z
        :type  type:   int_ref
        :type  array:  int_ref
        :type  x:      int_ref
        :type  y:      int_ref
        :type  z:      int_ref

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        type.value, array.value, x.value, y.value, z.value = self._get_info(type.value, array.value, x.value, y.value, z.value)



    def get_ipj(self, ipj):
        """
        Get the projection of the voxel.

        :param ipj:  `GXIPJ <geosoft.gxapi.GXIPJ>` object to save `GXVOX <geosoft.gxapi.GXVOX>`'s meta to
        :type  ipj:  GXIPJ

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_ipj(ipj)



    def get_limits(self, min_x, min_y, min_z, max_x, max_y, max_z):
        """
        Get the range of indices with non-dummy data.

        :param min_x:  Index of minimum valid data in X.
        :param min_y:  Index of minimum valid data in Y.
        :param min_z:  Index of minimum valid data in Z.
        :param max_x:  Index of maximum valid data in X.
        :param max_y:  Index of maximum valid data in Y.
        :param max_z:  Index of maximum valid data in Z.
        :type  min_x:  int_ref
        :type  min_y:  int_ref
        :type  min_z:  int_ref
        :type  max_x:  int_ref
        :type  max_y:  int_ref
        :type  max_z:  int_ref

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Find the non-dummy volume of a `GXVOX <geosoft.gxapi.GXVOX>` object. If the voxel is all dummies,
        returns `iMAX <geosoft.gxapi.iMAX>` for the minima, and `iMIN <geosoft.gxapi.iMIN>` for the maxima.
        """
        min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_limits(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value)



    def get_limits_xyz(self, min_x, min_y, min_z, max_x, max_y, max_z):
        """
        Get the range in true XYZ of non-dummy data.

        :param min_x:  Minimum valid data in X.
        :param min_y:  Minimum valid data in Y.
        :param min_z:  Minimum valid data in Z.
        :param max_x:  Maximum valid data in X.
        :param max_y:  Maximum valid data in Y.
        :param max_z:  Maximum valid data in Z.
        :type  min_x:  float_ref
        :type  min_y:  float_ref
        :type  min_z:  float_ref
        :type  max_x:  float_ref
        :type  max_y:  float_ref
        :type  max_z:  float_ref

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Find the non-dummy volume of a `GXVOX <geosoft.gxapi.GXVOX>` in true (X, Y, Z). This method works
        for voxels which are rotated or oriented in 3D, and returns the true min and max X, Y and Z limits
        in the data. The bounds are the bounds for the voxel center points.

        If the voxel is all dummies, returns `rMAX <geosoft.gxapi.rMAX>` for the minima, and
        `rMIN <geosoft.gxapi.rMIN>` for the maxima.
        """
        min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_limits_xyz(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value)



    def get_location(self, origin_x, origin_y, origin_z, vv_x, vv_y, vv_z):
        """
        Get Location information

        :param origin_x:  Origin X
        :param origin_y:  Origin Y
        :param origin_z:  Origin Z
        :param vv_x:      Cell sizes in X
        :param vv_y:      Cell sizes in Y
        :param vv_z:      Cell sizes in Z
        :type  origin_x:  float_ref
        :type  origin_y:  float_ref
        :type  origin_z:  float_ref
        :type  vv_x:      GXVV
        :type  vv_y:      GXVV
        :type  vv_z:      GXVV

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        # Only the origin comes back by value; the cell-size VVs are filled in place.
        origin_x.value, origin_y.value, origin_z.value = self._get_location(origin_x.value, origin_y.value, origin_z.value, vv_x, vv_y, vv_z)



    def get_location_points(self, vv_x, vv_y, vv_z):
        """
        Get the computed location points.

        :param vv_x:  Locations in X
        :param vv_y:  Locations in Y
        :param vv_z:  Locations in Z
        :type  vv_x:  GXVV
        :type  vv_y:  GXVV
        :type  vv_z:  GXVV

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_location_points(vv_x, vv_y, vv_z)



    def get_meta(self, meta):
        """
        Get the metadata of a voxel.

        :param meta:  `GXMETA <geosoft.gxapi.GXMETA>` object to save `GXVOX <geosoft.gxapi.GXVOX>`'s meta to
        :type  meta:  GXMETA

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_meta(meta)



    def get_double_location(self, origin_x, origin_y, origin_z, vect_xx, vect_xy, vect_xz, vect_yx, vect_yy, vect_yz, vect_zx, vect_zy, vect_zz):
        """
        Get the location of a voxel with origin and scaled xyz vectors

        :param origin_x:  Origin X
        :param origin_y:  Origin Y
        :param origin_z:  Origin Z
        :param vect_xx:   VectX X
        :param vect_xy:   VectX Y
        :param vect_xz:   VectX Z
        :param vect_yx:   VectY X
        :param vect_yy:   VectY Y
        :param vect_yz:   VectY Z
        :param vect_zx:   VectZ X
        :param vect_zy:   VectZ Y
        :param vect_zz:   VectZ Z
        :type  origin_x:  float_ref
        :type  origin_y:  float_ref
        :type  origin_z:  float_ref
        :type  vect_xx:   float_ref
        :type  vect_xy:   float_ref
        :type  vect_xz:   float_ref
        :type  vect_yx:   float_ref
        :type  vect_yy:   float_ref
        :type  vect_yz:   float_ref
        :type  vect_zx:   float_ref
        :type  vect_zy:   float_ref
        :type  vect_zz:   float_ref

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        origin_x.value, origin_y.value, origin_z.value, vect_xx.value, vect_xy.value, vect_xz.value, vect_yx.value, vect_yy.value, vect_yz.value, vect_zx.value, vect_zy.value, vect_zz.value = self._get_double_location(origin_x.value, origin_y.value, origin_z.value, vect_xx.value, vect_xy.value, vect_xz.value, vect_yx.value, vect_yy.value, vect_yz.value, vect_zx.value, vect_zy.value, vect_zz.value)



    def get_simple_location(self, origin_x, origin_y, origin_z, cell_x, cell_y, cell_z):
        """
        Get Simple Location information

        :param origin_x:  Origin X
        :param origin_y:  Origin Y
        :param origin_z:  Origin Z
        :param cell_x:    Cell Sizes in X (`rDUMMY <geosoft.gxapi.rDUMMY>` if not uniform)
        :param cell_y:    Cell Sizes in Y (`rDUMMY <geosoft.gxapi.rDUMMY>` if not uniform)
        :param cell_z:    Cell Sizes in Z (`rDUMMY <geosoft.gxapi.rDUMMY>` if not uniform)
        :type  origin_x:  float_ref
        :type  origin_y:  float_ref
        :type  origin_z:  float_ref
        :type  cell_x:    float_ref
        :type  cell_y:    float_ref
        :type  cell_z:    float_ref

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        origin_x.value, origin_y.value, origin_z.value, cell_x.value, cell_y.value, cell_z.value = self._get_simple_location(origin_x.value, origin_y.value, origin_z.value, cell_x.value, cell_y.value, cell_z.value)



    def get_stats(self):
        """
        Get precomputed statistics on this object.

        :returns:  `GXST <geosoft.gxapi.GXST>` object
        :rtype:    GXST

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_stats()
        return GXST(ret_val)



    def get_tpat(self, tpat):
        """
        Get a copy of a thematic voxel's `GXTPAT <geosoft.gxapi.GXTPAT>` object.

        :param tpat:  `GXTPAT <geosoft.gxapi.GXTPAT>` object to get
        :type  tpat:  GXTPAT

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Each row in the `GXTPAT <geosoft.gxapi.GXTPAT>` object corresponds to a stored index value
        in the thematic voxel. The `GXTPAT <geosoft.gxapi.GXTPAT>` should NOT be modified by the addition or
        deletion of items, if it is to be restored into the `GXVOX <geosoft.gxapi.GXVOX>` object, but the
        CODE, LABEL, DESCRIPTION or COLOR info can be changed.
        The `GXTPAT <geosoft.gxapi.GXTPAT>` object is stored inside the `GXVOX <geosoft.gxapi.GXVOX>`
        `GXMETA <geosoft.gxapi.GXMETA>` object.
        """
        self._get_tpat(tpat)



    @classmethod
    def grid_points(cls, name, error, cell_size, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, type, vv_x, vv_y, vv_z, vv_d, ipj):
        """
        Grid a `GXVOX <geosoft.gxapi.GXVOX>` from point `GXVV <geosoft.gxapi.GXVV>`'s.

        :param name:        Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param error:       Name of error `GXVOX <geosoft.gxapi.GXVOX>` ("" for none)
        :param cell_size:   Cell size (DUMMY for default)
        :param var_only:    Variogram Only
        :param min_radius:  Minimum Search Radius (DUMMY for none)
        :param max_radius:  Maximum Search Radius (DUMMY for none)
        :param min_points:  Minimum Search Points
        :param max_points:  Maximum Search Points
        :param model:       Model number 1-power, 2-spherical, 3-gaussian, 4-exponential
        :param power:       Power
        :param slope:       Slope
        :param range:       Range
        :param nugget:      Nugget
        :param sill:        Sill
        :param type:        :ref:`GS_TYPES`
        :param vv_x:        X `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y:        Y `GXVV <geosoft.gxapi.GXVV>`
        :param vv_z:        Z `GXVV <geosoft.gxapi.GXVV>`
        :param vv_d:        Data `GXVV <geosoft.gxapi.GXVV>`
        :param ipj:         Projection
        :type  name:        str
        :type  error:       str
        :type  cell_size:   float
        :type  var_only:    int
        :type  min_radius:  float
        :type  max_radius:  float
        :type  min_points:  int
        :type  max_points:  int
        :type  model:       int
        :type  power:       float
        :type  slope:       float
        :type  range:       float
        :type  nugget:      float
        :type  sill:        float
        :type  type:        int
        :type  vv_x:        GXVV
        :type  vv_y:        GXVV
        :type  vv_z:        GXVV
        :type  vv_d:        GXVV
        :type  ipj:         GXIPJ

        :returns:           `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:             GXVOX

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapVOX._grid_points(GXContext._get_tls_geo(), name.encode(), error.encode(), cell_size, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, type, vv_x, vv_y, vv_z, vv_d, ipj)
        return GXVOX(ret_val)



    @classmethod
    def grid_points_z(cls, name, error, cell_size, cell_size_z, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, type, vv_x, vv_y, vv_z, vv_d, ipj):
        """
        Grid a `GXVOX <geosoft.gxapi.GXVOX>` from point `GXVV <geosoft.gxapi.GXVV>`'s (using variable Z's)

        :param name:         Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param error:        Name of error `GXVOX <geosoft.gxapi.GXVOX>` ("" for none)
        :param cell_size:    Cell size (DUMMY for default)
        :param cell_size_z:  Cell size in Z ("" for default)
        :param var_only:     Variogram Only
        :param min_radius:   Minimum Search Radius (DUMMY for none)
        :param max_radius:   Maximum Search Radius (DUMMY for none)
        :param min_points:   Minimum Search Points
        :param max_points:   Maximum Search Points
        :param model:        Model number 1-power, 2-spherical, 3-gaussian, 4-exponential
        :param power:        Power
        :param slope:        Slope
        :param range:        Range
        :param nugget:       Nugget
        :param sill:         Sill
        :param type:         :ref:`GS_TYPES`
        :param vv_x:         X `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y:         Y `GXVV <geosoft.gxapi.GXVV>`
        :param vv_z:         Z `GXVV <geosoft.gxapi.GXVV>`
        :param vv_d:         Data `GXVV <geosoft.gxapi.GXVV>`
        :param ipj:          Projection
        :type  name:         str
        :type  error:        str
        :type  cell_size:    float
        :type  cell_size_z:  str
        :type  var_only:     int
        :type  min_radius:   float
        :type  max_radius:   float
        :type  min_points:   int
        :type  max_points:   int
        :type  model:        int
        :type  power:        float
        :type  slope:        float
        :type  range:        float
        :type  nugget:       float
        :type  sill:         float
        :type  type:         int
        :type  vv_x:         GXVV
        :type  vv_y:         GXVV
        :type  vv_z:         GXVV
        :type  vv_d:         GXVV
        :type  ipj:          GXIPJ

        :returns:            `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:              GXVOX

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapVOX._grid_points_z(GXContext._get_tls_geo(), name.encode(), error.encode(), cell_size, cell_size_z.encode(), var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, type, vv_x, vv_y, vv_z, vv_d, ipj)
        return GXVOX(ret_val)



    @classmethod
    def grid_points_z_ex(cls, name, error, cell_size, cell_size_z, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, strike, dip, plunge, along_strike_weight, down_dip_weight, type, vv_x, vv_y, vv_z, vv_d, ipj):
        """
        Grid a `GXVOX <geosoft.gxapi.GXVOX>` from point `GXVV <geosoft.gxapi.GXVV>`'s (using variable Z's)

        :param name:                 Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param error:                Name of error `GXVOX <geosoft.gxapi.GXVOX>` ("" for none)
        :param cell_size:            Cell size (DUMMY for default)
        :param cell_size_z:          Cell size in Z ("" for default)
        :param var_only:             Variogram Only
        :param min_radius:           Minimum Search Radius (DUMMY for none)
        :param max_radius:           Maximum Search Radius (DUMMY for none)
        :param min_points:           Minimum Search Points
        :param max_points:           Maximum Search Points
        :param model:                Model number 1-power, 2-spherical, 3-gaussian, 4-exponential
        :param power:                Power
        :param slope:                Slope
        :param range:                Range
        :param nugget:               Nugget
        :param sill:                 Sill
        :param strike:               Strike
        :param dip:                  Dip
        :param plunge:               Plunge
        :param along_strike_weight:  Strike Weight
        :param down_dip_weight:      Dip Plane Weight
        :param type:                 :ref:`GS_TYPES`
        :param vv_x:                 X `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y:                 Y `GXVV <geosoft.gxapi.GXVV>`
        :param vv_z:                 Z `GXVV <geosoft.gxapi.GXVV>`
        :param vv_d:                 Data `GXVV <geosoft.gxapi.GXVV>`
        :param ipj:                  Projection
        :type  name:                 str
        :type  error:                str
        :type  cell_size:            float
        :type  cell_size_z:          str
        :type  var_only:             int
        :type  min_radius:           float
        :type  max_radius:           float
        :type  min_points:           int
        :type  max_points:           int
        :type  model:                int
        :type  power:                float
        :type  slope:                float_ref
        :type  range:                float_ref
        :type  nugget:               float
        :type  sill:                 float_ref
        :type  strike:               float
        :type  dip:                  float
        :type  plunge:               float
        :type  along_strike_weight:  float
        :type  down_dip_weight:      float
        :type  type:                 int
        :type  vv_x:                 GXVV
        :type  vv_y:                 GXVV
        :type  vv_z:                 GXVV
        :type  vv_d:                 GXVV
        :type  ipj:                  GXIPJ

        :returns:                    `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:                      GXVOX

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        # slope/range/sill are in/out reference parameters: computed values are
        # written back into the callers' float_ref wrappers.
        ret_val, slope.value, range.value, sill.value = gxapi_cy.WrapVOX._grid_points_z_ex(GXContext._get_tls_geo(), name.encode(), error.encode(), cell_size, cell_size_z.encode(), var_only, min_radius, max_radius, min_points, max_points, model, power, slope.value, range.value, nugget, sill.value, strike, dip, plunge, along_strike_weight, down_dip_weight, type, vv_x, vv_y, vv_z, vv_d, ipj)
        return GXVOX(ret_val)



    def can_append_to(self, surface_file):
        """
        Check if this voxel can append to a surface file.

        :param surface_file:  Surface file
        :type  surface_file:  str

        :returns:             1 if can append
        :rtype:               int

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._can_append_to(surface_file.encode())
        return ret_val



    def get_cell_size_strings(self, loc_x, loc_y, loc_z, scale_x, scale_y, scale_z):
        """
        Get the Location Strings

        :param loc_x:    X String
        :param loc_y:    Y String
        :param loc_z:    Z String
        :param scale_x:  Scale to multiply X
        :param scale_y:  Scale to multiply Y
        :param scale_z:  Scale to multiply Z
        :type  loc_x:    str_ref
        :type  loc_y:    str_ref
        :type  loc_z:    str_ref
        :type  scale_x:  float
        :type  scale_y:  float
        :type  scale_z:  float

        .. versionadded:: 6.3.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        loc_x.value, loc_y.value, loc_z.value = self._get_cell_size_strings(loc_x.value.encode(), loc_y.value.encode(), loc_z.value.encode(), scale_x, scale_y, scale_z)



    def is_thematic(self):
        """
        Is this a thematic voxel?

        :returns:  1 if `GXVOX <geosoft.gxapi.GXVOX>` is thematic
        :rtype:    int

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A thematic voxel is one where the stored integer values
        represent indices into an internally stored `GXTPAT <geosoft.gxapi.GXTPAT>`
        object. Thematic voxels contain their own color definitions, and
        normal numerical operations, such as applying ITRs for display, are
        not valid.
        """
        ret_val = self._is_thematic()
        return ret_val



    def is_vector_voxel(self):
        """
        Is this a vector voxel?

        :returns:  1 if `GXVOX <geosoft.gxapi.GXVOX>` is a vector voxel
        :rtype:    int

        .. versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A vector voxel is one where each data element consists of 3 4-byte float values.
        Vector voxels normally have the file type "geosoft_vectorvoxel".
        """
        ret_val = self._is_vector_voxel()
        return ret_val



    def set_cell_size_strings(self, loc_x, loc_y, loc_z):
        """
        Set the Location Strings

        :param loc_x:  X String
        :param loc_y:  Y String
        :param loc_z:  Z String
        :type  loc_x:  str
        :type  loc_y:  str
        :type  loc_z:  str

        :returns:      0 - Ok
                       1 - Invalid data
        :rtype:        int

        .. versionadded:: 6.3.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._set_cell_size_strings(loc_x.encode(), loc_y.encode(), loc_z.encode())
        return ret_val



    @classmethod
    def log_grid_points_z_ex(cls, name, error, cell_size, cell_size_z, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, strike, dip, plunge, along_strike_weight, down_dip_weight, log_opt, min_log, type, vv_x, vv_y, vv_z, vv_d, ipj):
        """
        Log grid a `GXVOX <geosoft.gxapi.GXVOX>` from point `GXVV <geosoft.gxapi.GXVV>`'s (using variable Z's)

        :param name:                 Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param error:                Name of error `GXVOX <geosoft.gxapi.GXVOX>` ("" for none)
        :param cell_size:            Cell size (DUMMY for default)
        :param cell_size_z:          Cell size in Z ("" for default)
        :param var_only:             Variogram Only
        :param min_radius:           Minimum Search Radius (DUMMY for none)
        :param max_radius:           Maximum Search Radius (DUMMY for none)
        :param min_points:           Minimum Search Points
        :param max_points:           Maximum Search Points
        :param model:                Model number 1-power, 2-spherical, 3-gaussian, 4-exponential
        :param power:                Power
        :param slope:                Slope
        :param range:                Range
        :param nugget:               Nugget
        :param sill:                 Sill
        :param strike:               Strike
        :param dip:                  Dip
        :param plunge:               Plunge
        :param along_strike_weight:  Strike Weight
        :param down_dip_weight:      Dip Plane Weight
        :param log_opt:              :ref:`VOX_GRID_LOGOPT` Log Option
        :param min_log:              Minimum log
        :param type:                 :ref:`GS_TYPES`
        :param vv_x:                 X `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y:                 Y `GXVV <geosoft.gxapi.GXVV>`
        :param vv_z:                 Z `GXVV <geosoft.gxapi.GXVV>`
        :param vv_d:                 Data `GXVV <geosoft.gxapi.GXVV>`
        :param ipj:                  Projection
        :type  name:                 str
        :type  error:                str
        :type  cell_size:            float
        :type  cell_size_z:          str
        :type  var_only:             int
        :type  min_radius:           float
        :type  max_radius:           float
        :type  min_points:           int
        :type  max_points:           int
        :type  model:                int
        :type  power:                float
        :type  slope:                float_ref
        :type  range:                float_ref
        :type  nugget:               float
        :type  sill:                 float_ref
        :type  strike:               float
        :type  dip:                  float
        :type  plunge:               float
        :type  along_strike_weight:  float
        :type  down_dip_weight:      float
        :type  log_opt:              int
        :type  min_log:              float
        :type  type:                 int
        :type  vv_x:                 GXVV
        :type  vv_y:                 GXVV
        :type  vv_z:                 GXVV
        :type  vv_d:                 GXVV
        :type  ipj:                  GXIPJ

        :returns:                    `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:                      GXVOX

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        # slope/range/sill are in/out reference parameters (see grid_points_z_ex).
        ret_val, slope.value, range.value, sill.value = gxapi_cy.WrapVOX._log_grid_points_z_ex(GXContext._get_tls_geo(), name.encode(), error.encode(), cell_size, cell_size_z.encode(), var_only, min_radius, max_radius, min_points, max_points, model, power, slope.value, range.value, nugget, sill.value, strike, dip, plunge, along_strike_weight, down_dip_weight, log_opt, min_log, type, vv_x, vv_y, vv_z, vv_d, ipj)
        return GXVOX(ret_val)



    @classmethod
    def krig(cls, name, cell_size, type, vv_x, vv_y, vv_z, vv_d, ipj, reg):
        """
        A more compact and extensible form of `log_grid_points_z_ex <geosoft.gxapi.GXVOX.log_grid_points_z_ex>`.

        :param name:       Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param cell_size:  Cell size (DUMMY for default)
        :param type:       :ref:`GS_TYPES`
        :param vv_x:       X `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y:       Y `GXVV <geosoft.gxapi.GXVV>`
        :param vv_z:       Z `GXVV <geosoft.gxapi.GXVV>`
        :param vv_d:       Data `GXVV <geosoft.gxapi.GXVV>`
        :param ipj:        Projection
        :param reg:        Optional parameters (see Note below)
        :type  name:       str
        :type  cell_size:  float
        :type  type:       int
        :type  vv_x:       GXVV
        :type  vv_y:       GXVV
        :type  vv_z:       GXVV
        :type  vv_d:       GXVV
        :type  ipj:        GXIPJ
        :type  reg:        GXREG

        :returns:          `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:            GXVOX

        .. versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Optional Parameters. If these values are not set in the `GXREG <geosoft.gxapi.GXREG>`,
        then default parameters will be used.

        ERROR_VOXEL: Name of error `GXVOX <geosoft.gxapi.GXVOX>` ("" for none)
        CELLSIZEZ: Z Cell size string (space delimited, "" for default)
        RADIUS_MIN: Minimum Search Radius (REAL) (Default = 4) (Blanking Distance)
        RADIUS_MAX: Maximum Search Radius (REAL) (Default = 16)
        SEARCH_MIN: Minimum Search Points (INT) (Default = 16)
        SEARCH_MAX: Maximum Search Points (INT) (Default = 32)
        VARIOGRAM_ONLY: Set to 1 to calculate the variogram only (INT) (Default = 0)
        MODEL: Variogram Model number 1-power, 2-spherical, 3-gaussian, 4-exponential (INT) (Default = 2)
        POWER: Power (Default = DUMMY)
        SLOPE: Slope (REAL) (if input is DUMMY, value calculated and set on return)
        RANGE: Range (REAL) (if input is DUMMY, value calculated and set on return)
        SILL : Sill (REAL) (if input is DUMMY, value calculated and set on return)
        STRIKE: Strike (REAL) (Default = 0)
        DIP: Dip (REAL) (Default = 90)
        PLUNGE: Plunge (REAL) (Default = 0)
        STRIKE WEIGHT: Along-Strike Weight (REAL) (Default = 1)
        DIP_WEIGHT: Down-Dip Weight (REAL) (Default = 1)
        LOG_OPT: One of :ref:`VOX_GRID_LOGOPT` (Default = 0)
        MIN_LOG: Log Minimum (REAL) (Default = 1)
        MIN_X: Minimum X (REAL) (default = DUMMY to determine from the data. If input, nearest lt. or eq. multiple of cell size chosen)
        MAX_X: Maximum X (REAL) (default = DUMMY to determine from the data. If input, nearest gt. or eq. multiple of cell size chosen)
        MIN_Y: Minimum Y (REAL) (default = DUMMY to determine from the data. If input, nearest lt. or eq. external multiple of cell size chosen)
        MAX_Y: Maximum Y (REAL) (default = DUMMY to determine from the data. If input, nearest gt. or eq. multiple of cell size chosen)
        MIN_Z: Minimum Z (REAL) (default = DUMMY to determine from the data. If input, nearest lt. or eq. multiple of cell size chosen)
        MAX_Z: Maximum Z (REAL) (default = DUMMY to determine from the data. If input, nearest gt. or eq. multiple of cell size chosen)

        Only the most basic parameters are entered directly. Optional parameters
        are passed via a `GXREG <geosoft.gxapi.GXREG>` object.
        """
        ret_val = gxapi_cy.WrapVOX._krig(GXContext._get_tls_geo(), name.encode(), cell_size, type, vv_x, vv_y, vv_z, vv_d, ipj, reg)
        return GXVOX(ret_val)



    @classmethod
    def math(cls, master, mastervar, output, outvar, formula, lst):
        """
        Produces a new voxel using a formula on existing voxels/Grids

        :param master:     Master `GXVOX <geosoft.gxapi.GXVOX>` Name
        :param mastervar:  Master `GXVOX <geosoft.gxapi.GXVOX>` Variable Name
        :param output:     Output `GXVOX <geosoft.gxapi.GXVOX>` Name
        :param outvar:     Output `GXVOX <geosoft.gxapi.GXVOX>` Variable Name
        :param formula:    Formula
        :param lst:        List of Voxels/Grids to use as inputs
        :type  master:     str
        :type  mastervar:  str
        :type  output:     str
        :type  outvar:     str
        :type  formula:    str
        :type  lst:        GXLST

        :returns:          VOXEL handle
        :rtype:            GXVOX

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The input voxels must all be of the same type.
        """
        ret_val = gxapi_cy.WrapVOX._math(GXContext._get_tls_geo(), master.encode(), mastervar.encode(), output.encode(), outvar.encode(), formula.encode(), lst)
        return GXVOX(ret_val)



    def merge(self, vox2, reg, output_vox):
        """
        Merge two Voxels.

        :param vox2:        `GXVOX <geosoft.gxapi.GXVOX>` object
        :param reg:         Parameters (see above)
        :param output_vox:  Output voxel file name.
        :type  vox2:        GXVOX
        :type  reg:         GXREG
        :type  output_vox:  str

        .. versionadded:: 7.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._merge(vox2, reg, output_vox.encode())



    @classmethod
    def nearest_neighbour_grid(cls, name, cell_size, max_radius, type, vv_x, vv_y, vv_z, vv_d, ipj):
        """
        Grid a `GXVOX <geosoft.gxapi.GXVOX>` from point `GXVV <geosoft.gxapi.GXVV>`'s using the Nearest Neighbours method.

        :param name:        Name of output `GXVOX <geosoft.gxapi.GXVOX>`
        :param cell_size:   Cell size (DUMMY for default)
        :param max_radius:  Maximum radius (DUMMY for none)
        :param type:        :ref:`GS_TYPES`
        :param vv_x:        X `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y:        Y `GXVV <geosoft.gxapi.GXVV>`
        :param vv_z:        Z `GXVV <geosoft.gxapi.GXVV>`
        :param vv_d:        Data `GXVV <geosoft.gxapi.GXVV>`
        :param ipj:         Projection
        :type  name:        str
        :type  cell_size:   float
        :type  max_radius:  float
        :type  type:        int
        :type  vv_x:        GXVV
        :type  vv_y:        GXVV
        :type  vv_z:        GXVV
        :type  vv_d:        GXVV
        :type  ipj:         GXIPJ

        :returns:           `GXVOX <geosoft.gxapi.GXVOX>` Object
        :rtype:             GXVOX

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapVOX._nearest_neighbour_grid(GXContext._get_tls_geo(), name.encode(), cell_size, max_radius, type, vv_x, vv_y, vv_z, vv_d, ipj)
        return GXVOX(ret_val)



    @classmethod
    def compute_cell_size(cls, min_x, min_y, min_z, max_x, max_y, max_z):
        """
        Compute the Cell size based on specific Area

        :param min_x:  MinX
        :param min_y:  MinY
        :param min_z:  MinZ
        :param max_x:  MaxX
        :param max_y:  MaxY
        :param max_z:  MaxZ
        :type  min_x:  float
        :type  min_y:  float
        :type  min_z:  float
        :type  max_x:  float
        :type  max_y:  float
        :type  max_z:  float

        :returns:      Cell Size
        :rtype:        float

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapVOX._compute_cell_size(GXContext._get_tls_geo(), min_x, min_y, min_z, max_x, max_y, max_z)
        return ret_val



    def re_grid(self, vox_to_regrid, reg, output_vox):
        """
        Regrid a Voxel.

        :param vox_to_regrid:  `GXVOX <geosoft.gxapi.GXVOX>` object to regrid
        :param reg:            Parameters (not implemented)
        :param output_vox:     Output voxel file name.
        :type  vox_to_regrid:  GXVOX
        :type  reg:            GXREG
        :type  output_vox:     str

        .. versionadded:: 7.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._re_grid(vox_to_regrid, reg, output_vox.encode())



    def resample_pg(self, ipj, orig_x, orig_y, orig_z, spacing_x, spacing_y, spacing_z, size_x, size_y, size_z, min_z, max_z, interp):
        """
        Resample a voxel over an input volume to a `GXPG <geosoft.gxapi.GXPG>`.

        :param ipj:        Projection to use for Origin, Spacing values
        :param orig_x:     Origin X
        :param orig_y:     Origin Y
        :param orig_z:     Origin Z
        :param spacing_x:  Spacing in X
        :param spacing_y:  Spacing in Y
        :param spacing_z:  Spacing in Z
        :param size_x:     Samples in X
        :param size_y:     Samples in Y
        :param size_z:     Samples in Z
        :param min_z:      Minimum Z to resample (can be `rDUMMY <geosoft.gxapi.rDUMMY>`)
        :param max_z:      Maximum Z to resample (can be `rDUMMY <geosoft.gxapi.rDUMMY>`)
        :param interp:     :ref:`VOX_SLICE_MODE`
        :type  ipj:        GXIPJ
        :type  orig_x:     float
        :type  orig_y:     float
        :type  orig_z:     float
        :type  spacing_x:  float
        :type  spacing_y:  float
        :type  spacing_z:  float
        :type  size_x:     int
        :type  size_y:     int
        :type  size_z:     int
        :type  min_z:      float
        :type  max_z:      float
        :type  interp:     int

        :returns:          `GXPG <geosoft.gxapi.GXPG>` object, terminates on error
        :rtype:            GXPG

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Creates and dummies a `GXPG <geosoft.gxapi.GXPG>` object based on the input
        dimensions, then resamples the voxel to the pager at the locations
        determined by input projection, origin and spacings.
        """
        ret_val = self._resample_pg(ipj, orig_x, orig_y, orig_z, spacing_x, spacing_y, spacing_z, size_x, size_y, size_z, min_z, max_z, interp)
        return GXPG(ret_val)



    def rescale_cell_sizes(self, scale):
        """
        Multiply all cell sizes by a fixed factor.

        :param scale:  Scaling factor (>0)
        :type  scale:  float

        .. versionadded:: 7.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This is useful, for instance for converting sizes in one unit to
        sizes in another unit if changing the projection and the projection's
        unit changes, since the voxel inherits its projection's units.
        """
        self._rescale_cell_sizes(scale)



    def sample_cdi(self, db, line, x_ch, y_ch, elev_ch, negative_depths_down, topo_ch, mode, out_ch):
        """
        Sample a voxel at locations/elevations in a CDI database.

        :param db:                    CDI Database handle
        :param line:                  Line handle
        :param x_ch:                  X channel handle
        :param y_ch:                  Y channel handle
        :param elev_ch:               Depth array channel handle
        :param negative_depths_down:  Depths sign: 0 - positive down, 1 - negative down
        :param topo_ch:               Elevation channel handle (can be `NULLSYMB <geosoft.gxapi.NULLSYMB>`)
        :param mode:                  Interpolation mode: 0 - linear, 1 - nearest
        :param out_ch:                Output channel name
        :type  db:                    GXDB
        :type  line:                  int
        :type  x_ch:                  int
        :type  y_ch:                  int
        :type  elev_ch:               int
        :type  negative_depths_down:  int
        :type  topo_ch:               int
        :type  mode:                  int
        :type  out_ch:                str

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A "CDI" database does not need to be conductivity/depth.
        It normally contains an array channel of depth values for each
        (X, Y) location, with corresponding data array channels of values
        taken at those (X, Y, Z) locations.

        If the optional elevation channel is used, its value is used as an
        offset to the depth channel values. Depths are positive down by
        default; use the "Negative depths down" parameter if the depths
        become more negative as you go deeper.
        """
        self._sample_cdi(db, line, x_ch, y_ch, elev_ch, negative_depths_down, topo_ch, mode, out_ch.encode())



    def sample_cdi_to_topography(self, db, line, x_ch, y_ch, zvv, mode, out_ch, topo_ch):
        """
        Sample a voxel at fixed elevations along a path in a CDI database, and output them
        to an array channel, deleting leading dummy values, and writing the elevation of the
        first non-dummy item to a topography channel.

        :param db:       CDI Database handle
        :param line:     Line handle
        :param x_ch:     X channel handle
        :param y_ch:     Y channel handle
        :param zvv:      Z values to sample at each X, Y
        :param mode:     Interpolation mode: 0 - linear, 1 - nearest
        :param out_ch:   Output data array channel name
        :param topo_ch:  Output topography channel name
        :type  db:       GXDB
        :type  line:     int
        :type  x_ch:     int
        :type  y_ch:     int
        :type  zvv:      GXVV
        :type  mode:     int
        :type  out_ch:   str
        :type  topo_ch:  str

        .. versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._sample_cdi_to_topography(db, line, x_ch, y_ch, zvv, mode, out_ch.encode(), topo_ch.encode())



    def sample_vv(self, xvv, yvv, zvv, interp, dvv):
        """
        Sample a voxel at multiple locations.

        :param xvv:     X locations (input)
        :param yvv:     Y locations (input)
        :param zvv:     Z locations (input)
        :param interp:  Interpolation mode: 0 - linear, 1 - nearest
        :param dvv:     Returned values
        :type  xvv:     GXVV
        :type  yvv:     GXVV
        :type  zvv:     GXVV
        :type  interp:  int
        :type  dvv:     GXVV

        .. versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Sample at voxel at XYZ locations input in VVs. Values returned in a
        `GXVV <geosoft.gxapi.GXVV>`.
        """
        self._sample_vv(xvv, yvv, zvv, interp, dvv)



    def set_ipj(self, ipj):
        """
        Set the projection of the voxel.

        :param ipj:  `GXIPJ <geosoft.gxapi.GXIPJ>` object to save `GXVOX <geosoft.gxapi.GXVOX>`'s meta to
        :type  ipj:  GXIPJ

        ..
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_ipj(ipj) def set_location(self, origin_x, origin_y, origin_z, vv_x, vv_y, vv_z): """ Set Location information :param origin_x: Origin X :param origin_y: Origin Y :param origin_z: Origin Z :param vv_x: Cell sizes in X :param vv_y: Cell sizes in Y :param vv_z: Cell sizes in Z :type origin_x: float :type origin_y: float :type origin_z: float :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_location(origin_x, origin_y, origin_z, vv_x, vv_y, vv_z) def set_meta(self, meta): """ Set the metadata of a voxel. :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to add to `GXVOX <geosoft.gxapi.GXVOX>`'s meta :type meta: GXMETA .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta(meta) def set_origin(self, origin, origin_x, origin_y, origin_z): """ Set the Voxel Origin :param origin: Type of origin being set :ref:`VOX_ORIGIN` :param origin_x: Origin X :param origin_y: Origin Y :param origin_z: Origin Z :type origin: int :type origin_x: float :type origin_y: float :type origin_z: float .. 
versionadded:: 6.3.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_origin(origin, origin_x, origin_y, origin_z) def set_simple_location(self, origin_x, origin_y, origin_z, cell_x, cell_y, cell_z): """ Set Simple Location information :param origin_x: Origin X :param origin_y: Origin Y :param origin_z: Origin Z :param cell_x: Cell Sizes in X (`rDUMMY <geosoft.gxapi.rDUMMY>` if not changed) :param cell_y: Cell Sizes in Y (`rDUMMY <geosoft.gxapi.rDUMMY>` if not changed) :param cell_z: Cell Sizes in Z (`rDUMMY <geosoft.gxapi.rDUMMY>` if not changed) :type origin_x: float :type origin_y: float :type origin_z: float :type cell_x: float :type cell_y: float :type cell_z: float .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_simple_location(origin_x, origin_y, origin_z, cell_x, cell_y, cell_z) def set_tpat(self, tpat): """ Set a thematic voxel's `GXTPAT <geosoft.gxapi.GXTPAT>` object. :param tpat: `GXTPAT <geosoft.gxapi.GXTPAT>` object to store :type tpat: GXTPAT .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Each row in the `GXTPAT <geosoft.gxapi.GXTPAT>` object corresponds to a stored index value in the thematic voxel. The `GXTPAT <geosoft.gxapi.GXTPAT>` should NOT be modified by the addition or deletion of items, if it is to be restored into the `GXVOX <geosoft.gxapi.GXVOX>` object, but the CODE, LABEL, DESCRIPTION or COLOR info can be changed. The `GXTPAT <geosoft.gxapi.GXTPAT>` object is stored inside the `GXVOX <geosoft.gxapi.GXVOX>` `GXMETA <geosoft.gxapi.GXMETA>` object. 
""" self._set_tpat(tpat) def slice_ipj(self, name, ipj, mode, orig_x, orig_y, cell_size_x, cell_size_y, size_x, size_y): """ Extract a slice of a voxel based on an `GXIPJ <geosoft.gxapi.GXIPJ>` :param name: Grid Name :param ipj: Grid `GXIPJ <geosoft.gxapi.GXIPJ>` (includes orientation, etc) :param mode: :ref:`VOX_SLICE_MODE` :param orig_x: Grid Origin X :param orig_y: Grid Origin Y :param cell_size_x: Grid Cell Size in X :param cell_size_y: Grid Cell Size in Y :param size_x: Grid cells in X :param size_y: Grid cells in Y :type name: str :type ipj: GXIPJ :type mode: int :type orig_x: float :type orig_y: float :type cell_size_x: float :type cell_size_y: float :type size_x: int :type size_y: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._slice_ipj(name.encode(), ipj, mode, orig_x, orig_y, cell_size_x, cell_size_y, size_x, size_y) def slice_multi_layer_ipj(self, name, ipj, mode, orig_x, orig_y, cell_size_x, cell_size_y, size_x, size_y, layers, start_elev, elev_inc): """ Extract multiple slices of a voxel based on an `GXIPJ <geosoft.gxapi.GXIPJ>` :param name: Grid Name :param ipj: Grid `GXIPJ <geosoft.gxapi.GXIPJ>` (includes orientation, etc) :param mode: :ref:`VOX_SLICE_MODE` :param orig_x: Grid Origin X :param orig_y: Grid Origin Y :param cell_size_x: Grid Cell Size in X :param cell_size_y: Grid Cell Size in Y :param size_x: Grid cells in X :param size_y: Grid cells in Y :param layers: Number of layers to extract :param start_elev: Start elevation :param elev_inc: Elevation increment :type name: str :type ipj: GXIPJ :type mode: int :type orig_x: float :type orig_y: float :type cell_size_x: float :type cell_size_y: float :type size_x: int :type size_y: int :type layers: int :type start_elev: float :type elev_inc: float .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._slice_multi_layer_ipj(name.encode(), ipj, mode, orig_x, orig_y, cell_size_x, cell_size_y, size_x, size_y, layers, start_elev, elev_inc) def subset_to_double_extents(self, output_vox): """ Subset a `GXVOX <geosoft.gxapi.GXVOX>` to real extents. :param output_vox: Output voxel file name. :type output_vox: str .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._subset_to_double_extents(output_vox.encode()) @classmethod def sync(cls, name): """ Syncronize the Metadata for this Voxel :param name: Voxel name :type name: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapVOX._sync(GXContext._get_tls_geo(), name.encode()) def window_ply(self, pply, mask, min_z, max_z, output_vox, clip_dummies): """ Window a `GXVOX <geosoft.gxapi.GXVOX>` to a `GXPLY <geosoft.gxapi.GXPLY>` file and Z. :param pply: `GXPLY <geosoft.gxapi.GXPLY>` object :param mask: Mask (0: inside `GXPLY <geosoft.gxapi.GXPLY>`, 1: outside `GXPLY <geosoft.gxapi.GXPLY>`) :param min_z: Minimum Z (optional, `rDUMMY <geosoft.gxapi.rDUMMY>` for no minimum) :param max_z: Maximum Z (optional, `rDUMMY <geosoft.gxapi.rDUMMY>` for no maximun) :param output_vox: Output voxel file name. :param clip_dummies: Clip extents to remove dummies (0: no (same size), 1: yes (smaller)) :type pply: GXPLY :type mask: int :type min_z: float :type max_z: float :type output_vox: str :type clip_dummies: int .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The voxel is windowed horizontally to the input `GXPLY <geosoft.gxapi.GXPLY>` file. 
Optionally, it will be windowed to the input Z range as well. The output can be clipped to the non-dummied cells. """ self._window_ply(pply, mask, min_z, max_z, output_vox.encode(), clip_dummies) def window_xyz(self, min_x, min_y, min_z, max_x, max_y, max_z, output_vox, clip_dummies): """ Window a `GXVOX <geosoft.gxapi.GXVOX>` to ranges in X, Y and Z. :param min_x: Minimum X (optional, `rDUMMY <geosoft.gxapi.rDUMMY>` for no minimum) :param min_y: Minimum Y (optional, `rDUMMY <geosoft.gxapi.rDUMMY>` for no minimum) :param min_z: Minimum Z (optional, `rDUMMY <geosoft.gxapi.rDUMMY>` for no minimum) :param max_x: Maximum X (optional, `rDUMMY <geosoft.gxapi.rDUMMY>` for no maximun) :param max_y: Maximum Y (optional, `rDUMMY <geosoft.gxapi.rDUMMY>` for no maximun) :param max_z: Maximum Z (optional, `rDUMMY <geosoft.gxapi.rDUMMY>` for no maximun) :param output_vox: Output voxel file name. :param clip_dummies: Clip extents to remove dummies (0: no (same size), 1: yes (smaller)) :type min_x: float :type min_y: float :type min_z: float :type max_x: float :type max_y: float :type max_z: float :type output_vox: str :type clip_dummies: int .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The six minima and maxima are optional. The output can be clipped to the non-dummied cells. """ self._window_xyz(min_x, min_y, min_z, max_x, max_y, max_z, output_vox.encode(), clip_dummies) def write_xml(self, file): """ Export the `GXVOX <geosoft.gxapi.GXVOX>` to XML :param file: XML file to create :type file: str .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_xml(file.encode()) def convert_numeric_to_thematic(self, vv_translate, output_vox): """ Convert numeric voxel to thematic (lithology) voxel :param vv_translate: Translation `GXVV <geosoft.gxapi.GXVV>` handle. :param output_vox: Output voxel file name. :type vv_translate: GXVV :type output_vox: str .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._convert_numeric_to_thematic(vv_translate, output_vox.encode()) def convert_thematic_to_numeric(self, vv_translate, output_vox): """ Convert thematic (lithology) voxel to numeric voxel :param vv_translate: Translation `GXVV <geosoft.gxapi.GXVV>` handle. :param output_vox: Output voxel file name. :type vv_translate: GXVV :type output_vox: str .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._convert_thematic_to_numeric(vv_translate, output_vox.encode()) def convert_velocity_to_density(self, input_scaling_factor, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_voxel_filename): """ Produces a density voxel using the velocity values in this voxel. :param input_scaling_factor: 1.0, if this voxel is in meters per second. Otherwise, a value by which each input cell is multiplied to convert it into meters per second. :param a5: Coefficient of fifth-order polynomial term. :param a4: Coefficient of fourth-order polynomial term. :param a3: Coefficient of third-order polynomial term. :param a2: Coefficient of second-order polynomial term. :param a1: Coefficient of first-order polynomial term. :param a0: Constant offset of output. :param output_scaling_factor: 1.0, to produce an output voxel that has units of g/cm^3. 
If different units are desired, pass in a different value, which will be multiplied into each output voxel cell. :param output_voxel_filename: Filename of the output voxel. :type input_scaling_factor: float :type a5: float :type a4: float :type a3: float :type a2: float :type a1: float :type a0: float :type output_scaling_factor: float :type output_voxel_filename: str .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._convert_velocity_to_density(input_scaling_factor, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_voxel_filename.encode()) def convert_velocity_in_range_to_density(self, input_scaling_factor, input_lower_bound, input_upper_bound, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_voxel_filename): """ Produces a density voxel using the velocity values in this voxel, as long as the velocity values are in range. :param input_scaling_factor: 1.0, if this voxel is in meters per second. Otherwise, a value by which each input cell is multiplied to convert it into meters per second. :param input_lower_bound: Lower bound on velocity values, in meters per second. If the input value (after being pre-multiplied by dInputScalingFactor) is less than this value, the output cell value will be DUMMY. :param input_upper_bound: Upper bound on velocity values, in meters per second. If the input value (after being pre-multiplied by dInputScalingFactor) is greater than this value, the output cell value will be DUMMY. :param a5: Coefficient of fifth-order polynomial term. :param a4: Coefficient of fourth-order polynomial term. :param a3: Coefficient of third-order polynomial term. :param a2: Coefficient of second-order polynomial term. :param a1: Coefficient of first-order polynomial term. :param a0: Constant offset of output. :param output_scaling_factor: 1.0, to produce an output voxel that has units of g/cm^3. 
If different units are desired, pass in a different value, which will be multiplied into each output voxel cell. :param output_voxel_filename: Filename of the output voxel. :type input_scaling_factor: float :type input_lower_bound: float :type input_upper_bound: float :type a5: float :type a4: float :type a3: float :type a2: float :type a1: float :type a0: float :type output_scaling_factor: float :type output_voxel_filename: str .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._convert_velocity_in_range_to_density(input_scaling_factor, input_lower_bound, input_upper_bound, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_voxel_filename.encode()) def convert_density_to_velocity(self, input_scaling_factor, input_lower_bound, input_upper_bound, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_voxel_filename): """ Produces a velocity voxel using the density values in this voxel. :param input_scaling_factor: 1.0, if this voxel is in g/cm^3. Otherwise, a value by which each input cell is multiplied to convert it into g/cm^3. :param input_lower_bound: Lower bound on velocity values, in g/vm^3. If the input value (after being pre-multiplied by dInputScalingFactor) is less than this value, the output cell value will be DUMMY. :param input_upper_bound: Upper bound on velocity values, in g/cm^3. If the input value (after being pre-multiplied by dInputScalingFactor) is greater than this value, the output cell value will be DUMMY. :param a5: Coefficient of fifth-order polynomial term. :param a4: Coefficient of fourth-order polynomial term. :param a3: Coefficient of third-order polynomial term. :param a2: Coefficient of second-order polynomial term. :param a1: Coefficient of first-order polynomial term. :param a0: Constant offset of output. :param output_scaling_factor: 1.0, to produce an output voxel that has units of meters per second. 
If different units are desired, pass in a different value, which will be multiplied into each output voxel cell. :param output_voxel_filename: Filename of the output voxel. :type input_scaling_factor: float :type input_lower_bound: float :type input_upper_bound: float :type a5: float :type a4: float :type a3: float :type a2: float :type a1: float :type a0: float :type output_scaling_factor: float :type output_voxel_filename: str .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._convert_density_to_velocity(input_scaling_factor, input_lower_bound, input_upper_bound, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_voxel_filename.encode()) def invert_z(self, output_voxel_filename): """ Convert an inverted voxel to normal orientation :param output_voxel_filename: Output voxel file name. :type output_voxel_filename: str .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._invert_z(output_voxel_filename.encode()) @classmethod def dw_grid_db(cls, voxel, db, x, y, z, data, reg): """ `dw_grid_db <geosoft.gxapi.GXVOX.dw_grid_db>` Inverse-distance weighting gridding method, `GXDB <geosoft.gxapi.GXDB>` version, 3D. :param voxel: Output voxel name :param db: Database :param x: X Channel [READONLY] :param y: Y Channel [READONLY] :param z: Z Channel [READONLY] :param data: Data Channel [READONLY] :param reg: Parameters (see above) :type voxel: str :type db: GXDB :type x: int :type y: int :type z: int :type data: int :type reg: GXREG .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** 3D cells take on the averaged values within a search radius, weighted inversely by distance. 
Weighting can be controlled using the power and slope properties; weighting = 1 / (distance^wtpower + 1/slope) where distance is in units of grid cells (X dimenstion). Default is 0.0, If the blanking distance is set, all cells whose center point is not within the blanking distance of at least one data point are set to dummy. `GXREG <geosoft.gxapi.GXREG>` Parameters: X0, Y0, Z0, DX, DY, DZ: Voxel origin, and cell sizes (required) WT_POWER (default=2), WT_SLOPE (default=1) Weighting function parameters SEARCH_RADIUS: Distance weighting limit (default = 4 * CUBE_ROOT(DX*DY*DZ)) BLANKING_DISTANCE: Dummy values farther from data than this distance. (default = 4 * CUBE_ROOT(DX*DY*DZ)) LOG: Apply log transform to input data before gridding (0:No (default), 1:Yes)? LOG_BASE: One of `VV_LOG_BASE_10 <geosoft.gxapi.VV_LOG_BASE_10>` (default) or `VV_LOG_BASE_E <geosoft.gxapi.VV_LOG_BASE_E>` LOG_NEGATIVE: One of `VV_LOG_NEGATIVE_NO <geosoft.gxapi.VV_LOG_NEGATIVE_NO>` (default) or `VV_LOG_NEGATIVE_YES <geosoft.gxapi.VV_LOG_NEGATIVE_YES>` """ gxapi_cy.WrapVOX._dw_grid_db(GXContext._get_tls_geo(), voxel.encode(), db, x, y, z, data, reg) @classmethod def tin_grid_db(cls, voxel, db, x, y, z, data, method, z_cell, reg): """ `tin_grid_db <geosoft.gxapi.GXVOX.tin_grid_db>` `GXTIN <geosoft.gxapi.GXTIN>`-Gridding, `GXDB <geosoft.gxapi.GXDB>` version, 3D. :param voxel: Output voxel name :param db: Database :param x: X Channel [READONLY] :param y: Y Channel [READONLY] :param z: Z Channel [READONLY] :param data: Data Channel [READONLY] :param method: Gridding method (0: Linear, 1: Natural Neighbour, 2: Nearest Neightbour :param z_cell: Z Cell sizes (bottom to top) :param reg: Parameters (see above) :type voxel: str :type db: GXDB :type x: int :type y: int :type z: int :type data: int :type method: int :type z_cell: GXVV :type reg: GXREG .. 
versionadded:: 8.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Designed for data in array channels position vertically at single XY locations. Creates a `GXTIN <geosoft.gxapi.GXTIN>` using the XY locations and uses the coefficients for the top layer on each layer below to make it efficient. `GXREG <geosoft.gxapi.GXREG>` Parameters: X0, Y0, Z0, DX, DY, DZ: Voxel origin, and cell sizes (required) NX, NY, NZ: Voxel dimensions. DZ and NZ are used only if the input cell sizes `GXVV <geosoft.gxapi.GXVV>` is of zero length. """ gxapi_cy.WrapVOX._tin_grid_db(GXContext._get_tls_geo(), voxel.encode(), db, x, y, z, data, method, z_cell, reg) @classmethod def get_multi_voxset_guid(cls, voxel_file, p_uuid_string): """ Get the UUID :param voxel_file: Input Voxel file :param p_uuid_string: UUID string returned :type voxel_file: str :type p_uuid_string: str_ref .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ p_uuid_string.value = gxapi_cy.WrapVOX._get_multi_voxset_guid(GXContext._get_tls_geo(), voxel_file.encode(), p_uuid_string.value.encode()) @classmethod def generate_gocad(cls, name, header, property, ipj): """ Generate a `GXVOX <geosoft.gxapi.GXVOX>` from a GOCAD File :param name: Name of output `GXVOX <geosoft.gxapi.GXVOX>` :param header: Name of GOCAD Voxel file :param property: Propert name to import :type name: str :type header: str :type property: str :type ipj: GXIPJ :returns: `GXVOX <geosoft.gxapi.GXVOX>` Object :rtype: GXVOX .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapVOX._generate_gocad(GXContext._get_tls_geo(), name.encode(), header.encode(), property.encode(), ipj) return GXVOX(ret_val) @classmethod def generate_oriented_gocad(cls, name, header, property, ipj, orientation): """ Generate a `GXVOX <geosoft.gxapi.GXVOX>` from a GOCAD File :param name: Name of output `GXVOX <geosoft.gxapi.GXVOX>` :param header: Name of GOCAD Voxel file :param property: Propert name to import :param orientation: :ref:`VOX_GOCAD_ORIENTATION` :type name: str :type header: str :type property: str :type ipj: GXIPJ :type orientation: int :returns: `GXVOX <geosoft.gxapi.GXVOX>` Object :rtype: GXVOX .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Allows the Orientation flag to be specified. """ ret_val = gxapi_cy.WrapVOX._generate_oriented_gocad(GXContext._get_tls_geo(), name.encode(), header.encode(), property.encode(), ipj, orientation) return GXVOX(ret_val) @classmethod def generate_ubc(cls, name, mesh, mod, dummy, ipj): """ Generate a `GXVOX <geosoft.gxapi.GXVOX>` from a UBC File :param name: Name of output `GXVOX <geosoft.gxapi.GXVOX>` :param mesh: Name of UBC Mesh File :param mod: Name of UBC Mod File :param dummy: Dummy Value :param ipj: Projection :type name: str :type mesh: str :type mod: str :type dummy: float :type ipj: GXIPJ :returns: `GXVOX <geosoft.gxapi.GXVOX>` Object :rtype: GXVOX .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapVOX._generate_ubc(GXContext._get_tls_geo(), name.encode(), mesh.encode(), mod.encode(), dummy, ipj) return GXVOX(ret_val) @classmethod def generate_xyz(cls, name, ra, type, ipj): """ Generate a `GXVOX <geosoft.gxapi.GXVOX>` from an XYZ File :param name: Voxel Name :param ra: `GXRA <geosoft.gxapi.GXRA>` To import from :param type: Data Type :ref:`GS_TYPES` :param ipj: Projection :type name: str :type ra: GXRA :type type: int :type ipj: GXIPJ .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapVOX._generate_xyz(GXContext._get_tls_geo(), name.encode(), ra, type, ipj) @classmethod def list_gocad_properties(cls, header, lst): """ List all the properties available in this GOCAD file. :param header: Name of GOCAD Voxel file :param lst: List object to populate :type header: str :type lst: GXLST .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapVOX._list_gocad_properties(GXContext._get_tls_geo(), header.encode(), lst) def export_db(self, db, chan, dir, rev_x, rev_y, rev_z, dummies): """ Export a Voxel to a database :param db: Database :param chan: Channel Name :param dir: :ref:`VOX_DIRECTION` :param rev_x: Reverse X ? (0/1) :param rev_y: Reverse Y ? (0/1) :param rev_z: Reverse Z ? (0/1) :param dummies: Write Dummies? (0/1) :type db: GXDB :type chan: str :type dir: int :type rev_x: int :type rev_y: int :type rev_z: int :type dummies: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The database lines contain a slice of the voxel at a time. 
""" self._export_db(db, chan.encode(), dir, rev_x, rev_y, rev_z, dummies) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/_test_dap_client.py import unittest import os import numpy as np import geosoft import geosoft.gxpy.gx as gx import geosoft.gxpy.system as gsys import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.dap_client as gxdap import geosoft.gxpy.geometry as gxgeo import geosoft.gxpy.grid as gxgrd from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() @classmethod def tearDownClass(cls): cls.tearDownGXPYTest() def test_dap(self): self.start() with gxdap.DapClient(get_catalog=False) as dap: self.assertEqual(dap.url, 'http://dap.geosoft.com/') self.assertEqual(str(dap), 'http://dap.geosoft.com/: Geosoft Public DAP Server (? datasets)') self.assertEqual(len(dap), 0) with gxdap.DapClient('http://dap.geosoft.com/rest/', get_catalog=False) as dap: self.assertEqual(dap.url, 'http://dap.geosoft.com/') self.assertEqual(str(dap), 'http://dap.geosoft.com/: Geosoft Public DAP Server (? datasets)') with gxdap.DapClient('http://dap.geosoft.com', get_catalog=False) as dap: self.assertEqual(dap.url, 'http://dap.geosoft.com/') self.assertEqual(str(dap), 'http://dap.geosoft.com/: Geosoft Public DAP Server (? 
datasets)') self.assertRaises(gxdap.DapClientException, gxdap.DapClient, 'http://www.geosoft.com') def test_catalog(self): self.start() with gxdap.DapClient() as dap: self.assertEqual(dap.url, 'http://dap.geosoft.com/') self.assertTrue(len(dap) == 0) dap.catalog() self.assertTrue(len(dap) > 0) for ds in dap: self.assertTrue(len(ds.Title) > 0) ds = dap['EMAG2_V3_20170530_SeaLevel'] self.assertEqual(int(ds.Id), 970) ds = dap[('/World/Magnetics/EMAG2', 'EMAG2_V3_20170530_SeaLevel')] self.assertEqual(int(ds.Id), 970) try: dap[-1] except IndexError: pass try: dap[100000000] except IndexError: pass try: dap[('nada', 'EMAG2_V3_20170530_SeaLevel')] except gxdap.DapClientException: pass ds = dap.datacard_from_id(905) self.assertEqual(ds.Title, 'SRTM1 Middle East') def test_fetch_grid(self): self.start() with gxdap.DapClient() as dap: # get a grid dataset = dap['SRTM1 Canada'] extent = gxgeo.Point2(((-79.8, 43.5), (-79.25, 43.8)), coordinate_system='NAD83') extent = gxgeo.Point2(extent, coordinate_system='NAD83 / UTM zone 17N') data_file = dap.fetch_data(dataset, extent=extent, progress=print, resolution=500) with gxgrd.Grid.open(data_file) as grd: self.assertEqual(grd.nx, 115) def test_fetch_point(self): self.start() with gxdap.DapClient() as dap: # some point data dataset = dap['Kimberlite Indicator Mineral Grain Chemistry'] extent = gxgeo.Point2(((-112, 65), (-111, 65.5)), coordinate_system='NAD83') data_file = dap.fetch_data(dataset, extent=extent, progress=print) self.assertEqual(os.path.splitext(data_file)[1], '.csv') extent = gxgeo.Point2(((-80, 65), (-70, 65.5)), coordinate_system='NAD83') self.assertRaises(gxdap.DapClientException, dap.fetch_data, dataset, None, extent) def test_datacard_properties(self): self.start() with gxdap.DapClient() as dap: # some point data datacard = dap['Kimberlite Indicator Mineral Grain Chemistry'] # individual properties self.assertEqual(datacard.info['Id'], '127') self.assertEqual(datacard.edition, '') 
self.assertEqual(datacard.disclaimer['title'], 'Copyright Notice') self.assertEqual(datacard.permission, 1) self.assertEqual(len(datacard.metadata), 4) self.assertEqual(str(gxcs.Coordinate_system(datacard.spatial_properties['CoordinateSystem'])), 'WGS 84') self.assertEqual(len(datacard.point_properties), 9) self.assertTrue(datacard.grid_properties is None) self.assertTrue(datacard.voxel_properties is None) self.assertTrue(datacard.map_properties is None) self.assertEqual(len(dap.datacard_from_id(905).grid_properties), 13) self.assertEqual(len(dap.datacard_from_id(872).document_properties), 8) def test_geometry(self): self.start() with gxdap.DapClient() as dap: datacard= dap['Kimberlite Indicator Mineral Grain Chemistry'] ex = datacard.extent self.assertEqual(str(ex.coordinate_system), 'WGS 84') ############################################################################################### if __name__ == '__main__': gxc = gx.GXpy() print(gxc.gid) unittest.main() <file_sep>/geosoft/gxapi/GXTPAT.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXTPAT(gxapi_cy.WrapTPAT): """ GXTPAT class. The full name of the pattern. ex: "felsic volcanics" Code: Short-form of the pattern description. This is the value which typically appears (for instance) in the "Rock code" channel in a Wholeplot From-To data group. ex: "FVOL" The code is CASE-SENSITIVE. Label: Text to use as a short-form in labels, graphs etc. By default, this is the same as the code. ex: "FVol." 
Pattern Attributes: (See DEFAULT.`GXPAT <geosoft.gxapi.GXPAT>` in \\src\\etc for more information) Pattern: The Pattern Index; defined in DEFAULT.`GXPAT <geosoft.gxapi.GXPAT>`, or in the user's USER.`GXPAT <geosoft.gxapi.GXPAT>` file. If not specified, defaults to 0 (solid fill). Size: The pattern tile size. If not specified, defaults to 2.0mm. Density: The tiling density. If not specified, defaults to 1.0. Thickness: The line thickness in the tile, expressed as an integer percentage (0-100) of the tile size. Color: The pattern line work color. If not specified, defaults to black. Background color: The pattern background color. If not specified, defaults to transparent (C_ANY_NONE) Symbols: Symbol Font The name of the symbol font to use for a given symbol index Symbol Number Index into the font. Symbol Rotation: Rotation in degrees CCW. Symbol Scaling Additional scale factor to apply to symbol size (Default 1.0) """ def __init__(self, handle=0): super(GXTPAT, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXTPAT <geosoft.gxapi.GXTPAT>` :returns: A null `GXTPAT <geosoft.gxapi.GXTPAT>` :rtype: GXTPAT """ return GXTPAT() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def add_color(self, code, label, descr, color): """ Add a new color to the list :param code: Code (required - CASE SENSITIVE) :param label: Label (optional, can be "") :param descr: Description (optional, can be "") :param color: Color (use `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` to convert to int). :type code: str :type label: str :type descr: str :type color: int .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The new code must be unique; not in the existing list.
""" self._add_color(code.encode(), label.encode(), descr.encode(), color) @classmethod def create(cls): """ Creates an empty thematic pattern object. :returns: `GXTPAT <geosoft.gxapi.GXTPAT>` object :rtype: GXTPAT .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapTPAT._create(GXContext._get_tls_geo()) return GXTPAT(ret_val) def code(self, code): """ Find the index of a given thematic pattern :param code: Pattern code (case sensitive) :type code: str :returns: The code index, -1 if not found :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._code(code.encode()) return ret_val def get_solid_pattern(self, index, code, label, descr, color): """ Get solid pattern info from the `GXTPAT <geosoft.gxapi.GXTPAT>`. :param index: Index :param code: Returned Code :param label: Returned Label :param descr: Returned Description :param color: Color. :type index: int :type code: str_ref :type label: str_ref :type descr: str_ref :type color: int_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns the solid color, pattern foreground color, or symbol color, along with the code, label and description. """ code.value, label.value, descr.value, color.value = self._get_solid_pattern(index, code.value.encode(), label.value.encode(), descr.value.encode(), color.value) def size(self): """ Returns the number of rows (items) in the `GXTPAT <geosoft.gxapi.GXTPAT>` object. :returns: Number of `GXTPAT <geosoft.gxapi.GXTPAT>` items. :rtype: int .. 
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._size() return ret_val def load_csv(self, file): """ Load thematic patterns from a CSV file :param file: Thematic Pattern file name :type file: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The type of thematic patterns file is recognized from the types of fields found inside it. The following fields are identified. Only the "CODE" field is required, as the "default" thematic pattern is a solid black color. CODE The pattern code (required by all types - CASE SENSITIVE) LABEL Longer text identifier to use in legends etc. (up to 31 characters) DESCRIPTION Much longer text string (up to 127 characters). COLOR Line color used in patterns, and for solid colors, the color. If only this field is found (and none below), the pattern file is assumed to be type TPAT_TYPE_COLOR. PATTERN Geosoft pattern ID. PAT_SIZE Pattern tile size, or symbol size (default 2mm) PAT_DENSITY Pattern tile density (default 1.0) PAT_THICKNESS Pattern line thickness as % of size (default 5) BACK_COLOR Background color for the pattern. Also used for symbols (Default background is transparent). SYMBFONT Symbol font (e.g. 
"symbols.gfn") SYMBNUM Symbol number of the current font SYMBROT Symbol rotation SYMBSCL Additional scaling factor applied to the current size """ self._load_csv(file.encode()) def setup_translation_vv(self, ltb, field, vv_values): """ Initializes a `GXVV <geosoft.gxapi.GXVV>` used to map `GXTPAT <geosoft.gxapi.GXTPAT>` indices to output values :param ltb: Table containing `GXTPAT <geosoft.gxapi.GXTPAT>` codes as the key :param field: Field in `GXLTB <geosoft.gxapi.GXLTB>` with the output values (numeric or string) :param vv_values: Returned values for each `GXTPAT <geosoft.gxapi.GXTPAT>` index :type ltb: GXLTB :type field: int :type vv_values: GXVV .. versionadded:: 7.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The input `GXLTB <geosoft.gxapi.GXLTB>` object should have key values matching the `GXTPAT <geosoft.gxapi.GXTPAT>` codes. Whether the matches are case sensitive or not is dependent on how the `GXLTB <geosoft.gxapi.GXLTB>` object was created (see ltb.h). The `GXLTB <geosoft.gxapi.GXLTB>` field values are converted to the output `GXVV <geosoft.gxapi.GXVV>` type. """ self._setup_translation_vv(ltb, field, vv_values) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXGER.rst .. _GXGER: GXGER class ================================== .. autoclass:: geosoft.gxapi.GXGER :members: <file_sep>/geosoft/gxapi/GXVULCAN.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from .
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVULCAN(gxapi_cy.WrapVULCAN): """ GXVULCAN class. The `GXVULCAN <geosoft.gxapi.GXVULCAN>` class is used for importing Maptek® Vulcan block and triangulation files. """ def __init__(self, handle=0): super(GXVULCAN, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVULCAN <geosoft.gxapi.GXVULCAN>` :returns: A null `GXVULCAN <geosoft.gxapi.GXVULCAN>` :rtype: GXVULCAN """ return GXVULCAN() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def is_valid_triangulation_file(cls, triangulation_file): """ Check if the given file can be opened as a Vulcan triangulation file. :param triangulation_file: Triangulation file :type triangulation_file: str :returns: 0 - No 1 - Yes :rtype: int .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapVULCAN._is_valid_triangulation_file(GXContext._get_tls_geo(), triangulation_file.encode()) return ret_val @classmethod def is_valid_block_model_file(cls, block_model_file): """ Check if the given file can be opened as a Vulcan block model file. :param block_model_file: Block model file :type block_model_file: str :returns: 0 - No 1 - Yes :rtype: int .. 
versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapVULCAN._is_valid_block_model_file(GXContext._get_tls_geo(), block_model_file.encode()) return ret_val @classmethod def triangulation_to_view(cls, triangulation_file, ipj, mview, new_group_name): """ Draw triangle edges in a Vulcan triangulation file to a 3D view in a map. :param triangulation_file: Triangulation file :param ipj: Triangulation projection :param mview: Destination `GXMVIEW <geosoft.gxapi.GXMVIEW>` :param new_group_name: New group name :type triangulation_file: str :type ipj: GXIPJ :type mview: GXMVIEW :type new_group_name: str .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVULCAN._triangulation_to_view(GXContext._get_tls_geo(), triangulation_file.encode(), ipj, mview, new_group_name.encode()) @classmethod def get_block_model_variable_info(cls, block_model_file, query, lst): """ Query a block model for the variable names and descriptions. :param block_model_file: Block model file :param query: :ref:`BLOCK_MODEL_VARIABLE_TYPE` Which variables to return. :param lst: List used to return variable names/descriptions. :type block_model_file: str :type query: int :type lst: GXLST .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVULCAN._get_block_model_variable_info(GXContext._get_tls_geo(), block_model_file.encode(), query, lst) @classmethod def get_block_model_string_variable_values(cls, block_model_file, variable_name, lst): """ Query a block model for the values a string variable can assume. 
:param block_model_file: Block model file :param variable_name: Variable name :param lst: List used to return variable names :type block_model_file: str :type variable_name: str :type lst: GXLST .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVULCAN._get_block_model_string_variable_values(GXContext._get_tls_geo(), block_model_file.encode(), variable_name.encode(), lst) @classmethod def block_model_to_voxel(cls, block_model_file, ipj, variable_to_export, output_voxel_filename, remove_default_values, rock_code_filename): """ Create a Geosoft voxel file from a Vulcan block model file. :param block_model_file: Block model file :param ipj: Block model projection :param variable_to_export: Variable to export :param output_voxel_filename: Output voxel filename :param remove_default_values: Remove default values from input? :param rock_code_filename: Rock code file for string variable imports. Optional, unused for numeric variable imports. :type block_model_file: str :type ipj: GXIPJ :type variable_to_export: str :type output_voxel_filename: str :type remove_default_values: bool :type rock_code_filename: str .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVULCAN._block_model_to_voxel(GXContext._get_tls_geo(), block_model_file.encode(), ipj, variable_to_export.encode(), output_voxel_filename.encode(), remove_default_values, rock_code_filename.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXLL2.rst .. _GXLL2: GXLL2 class ================================== ..
autoclass:: geosoft.gxapi.GXLL2 :members: <file_sep>/geosoft/gxapi/GXEDOC.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXEDOC(gxapi_cy.WrapEDOC): """ GXEDOC class. The `GXEDOC <geosoft.gxapi.GXEDOC>` class provides access to a generic documents views as loaded within Oasis montaj. """ def __init__(self, handle=0): super(GXEDOC, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXEDOC <geosoft.gxapi.GXEDOC>` :returns: A null `GXEDOC <geosoft.gxapi.GXEDOC>` :rtype: GXEDOC """ return GXEDOC() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # GMSYS 3D Models @classmethod def create_new_gms_3d(cls, name, nx, ny, type): """ Creates a new `GXGMSYS <geosoft.gxapi.GXGMSYS>` 3D Model into the workspace, flags as new. :param name: Document to load. :param nx: X Size :param ny: Y Size :param type: :ref:`GMS3D_MODELTYPE` :type name: str :type nx: int :type ny: int :type type: int :returns: Handle to the newly created edited model. :rtype: GXEDOC .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** See `load <geosoft.gxapi.GXEDOC.load>`. 
This is used for brand new documents, it also sets an internal flag such that if on closing the user chooses not to save changes, the document is deleted thus keeping the project folders clean. """ ret_val = gxapi_cy.WrapEDOC._create_new_gms_3d(GXContext._get_tls_geo(), name.encode(), nx, ny, type) return GXEDOC(ret_val) # Miscellaneous @classmethod def current(cls, type): """ This method returns the Current Edited Document. :param type: :ref:`EDOC_TYPE` :type type: int :returns: `GXEDOC <geosoft.gxapi.GXEDOC>` Object :rtype: GXEDOC .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDOC._current(GXContext._get_tls_geo(), type) return GXEDOC(ret_val) @classmethod def current_no_activate(cls, type): """ This method returns the Current Edited Document. :param type: :ref:`EDOC_TYPE` :type type: int :returns: `GXEDOC <geosoft.gxapi.GXEDOC>` Object :rtype: GXEDOC .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function acts just like `current <geosoft.gxapi.GXEDOC.current>` except that the document is not activated (brought to foreground) and no guarantee is given about which document is currently active. """ ret_val = gxapi_cy.WrapEDOC._current_no_activate(GXContext._get_tls_geo(), type) return GXEDOC(ret_val) @classmethod def current_if_exists(cls, type): """ This method returns the Current Edited Document. :param type: :ref:`EDOC_TYPE` :type type: int :returns: `GXEDOC <geosoft.gxapi.GXEDOC>` Object to current edited document. If there is no current document, the user is not prompted for a document, and 0 is returned. :rtype: GXEDOC .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDOC._current_if_exists(GXContext._get_tls_geo(), type) return GXEDOC(ret_val) @classmethod def get_documents_lst(cls, lst, path, type): """ Load the file names of open documents into a `GXLST <geosoft.gxapi.GXLST>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` to load :param path: :ref:`EDOC_PATH` :param type: :ref:`EDOC_TYPE` :type lst: GXLST :type path: int :type type: int :returns: The number of documents loaded into the `GXLST <geosoft.gxapi.GXLST>`. The `GXLST <geosoft.gxapi.GXLST>` is cleared first. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDOC._get_documents_lst(GXContext._get_tls_geo(), lst, path, type) return ret_val def get_name(self, name): """ Get the name of the document object of this `GXEDOC <geosoft.gxapi.GXEDOC>`. :param name: Name returned :type name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ name.value = self._get_name(name.value.encode()) def get_window_state(self): """ Retrieve the current state of the document window :returns: :ref:`EDOC_WINDOW_STATE` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" ret_val = self._get_window_state() return ret_val @classmethod def have_current(cls, type): """ Returns true if a document is loaded :param type: :ref:`EDOC_TYPE` :type type: int :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDOC._have_current(GXContext._get_tls_geo(), type) return ret_val @classmethod def loaded(cls, name, type): """ Returns 1 if a document is loaded . :param name: document name :param type: :ref:`EDOC_TYPE` :type name: str :type type: int :returns: 1 if document is loaded, 0 otherwise. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDOC._loaded(GXContext._get_tls_geo(), name.encode(), type) return ret_val def get_window_position(self, left, top, right, bottom, state, is_floating): """ Get the map window's position and dock state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`EDOC_WINDOW_STATE` :param is_floating: Docked or floating :ref:`EDOC_WINDOW_POSITION` :type left: int_ref :type top: int_ref :type right: int_ref :type bottom: int_ref :type state: int_ref :type is_floating: int_ref .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" left.value, top.value, right.value, bottom.value, state.value, is_floating.value = self._get_window_position(left.value, top.value, right.value, bottom.value, state.value, is_floating.value) def set_window_position(self, left, top, right, bottom, state, is_floating): """ Get the map window's position and dock state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`EDOC_WINDOW_STATE` :param is_floating: Docked or floating :ref:`EDOC_WINDOW_POSITION` :type left: int :type top: int :type right: int :type bottom: int :type state: int :type is_floating: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_window_position(left, top, right, bottom, state, is_floating) def read_only(self): """ Checks if a document is currently opened in a read-only mode. :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._read_only() return ret_val @classmethod def load(cls, name, type): """ Loads a list of documents into the workspace :param name: list of documents (';' or '|' delimited) to load. :param type: :ref:`EDOC_TYPE` :type name: str :type type: int :returns: Handle to current edited document, which will be the last document in the list. :rtype: GXEDOC .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The last listed document will become the current document. 
Only the first file in the list may have a directory path. All other files in the list are assumed to be in the same directory as the first file. """ ret_val = gxapi_cy.WrapEDOC._load(GXContext._get_tls_geo(), name.encode(), type) return GXEDOC(ret_val) @classmethod def load_no_activate(cls, name, type): """ Loads a list of documents into the workspace :param name: list of documents (';' or '|' delimited) to load. :param type: :ref:`EDOC_TYPE` :type name: str :type type: int :returns: Handle to current edited document, which will be the last document in the list. :rtype: GXEDOC .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function acts just like `load <geosoft.gxapi.GXEDOC.load>` except that the document(s) is not activated (brought to foreground) and no guarantee is given about which document is currently active. """ ret_val = gxapi_cy.WrapEDOC._load_no_activate(GXContext._get_tls_geo(), name.encode(), type) return GXEDOC(ret_val) def make_current(self): """ Makes this `GXEDOC <geosoft.gxapi.GXEDOC>` object the current active object to the user. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._make_current() def set_window_state(self, state): """ Changes the state of the document window :param state: :ref:`EDOC_WINDOW_STATE` :type state: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" self._set_window_state(state) @classmethod def sync(cls, file, type): """ Synchronize the Metadata of a document that is not currently open :param file: Document file name :param type: :ref:`EDOC_TYPE` :type file: str :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEDOC._sync(GXContext._get_tls_geo(), file.encode(), type) def sync_open(self): """ Synchronize the Metadata of a document .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._sync_open() @classmethod def un_load(cls, name, type): """ Unloads an edited document. :param name: Name of document to unload :param type: :ref:`EDOC_TYPE` :type name: str :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the document is not loaded, nothing happens. Same as `un_load_verify <geosoft.gxapi.GXEDOC.un_load_verify>` with FALSE to prompt save. """ gxapi_cy.WrapEDOC._un_load(GXContext._get_tls_geo(), name.encode(), type) @classmethod def un_load_all(cls, type): """ Unloads all opened documents :param type: :ref:`EDOC_TYPE` :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program.
""" gxapi_cy.WrapEDOC._un_load_all(GXContext._get_tls_geo(), type) @classmethod def un_load_discard(cls, name, type): """ Unloads a document in the workspace, discards changes. :param name: Name of document to unload :param type: :ref:`EDOC_TYPE` :type name: str :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the document is not loaded, nothing happens. """ gxapi_cy.WrapEDOC._un_load_discard(GXContext._get_tls_geo(), name.encode(), type) @classmethod def un_load_verify(cls, name, verify, type): """ Unloads an edited document, optional prompt to save. :param name: Name of document to unload :param verify: :ref:`EDOC_UNLOAD` :param type: :ref:`EDOC_TYPE` :type name: str :type verify: int :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the document is not loaded, nothing happens. The user can be prompted to save before unloading. If `EDOC_UNLOAD_NO_PROMPT <geosoft.gxapi.EDOC_UNLOAD_NO_PROMPT>`, data is always saved. """ gxapi_cy.WrapEDOC._un_load_verify(GXContext._get_tls_geo(), name.encode(), verify, type) # Obsolete def copy(self, output): """ Copies a managed document to another document. :param output: Name of output document :type output: str :rtype: bool .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Copies a managed document to another document. """ ret_val = self._copy(output.encode()) return ret_val def save(self): """ Save the managed document. .. 
versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Save the managed document """ self._save() ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXDATAMINE.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDATAMINE(gxapi_cy.WrapDATAMINE): """ GXDATAMINE class. `GXDATAMINE <geosoft.gxapi.GXDATAMINE>` functions provide an interface to Datamine Software Limited files. See also `GXGIS <geosoft.gxapi.GXGIS>` for various other Datamine-specific functions. **Note:** None. """ def __init__(self, handle=0): super(GXDATAMINE, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDATAMINE <geosoft.gxapi.GXDATAMINE>` :returns: A null `GXDATAMINE <geosoft.gxapi.GXDATAMINE>` :rtype: GXDATAMINE """ return GXDATAMINE() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create_voxel(cls, file, field, ipj, meta, voxel): """ Create a Geosoft Voxel file from a Datamine block model file. 
:param file: Datamine file name :param field: Field to use for data :param ipj: Projection to set :param meta: `GXMETA <geosoft.gxapi.GXMETA>` to set :param voxel: Output voxel file name :type file: str :type field: str :type ipj: GXIPJ :type meta: GXMETA :type voxel: str .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Create a Geosoft Voxel file from a Datamine block model file. """ gxapi_cy.WrapDATAMINE._create_voxel(GXContext._get_tls_geo(), file.encode(), field.encode(), ipj, meta, voxel.encode()) @classmethod def numeric_field_lst(cls, file, lst): """ Return a `GXLST <geosoft.gxapi.GXLST>` containing the non-standard numeric fields in a Datamine file. :param file: Datamine file name :param lst: `GXLST <geosoft.gxapi.GXLST>` to populate :type file: str :type lst: GXLST .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** At this time, only `GIS_DMTYPE_BLOCKMODEL <geosoft.gxapi.GIS_DMTYPE_BLOCKMODEL>` files are supported. The field names go in the name part, and field indices (1 to N) in the value part. """ gxapi_cy.WrapDATAMINE._numeric_field_lst(GXContext._get_tls_geo(), file.encode(), lst) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXACQUIRE.rst .. _GXACQUIRE: GXACQUIRE class ================================== .. autoclass:: geosoft.gxapi.GXACQUIRE :members: .. _ACQUIRE_SEL: ACQUIRE_SEL constants ----------------------------------------------------------------------- Type of Selection .. autodata:: geosoft.gxapi.ACQUIRE_SEL_HOLES :annotation: .. autoattribute:: geosoft.gxapi.ACQUIRE_SEL_HOLES .. 
autodata:: geosoft.gxapi.ACQUIRE_SEL_POINT :annotation: .. autoattribute:: geosoft.gxapi.ACQUIRE_SEL_POINT <file_sep>/geosoft/gxapi/GXVOXE.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVOXE(gxapi_cy.WrapVOXE): """ GXVOXE class. `GXVOX <geosoft.gxapi.GXVOX>` evaluator class. Used to sample values from the voxel. """ def __init__(self, handle=0): super(GXVOXE, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVOXE <geosoft.gxapi.GXVOXE>` :returns: A null `GXVOXE <geosoft.gxapi.GXVOXE>` :rtype: GXVOXE """ return GXVOXE() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, vox): """ Create a handle to an `GXVOXE <geosoft.gxapi.GXVOXE>` object :param vox: `GXVOX <geosoft.gxapi.GXVOX>` Object :type vox: GXVOX :returns: `GXVOXE <geosoft.gxapi.GXVOXE>` handle, terminates if creation fails :rtype: GXVOXE .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapVOXE._create(GXContext._get_tls_geo(), vox) return GXVOXE(ret_val) def profile(self, vv_x, vv_y, vv_z, vv_d, interp): """ Extract a profile of data along points provided. 
:param vv_x: X `GXVV <geosoft.gxapi.GXVV>` (must be double) :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` (must be double) :param vv_z: Z `GXVV <geosoft.gxapi.GXVV>` (must be double) :param vv_d: D `GXVV <geosoft.gxapi.GXVV>` (must be double) :param interp: :ref:`VOXE_EVAL` :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV :type interp: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._profile(vv_x, vv_y, vv_z, vv_d, interp) def value(self, x, y, z, interp): """ Get a value at a specific point :param x: X Location :param y: Y Location :param z: Z Location :param interp: :ref:`VOXE_EVAL` :type x: float :type y: float :type z: float :type interp: int :returns: Value at the point or DUMMY if not valid :rtype: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._value(x, y, z, interp) return ret_val def vector(self, ox, oy, oz, vx, vy, vz, vv, interp): """ Extract a profile of data along a vector :param ox: X Origin :param oy: Y Origin :param oz: Z Origin :param vx: X Delta :param vy: Y Delta :param vz: Z Delta :param vv: Data `GXVV <geosoft.gxapi.GXVV>` (must be double) :param interp: :ref:`VOXE_EVAL` :type ox: float :type oy: float :type oz: float :type vx: float :type vy: float :type vz: float :type vv: GXVV :type interp: int .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._vector(ox, oy, oz, vx, vy, vz, vv, interp) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/Geosoft modules - gxapi and gxpy/get_data_files.py import geosoft.gxpy.gx as gx import geosoft.gxpy.utility as gxu gxc = gx.GXpy() url = 'https://github.com/GeosoftInc/gxpy/raw/9.3/examples/tutorial/Geosoft%20modules%20-%20gxapi%20and%20gxpy/' gxu.url_retrieve(url + 'test.grd') gxu.url_retrieve(url + 'test.grd.gi')<file_sep>/docs/GXARCPY.rst .. _GXARCPY: GXARCPY class ================================== .. autoclass:: geosoft.gxapi.GXARCPY :members: <file_sep>/docs/GXHXYZ.rst .. _GXHXYZ: GXHXYZ class ================================== .. autoclass:: geosoft.gxapi.GXHXYZ :members: <file_sep>/geosoft/gxapi/GXIMU.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXIMG import GXIMG ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXIMU(gxapi_cy.WrapIMU): """ GXIMU class. Not a class. This is a catch-all group of functions working on `GXIMG <geosoft.gxapi.GXIMG>` objects (see `GXIMG <geosoft.gxapi.GXIMG>`). Grid operations include masking, trending, windowing, expanding and grid stitching. 
""" def __init__(self, handle=0): super(GXIMU, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXIMU <geosoft.gxapi.GXIMU>` :returns: A null `GXIMU <geosoft.gxapi.GXIMU>` :rtype: GXIMU """ return GXIMU() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def agg_to_geo_color(cls, agg, grid, ipj, res): """ Create a Geosoft color grid from an aggregate. :param agg: Input Aggregate :param grid: Output image name :param ipj: Projection to use :param res: Resolution (Cell Size) size to use :type agg: GXAGG :type grid: str :type ipj: GXIPJ :type res: float .. versionadded:: 5.1.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This consumes a very small amount of memory """ gxapi_cy.WrapIMU._agg_to_geo_color(GXContext._get_tls_geo(), agg, grid.encode(), ipj, res) @classmethod def crc(cls, img, pul_crc): """ Computes a CRC Checksum on an image. :param img: Input image :param pul_crc: Starting CRC (use `CRC_INIT_VALUE <geosoft.gxapi.CRC_INIT_VALUE>` if none) :type img: GXIMG :type pul_crc: int :returns: CRC value :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIMU._crc(GXContext._get_tls_geo(), img, pul_crc) return ret_val @classmethod def crc_grid(cls, grid, pul_crc): """ Computes a CRC Checksum on a grid. :param grid: Grid :param pul_crc: Starting CRC (use `CRC_INIT_VALUE <geosoft.gxapi.CRC_INIT_VALUE>` if none) :type grid: str :type pul_crc: int :returns: CRC value :rtype: int .. 
versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapIMU._crc_grid(GXContext._get_tls_geo(), grid.encode(), pul_crc)
        return ret_val



    @classmethod
    def crc_grid_inexact(cls, grid, pul_crc, float_bits, double_bits):
        """
        Computes a CRC Checksum on a grid and allows you to specify the
        number of bits of floats/doubles to drop so that the CRC will be
        the same even if these are changed.

        :param grid: Grid
        :param pul_crc: Starting CRC (use `CRC_INIT_VALUE <geosoft.gxapi.CRC_INIT_VALUE>` if none)
        :param float_bits: :ref:`IMU_FLOAT_CRC_BITS`
        :param double_bits: :ref:`IMU_DOUBLE_CRC_BITS`
        :type grid: str
        :type pul_crc: int
        :type float_bits: int
        :type double_bits: int

        :returns: CRC value
        :rtype: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Very useful for testing where the last bits of accuracy are not as important.
        """
        ret_val = gxapi_cy.WrapIMU._crc_grid_inexact(GXContext._get_tls_geo(), grid.encode(), pul_crc, float_bits, double_bits)
        return ret_val



    @classmethod
    def crc_inexact(cls, img, pul_crc, float_bits, double_bits):
        """
        Computes a CRC Checksum on an image and allows you to specify the
        number of bits of floats/doubles to drop so that the CRC will be
        the same even if these are changed.

        :param img: Input image
        :param pul_crc: Starting CRC (use `CRC_INIT_VALUE <geosoft.gxapi.CRC_INIT_VALUE>` if none)
        :param float_bits: :ref:`IMU_FLOAT_CRC_BITS`
        :param double_bits: :ref:`IMU_DOUBLE_CRC_BITS`
        :type img: GXIMG
        :type pul_crc: int
        :type float_bits: int
        :type double_bits: int

        :returns: CRC value
        :rtype: int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Very useful for testing where the last bits of accuracy are not as important.
""" ret_val = gxapi_cy.WrapIMU._crc_inexact(GXContext._get_tls_geo(), img, pul_crc, float_bits, double_bits) return ret_val @classmethod def decimate_crooked_section_grid(cls, img, dec_X, dec_y, out): """ Decimate a crooked section grid. :param img: Input grid IMG :param dec_X: :def:Decimation factor along section (>=1) :param dec_y: :def:Decimation factor down section (>=1) :param out: File name of output grid :type img: GXIMG :type dec_X: int :type dec_y: int :type out: str .. versionadded:: 2023.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** For decimation factor N, every MN'th value is selected for M = 0, 1, 2, ... The full distance is recalculated based on the remaining points because it will almost certainly be smaller. """ gxapi_cy.WrapIMU._decimate_crooked_section_grid(GXContext._get_tls_geo(), img, dec_X, dec_y, out.encode()) @classmethod def export_grid_without_data_section_xml(cls, grid, crc, file): """ Export a Grid minus the data section as an XML file. :param grid: Grid :param crc: CRC returned :param file: Output XML file :type grid: str :type crc: int_ref :type file: str .. versionadded:: 7.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ crc.value = gxapi_cy.WrapIMU._export_grid_without_data_section_xml(GXContext._get_tls_geo(), grid.encode(), crc.value, file.encode()) @classmethod def export_grid_xml(cls, grid, crc, file): """ Export a Grid as an XML file. :param grid: Grid :param crc: CRC returned :param file: Output XML file :type grid: str :type crc: int_ref :type file: str .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ crc.value = gxapi_cy.WrapIMU._export_grid_xml(GXContext._get_tls_geo(), grid.encode(), crc.value, file.encode()) @classmethod def export_raw_xml(cls, img, crc, file): """ Export a Grid as an XML file using a fast raw output. :param img: Image :param crc: CRC returned :param file: Output XML file :type img: GXIMG :type crc: int_ref :type file: str .. versionadded:: 7.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ crc.value = gxapi_cy.WrapIMU._export_raw_xml(GXContext._get_tls_geo(), img, crc.value, file.encode()) @classmethod def export_xml(cls, img, crc, file): """ Export a Grid as an XML file. :param img: Image :param crc: CRC returned :param file: Output XML file :type img: GXIMG :type crc: int_ref :type file: str .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ crc.value = gxapi_cy.WrapIMU._export_xml(GXContext._get_tls_geo(), img, crc.value, file.encode()) @classmethod def get_zvv(cls, img, vv_x, vv_y, vv_z): """ Extract an interpolated image value for given XY `GXVV <geosoft.gxapi.GXVV>` locations :param img: Input grid :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` :param vv_z: Z `GXVV <geosoft.gxapi.GXVV>` filled with values (set to be same size as X, Y) :type img: GXIMG :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. 
versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapIMU._get_zvv(GXContext._get_tls_geo(), img, vv_x, vv_y, vv_z) @classmethod def get_z_peaks_vv(cls, img, vv_x, vv_y, vv_z): """ Same as `get_zvv <geosoft.gxapi.GXIMU.get_zvv>`, but find the closest peak value to the input locations, and return the peak value and peak value location. :param img: Input grid :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` :param vv_z: Z `GXVV <geosoft.gxapi.GXVV>` filled with values (set to be same size as X, Y) :type img: GXIMG :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 9.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The returned locations will always be a grid point location; no interpolation is performed when locating the peaks. A simple search is done of all neighbouring points from the starting point, and once no neighbours can be located with a higher value, the search stops. """ gxapi_cy.WrapIMU._get_z_peaks_vv(GXContext._get_tls_geo(), img, vv_x, vv_y, vv_z) @classmethod def grid_add(cls, img1, m1, img2, m2, imgo): """ Adds two Grid images together point-by-point. :param img1: Image of first grid :param m1: Multiplier to operate on first grid image :param img2: Image of second grid :param m2: Multiplier to operate on second grid image :param imgo: Output grid image :type img1: GXIMG :type m1: float :type img2: GXIMG :type m2: float :type imgo: GXIMG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` parameters MUST be of type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`! If not, the method will terminate. 
""" gxapi_cy.WrapIMU._grid_add(GXContext._get_tls_geo(), img1, m1, img2, m2, imgo) @classmethod def grid_agc(cls, i_img, o_img, width, max_gain, remove_background): """ Automatic Gain Compensation of a grid. :param i_img: Image of input grid :param o_img: Image of output grid :param width: Width of filter to separate signal from background. :param max_gain: Maximum gain applied to the signal. :param remove_background: Remove background before applying gain? :type i_img: GXIMG :type o_img: GXIMG :type width: int :type max_gain: float :type remove_background: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` parameters MUST be of type `GS_FLOAT <geosoft.gxapi.GS_FLOAT>`! If not, the method will terminate. """ gxapi_cy.WrapIMU._grid_agc(GXContext._get_tls_geo(), i_img, o_img, width, max_gain, remove_background) @classmethod def grid_bool(cls, img1, img2, out, boolean_op, sizing, olap): """ Mask one grid against another using boolean logic operations. :param img1: Image of first input grid :param img2: Image of second input grid :param out: File name of output grid :param boolean_op: :ref:`IMU_BOOL_OPT` :param sizing: :ref:`IMU_BOOL_SIZING` :param olap: :ref:`IMU_BOOL_OLAP` :type img1: GXIMG :type img2: GXIMG :type out: str :type boolean_op: int :type sizing: int :type olap: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` parameters must be of type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`! If not, the method will terminate. 
""" gxapi_cy.WrapIMU._grid_bool(GXContext._get_tls_geo(), img1, img2, out.encode(), boolean_op, sizing, olap) @classmethod def grid_edge(cls, grid, vv_x, vv_y): """ Get grid edge points :param grid: Grid file name :param vv_x: X coordinates of edge points :param vv_y: Y coordinates of edge points :type grid: str :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapIMU._grid_edge(GXContext._get_tls_geo(), grid.encode(), vv_x, vv_y) @classmethod def grid_edge_ply(cls, img, ply, min_points): """ Get grid edge points :param img: The Grid :param ply: `GXPLY <geosoft.gxapi.GXPLY>` containing the edges. :param min_points: Minimum number of points in polygons (0 for all) :type img: GXIMG :type ply: GXPLY :type min_points: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Unlike `grid_ply <geosoft.gxapi.GXIMU.grid_ply>` and GridPlyEx_IMU, the image is not altered. It just gives the `GXPLY <geosoft.gxapi.GXPLY>`. """ gxapi_cy.WrapIMU._grid_edge_ply(GXContext._get_tls_geo(), img, ply, min_points) @classmethod def grid_expand(cls, im_gi, out, per, shape, x, y): """ Expand a grid and place dummies in the area beyond the original edges. :param im_gi: Image of input grid :param out: File name of output grid :param per: Minimum percentage to expand the grid by :param shape: :ref:`IMU_EXPAND_SHAPE` :param x: X Dimension the output grid is expanded to :param y: Y Dimension the output grid is expanded to :type im_gi: GXIMG :type out: str :type per: float :type shape: int :type x: int :type y: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` parameter MUST be of type `GS_FLOAT <geosoft.gxapi.GS_FLOAT>`! If not, the method will terminate. """ gxapi_cy.WrapIMU._grid_expand(GXContext._get_tls_geo(), im_gi, out.encode(), per, shape, x, y) @classmethod def grid_exp_fill(cls, in_grd, out_grd, p_ex, t_ex): """ Extends and fills a grid for `GXFFT2 <geosoft.gxapi.GXFFT2>`. :param in_grd: Name of the input grid :param out_grd: Name of the output grid :param p_ex: % expansion :param t_ex: Shape of expansion: 0 - rectangle, 1 - square :type in_grd: str :type out_grd: str :type p_ex: float :type t_ex: int .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapIMU._grid_exp_fill(GXContext._get_tls_geo(), in_grd.encode(), out_grd.encode(), p_ex, t_ex) @classmethod def grid_fill(cls, im_gi, im_go, rollopt, rolldist, mxf, mxp, rollbase, alimit, elimit, width, npass): """ Interpolates to fill dummies, generates an output grid. :param im_gi: Image of input grid :param im_go: Image of output grid :param rollopt: :ref:`IMU_FILL_ROLLOPT` :param rolldist: Distance at which to roll off to 0 :param mxf: Maximum prediction filter length :param mxp: Maximum prediction filter area :param rollbase: Base value to roll off to :param alimit: Maximum amplitude allowed in grid :param elimit: Maximum edge amplitude allowed in grid :param width: Width from edge to start limiting from :param npass: Number of convolution passes to apply :type im_gi: GXIMG :type im_go: GXIMG :type rollopt: int :type rolldist: int :type mxf: int :type mxp: int :type rollbase: float :type alimit: float :type elimit: float :type width: int :type npass: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` parameters MUST be of type `GS_FLOAT <geosoft.gxapi.GS_FLOAT>`! If not, the method will terminate. """ gxapi_cy.WrapIMU._grid_fill(GXContext._get_tls_geo(), im_gi, im_go, rollopt, rolldist, mxf, mxp, rollbase, alimit, elimit, width, npass) @classmethod def grid_filt(cls, img, imgo, passes, mult, dum, hz, usefile, file, vv): """ Applies a filter to a grid any number of passes. :param img: Image of first grid :param imgo: Image of second grid :param passes: Number of passes to apply filter (>0) :param mult: Multiplier to apply to grid values :param dum: :ref:`IMU_FILT_DUMMY` :param hz: :ref:`IMU_FILT_HZDRV` :param usefile: :ref:`IMU_FILT_FILE` :param file: Name of file containing filter values :param vv: `GXVV <geosoft.gxapi.GXVV>` containing filter values (if not using a file for the values) MUST BE OF TYPE 'real' :type img: GXIMG :type imgo: GXIMG :type passes: int :type mult: float :type dum: int :type hz: int :type usefile: int :type file: str :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` parameters MUST be of type `GS_FLOAT <geosoft.gxapi.GS_FLOAT>`! If not, the method will terminate. """ gxapi_cy.WrapIMU._grid_filt(GXContext._get_tls_geo(), img, imgo, passes, mult, dum, hz, usefile, file.encode(), vv) @classmethod def grid_head(cls, grid, esep, vsep, x_orig, y_orig, rot): """ Modifies Statistics contained in a grid header. :param grid: Name of the grid whose header is to be modified. 
:param esep: Element separation :param vsep: Vector separation :param x_orig: Grid X Origin on ground :param y_orig: Grid Y Origin on ground :param rot: Grid Rotation :type grid: str :type esep: float :type vsep: float :type x_orig: float :type y_orig: float :type rot: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapIMU._grid_head(GXContext._get_tls_geo(), grid.encode(), esep, vsep, x_orig, y_orig, rot) @classmethod def grid_mask(cls, in_grid, m_grid, pply, mode): """ Create a mask grid using a set of polygon coordinates defined in a separate file, then masking the polygon over an input grid. :param in_grid: Name of input grid :param m_grid: Name of output mask grid file :param pply: Polygon containing mask coordinates :param mode: :ref:`IMU_MASK` :type in_grid: str :type m_grid: str :type pply: GXPLY :type mode: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` parameters MUST be of type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`! If not, the method will terminate. The `GXPLY <geosoft.gxapi.GXPLY>` will contain more than one polygon if it was loaded from a file containing coordinates of more than one polygon. """ gxapi_cy.WrapIMU._grid_mask(GXContext._get_tls_geo(), in_grid.encode(), m_grid.encode(), pply, mode) @classmethod def grid_peak(cls, grid, nlmt, v_vx, v_vy, v_vz): """ Pick grid peaks. :param grid: Grid file name :param nlmt: Peak test directions (1 to 4) :param v_vx: X of found peaks :param v_vy: Y of found peaks :param v_vz: Z values of found peaks :type grid: str :type nlmt: int :type v_vx: GXVV :type v_vy: GXVV :type v_vz: GXVV .. 
versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Peak test directions defines how grid peaks are to be found.
        For example, with the 1, a grid point will be picked if its value
        is greater than its two neighbors in at least one direction.
        Up to 4 directions can be tested.
        """
        gxapi_cy.WrapIMU._grid_peak(GXContext._get_tls_geo(), grid.encode(), nlmt, v_vx, v_vy, v_vz)



    @classmethod
    def grid_ply(cls, img, ply, refresh):
        """
        Get the grid edge in a `GXPLY <geosoft.gxapi.GXPLY>`

        :param img: The `GXIMG <geosoft.gxapi.GXIMG>`
        :param ply: `GXPLY <geosoft.gxapi.GXPLY>` to which the bounding polygons will be added.
        :param refresh: TRUE to force the boundary to be refreshed.
        :type img: GXIMG
        :type ply: GXPLY
        :type refresh: int

        .. versionadded:: 5.1

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** This will optionally refresh the grid boundary `GXPLY <geosoft.gxapi.GXPLY>` and
        return the `GXPLY <geosoft.gxapi.GXPLY>`.

        If the boundary is not refreshed and has never been calculated, the boundary will be the
        bounding rectangle of the grid.

        The grid `GXPLY <geosoft.gxapi.GXPLY>` will be added to existing polygons in the passed `GXPLY <geosoft.gxapi.GXPLY>`.
        """
        gxapi_cy.WrapIMU._grid_ply(GXContext._get_tls_geo(), img, ply, refresh)



    @classmethod
    def grid_ply_ex(cls, img, ply, refresh, min_points):
        """
        Get the grid edge in a `GXPLY <geosoft.gxapi.GXPLY>` (with min points)

        :param img: The `GXIMG <geosoft.gxapi.GXIMG>`
        :param ply: `GXPLY <geosoft.gxapi.GXPLY>` to which the bounding polygons will be added.
        :param refresh: TRUE to force the boundary to be refreshed.
        :param min_points: Minimum number of points in polygons refreshed (0 for all)
        :type img: GXIMG
        :type ply: GXPLY
        :type refresh: int
        :type min_points: int

        ..
versionadded:: 5.1.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** This will optionally refresh the grid boundary `GXPLY <geosoft.gxapi.GXPLY>` and
        return the `GXPLY <geosoft.gxapi.GXPLY>`.

        If the boundary is not refreshed and has never been calculated, the boundary will be the
        bounding rectangle of the grid.

        The grid `GXPLY <geosoft.gxapi.GXPLY>` will be added to existing polygons in the passed `GXPLY <geosoft.gxapi.GXPLY>`.
        """
        gxapi_cy.WrapIMU._grid_ply_ex(GXContext._get_tls_geo(), img, ply, refresh, min_points)



    @classmethod
    def grid_reproject_and_window(cls, input_grid_filename, output_grid_filename, new_projection, min_x, max_x, min_y, max_y):
        """
        Create a new grid by reprojecting an existing grid and windowing its contents

        :param input_grid_filename: Input grid filename
        :param output_grid_filename: Output grid filename
        :param new_projection: Output grid projection
        :param min_x: Window minX (in output projection)
        :param max_x: Window maxX (in output projection)
        :param min_y: Window minY (in output projection)
        :param max_y: Window maxY (in output projection)
        :type input_grid_filename: str
        :type output_grid_filename: str
        :type new_projection: GXIPJ
        :type min_x: float
        :type max_x: float
        :type min_y: float
        :type max_y: float

        ..
versionadded:: 7.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapIMU._grid_reproject_and_window(GXContext._get_tls_geo(), input_grid_filename.encode(), output_grid_filename.encode(), new_projection, min_x, max_x, min_y, max_y)



    @classmethod
    def grid_resample(cls, input_grid_filename, output_grid_filename, o_x, o_y, d_x, d_y, n_x, n_y):
        """
        Create a new grid by resampling an existing grid

        :param input_grid_filename: Input grid filename
        :param output_grid_filename: Output grid filename
        :param o_x: Origin X
        :param o_y: Origin Y
        :param d_x: Cell spacing X
        :param d_y: Cell spacing Y
        :param n_x: Elements in X
        :param n_y: Elements in Y
        :type input_grid_filename: str
        :type output_grid_filename: str
        :type o_x: float
        :type o_y: float
        :type d_x: float
        :type d_y: float
        :type n_x: int
        :type n_y: int

        .. versionadded:: 7.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Works only for unrotated grids.
        """
        gxapi_cy.WrapIMU._grid_resample(GXContext._get_tls_geo(), input_grid_filename.encode(), output_grid_filename.encode(), o_x, o_y, d_x, d_y, n_x, n_y)



    @classmethod
    def grid_resize(cls, in_grd, out_grd):
        """
        Resize a grid to reduce its size so that it does not cover the outside dummies.

        :param in_grd: File name of input grid
        :param out_grd: File name of output grid
        :type in_grd: str
        :type out_grd: str

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapIMU._grid_resize(GXContext._get_tls_geo(), in_grd.encode(), out_grd.encode())



    @classmethod
    def grid_shad(cls, in_grid, sh_grid, inc, dec, scl):
        """
        Create a shaded relief image.

        :param in_grid: Input image name
        :param sh_grid: Output new shaded image
        :param inc: Inclination 0-90 degrees (def.
45) :param dec: Declination 0-360 degrees azimuth (def. 45) :param scl: Vertical scale factor (distance/z unit) :type in_grid: str :type sh_grid: str :type inc: float_ref :type dec: float_ref :type scl: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Pass `GS_R8DM <geosoft.gxapi.GS_R8DM>` as parameters to obtain default values. The default values are returned. """ inc.value, dec.value, scl.value = gxapi_cy.WrapIMU._grid_shad(GXContext._get_tls_geo(), in_grid.encode(), sh_grid.encode(), inc.value, dec.value, scl.value) @classmethod def refresh_shad(cls, in_img, sh_img, inc, dec, scl): """ Refresh a shaded relief image :param in_img: Input grid object :param sh_img: Output shaded grid object :param inc: Inclination 0-90 degrees (def. 45) :param dec: Declination 0-360 degrees azimuth (def. 45) :param scl: Vertical scale factor (distance/z unit) :type in_img: GXIMG :type sh_img: GXIMG :type inc: float_ref :type dec: float_ref :type scl: float_ref .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Pass `GS_R8DM <geosoft.gxapi.GS_R8DM>` as parameters to obtain default values. The default values are returned. """ inc.value, dec.value, scl.value = gxapi_cy.WrapIMU._refresh_shad(GXContext._get_tls_geo(), in_img, sh_img, inc.value, dec.value, scl.value) @classmethod def grid_st(cls, grid, st): """ Update an `GXST <geosoft.gxapi.GXST>` object using a grid. :param grid: Grid name :param st: `GXST <geosoft.gxapi.GXST>` (statistics) object to fill/update :type grid: str :type st: GXST .. 
versionadded:: 5.1.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The input `GXST <geosoft.gxapi.GXST>` object is not initialized by `grid_st <geosoft.gxapi.GXIMU.grid_st>`, so this function can be used to accumulate statistical info on more than a single grid. See `GXST <geosoft.gxapi.GXST>`. """ gxapi_cy.WrapIMU._grid_st(GXContext._get_tls_geo(), grid.encode(), st) @classmethod def grid_stat(cls, grid, type, xelem, yelem, xsep, ysep, kx, x_orig, y_orig, rot, base, mult): """ Reports statistics contained in a grid header. :param grid: Name of the grid to get stats from :param type: Element type in bytes :param xelem: Elements in X direction :param yelem: Elements in Y direction :param xsep: X element separation :param ysep: Y element separation :param kx: KX (storage orientation) :param x_orig: X origin :param y_orig: Y origin :param rot: Grid Rotation :param base: Base removed :param mult: Grid multiplier :type grid: str :type type: int_ref :type xelem: int_ref :type yelem: int_ref :type xsep: float_ref :type ysep: float_ref :type kx: int_ref :type x_orig: float_ref :type y_orig: float_ref :type rot: float_ref :type base: float_ref :type mult: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Statistics are returned in the parameter set """ type.value, xelem.value, yelem.value, xsep.value, ysep.value, kx.value, x_orig.value, y_orig.value, rot.value, base.value, mult.value = gxapi_cy.WrapIMU._grid_stat(GXContext._get_tls_geo(), grid.encode(), type.value, xelem.value, yelem.value, xsep.value, ysep.value, kx.value, x_orig.value, y_orig.value, rot.value, base.value, mult.value) @classmethod def grid_stat_comp(cls, grid, type, xelem, yelem, xsep, ysep, kx, x_orig, y_orig, rot, base, mult, comp): """ Reports statistics contained in a grid header. 
:param grid: Name of the grid to get stats from :param type: Element type: 0 - byte 1 - USHORT 2 - SHORT 3 - LONG 4 - FLOAT 5 - DOUBLE 6 - 32 byte Color (RGBx) :param xelem: Elements in X direction :param yelem: Elements in Y direction :param xsep: X element separation :param ysep: Y element separation :param kx: KX (storage orientation) :param x_orig: X origin :param y_orig: Y origin :param rot: Grid Rotation :param base: Base removed :param mult: Grid multiplier :param comp: Compression Ratio :type grid: str :type type: int_ref :type xelem: int_ref :type yelem: int_ref :type xsep: float_ref :type ysep: float_ref :type kx: int_ref :type x_orig: float_ref :type y_orig: float_ref :type rot: float_ref :type base: float_ref :type mult: float_ref :type comp: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Statistics are returned in the parameter set """ type.value, xelem.value, yelem.value, xsep.value, ysep.value, kx.value, x_orig.value, y_orig.value, rot.value, base.value, mult.value, comp.value = gxapi_cy.WrapIMU._grid_stat_comp(GXContext._get_tls_geo(), grid.encode(), type.value, xelem.value, yelem.value, xsep.value, ysep.value, kx.value, x_orig.value, y_orig.value, rot.value, base.value, mult.value, comp.value) @classmethod def grid_stat_ext(cls, grid, force, items, dums, min, max, mean, stddev): """ Reports statistics of a grid's elements. :param grid: Name of the grid to get stats from :param force: :ref:`IMU_STAT_FORCED` :param items: Number of valid elements in grid :param dums: Number of dummies in grid :param min: Minimum grid value :param max: Maximum grid value :param mean: Grid mean :param stddev: Grid standard deviation :type grid: str :type force: int :type items: int_ref :type dums: int_ref :type min: float_ref :type max: float_ref :type mean: float_ref :type stddev: float_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the :ref:`IMU_STAT_FORCED` value is set, the statistics will be recalculated. Statistics are returned in the parameter set. """ items.value, dums.value, min.value, max.value, mean.value, stddev.value = gxapi_cy.WrapIMU._grid_stat_ext(GXContext._get_tls_geo(), grid.encode(), force, items.value, dums.value, min.value, max.value, mean.value, stddev.value) @classmethod def grid_stat_trend(cls, grid, trend_valid, co, cx, cy): """ Reports Trend Info of a grid (for first order coefficients only). :param grid: Name of the grid to get stats from :param trend_valid: Trend Valid Flag :param co: Trend coefficient rCo :param cx: Trend coefficient rCx :param cy: Trend coefficient rCy :type grid: str :type trend_valid: int_ref :type co: float_ref :type cx: float_ref :type cy: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Trend Info are returned in the parameter set """ trend_valid.value, co.value, cx.value, cy.value = gxapi_cy.WrapIMU._grid_stat_trend(GXContext._get_tls_geo(), grid.encode(), trend_valid.value, co.value, cx.value, cy.value) @classmethod def grid_stat_trend_ext(cls, grid, order, num_coef, xo, yo, vm): """ Reports Extended Trend Info of a grid (for up to third order coefficients). :param grid: Grid name :param order: Trend order :param num_coef: Number of coefficients :param xo: Trend origin Xo :param yo: Trend origin Yo :param vm: `GXVM <geosoft.gxapi.GXVM>` hold coefficient values MUST BE OF TYPE 'real' :type grid: str :type order: int_ref :type num_coef: int_ref :type xo: float_ref :type yo: float_ref :type vm: GXVM .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Trend Info are returned in the parameter set """ order.value, num_coef.value, xo.value, yo.value = gxapi_cy.WrapIMU._grid_stat_trend_ext(GXContext._get_tls_geo(), grid.encode(), order.value, num_coef.value, xo.value, yo.value, vm) @classmethod def slope_standard_deviation(cls, img): """ Return the standard deviation of the slopes. :param img: Grid object :type img: GXIMG :returns: Standard deviation of grid slopes :rtype: float .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method calculates the standard deviation of the horizontal differences in the X and Y directions for the supplied image. This is useful for shading routines. A good default scaling factor is 2.5 / standard deviation. The image will be sub-sampled to a statistically meaningful number. The cell sizes are used to determine the slopes. 
""" ret_val = gxapi_cy.WrapIMU._slope_standard_deviation(GXContext._get_tls_geo(), img) return ret_val @classmethod def grid_stitch(cls, grid1, grid2, grid3, method, tr_order1, tr_order2, tr_calc, gap, spline, path, pply, weighting, width): """ Stitches together too grids :param grid1: Input Grid1 name :param grid2: Input Grid2 name :param grid3: Output Grid name :param method: Stitching method :param tr_order1: Grid 1 trend removal order :param tr_order2: Grid 2 trend removal order :param tr_calc: Trend removal type of points to use :param gap: Gap for interpolation :param spline: Interpolation spline method :param path: Path selection :param pply: `GXPLY <geosoft.gxapi.GXPLY>` object for user path :param weighting: Correction weighting :param width: Width of corrections, in grid cells (8 to 256) :type grid1: str :type grid2: str :type grid3: str :type method: int :type tr_order1: int :type tr_order2: int :type tr_calc: int :type gap: float :type spline: int :type path: int :type pply: GXPLY :type weighting: float :type width: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapIMU._grid_stitch(GXContext._get_tls_geo(), grid1.encode(), grid2.encode(), grid3.encode(), method, tr_order1, tr_order2, tr_calc, gap, spline, path, pply, weighting, width) @classmethod def grid_stitch_ctl(cls, ctl): """ Stitches together two grids - control file for options. :param ctl: Control file containing all "GRIDSTCH" parameters :type ctl: str .. versionadded:: 5.1.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Data validation is done internally, not in the GX. This is simply a way of avoiding writing a new GX wrapper every time an option is added. 
""" gxapi_cy.WrapIMU._grid_stitch_ctl(GXContext._get_tls_geo(), ctl.encode()) @classmethod def grid_tiff(cls, grds, tiff, bcol, red, green, blue, csize, reg, scale): """ Generate a Tiff (Tagged-Image file format) file with up to 16 grids. :param grds: Comma-delimited string containing names of all grids to use in Tiff generation Up to 16 grids allowed. :param tiff: Name of Tiff file to create :param bcol: Background color option. One of W (White) K (Black) C (Cyan) M (Magenta) Y (Yellow) R (Red) G (Green) B (Blue) :param red: Background Red value (0-255) :param green: Background Green (0-255) :param blue: Background Blue (0-255) :param csize: New cell size :param reg: Pixel size of registration marks :param scale: Map scale :type grds: str :type tiff: str :type bcol: str :type red: int :type green: int :type blue: int :type csize: float :type reg: int :type scale: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The background color can be either selected from one of 8 settings, or can be specified as a combination of Reg,Green, and Blue values. """ gxapi_cy.WrapIMU._grid_tiff(GXContext._get_tls_geo(), grds.encode(), tiff.encode(), bcol.encode(), red, green, blue, csize, reg, scale) @classmethod def grid_trnd(cls, imgi, imgo, tr_option, edge, order, vm, num_coefs): """ Remove a trend surface from a grid. :param imgi: Handle to input image :param imgo: Handle to output image :param tr_option: 0-calculate, 1-given, 2-replace :param edge: :ref:`IMU_TREND` :param order: Trend order :param vm: `GXVM <geosoft.gxapi.GXVM>` holds coefficients :param num_coefs: Number of coefficients :type imgi: GXIMG :type imgo: GXIMG :type tr_option: int :type edge: int :type order: int :type vm: GXVM :type num_coefs: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Both Images must be of type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`. The `GXVM <geosoft.gxapi.GXVM>` parameter must be of type REAL, and be of size 10 at most. The number of coefficients must be compatible with the order of the trend removed. Following is the number of coefficients which should be present for a given order ===== ====================== Order Number of Coefficients ----- ---------------------- 0 1 1 3 2 6 3 10 ===== ====================== """ gxapi_cy.WrapIMU._grid_trnd(GXContext._get_tls_geo(), imgi, imgo, tr_option, edge, order, vm, num_coefs) @classmethod def grid_trns(cls, grid, tcon): """ Transpose a grid by swapping the grid rows with the grid columns. :param grid: Name of the grid to transpose :param tcon: Transpose condition value :ref:`IMU_TRANS` :type grid: str :type tcon: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the grid has a line orientation that does NOT match the :ref:`IMU_TRANS` value, this method will not succeed. """ gxapi_cy.WrapIMU._grid_trns(GXContext._get_tls_geo(), grid.encode(), tcon) @classmethod def grid_vc(cls, im_gi, im_go, updown, distance): """ Apply vertical continuation convolution filter to a grid. :param im_gi: Input image :param im_go: Output image :param updown: 1 to calculate continuation upward, 0 downward :param distance: Continuation distance :type im_gi: GXIMG :type im_go: GXIMG :type updown: int :type distance: float .. 
versionadded:: 9.9 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapIMU._grid_vc(GXContext._get_tls_geo(), im_gi, im_go, updown, distance) @classmethod def grid_vd(cls, im_gi, im_go): """ Apply vertical derivative convolution filter to a grid. :param im_gi: Input image :param im_go: Output image :type im_gi: GXIMG :type im_go: GXIMG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapIMU._grid_vd(GXContext._get_tls_geo(), im_gi, im_go) @classmethod def grid_vol(cls, img, rbase, mult, vol_a, vol_b, diff): """ Calculates the grid volumes above and below a reference base. :param img: Image of the grid to calculate volume for :param rbase: Reference base :param mult: Multiplier to final volume :param vol_a: Grid Volume above reference base :param vol_b: Grid Volume below reference base :param diff: Differences between volumes :type img: GXIMG :type rbase: float :type mult: float :type vol_a: float_ref :type vol_b: float_ref :type diff: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Volumes are calculated above and below a reference base level, and reported as positive integers. A multiplier is applied to the final volume (to correct for units). The `GXIMG <geosoft.gxapi.GXIMG>` parameters MUST be of type `GS_FLOAT <geosoft.gxapi.GS_FLOAT>`! If not, the method will terminate. """ vol_a.value, vol_b.value, diff.value = gxapi_cy.WrapIMU._grid_vol(GXContext._get_tls_geo(), img, rbase, mult, vol_a.value, vol_b.value, diff.value) @classmethod def grid_wind(cls, img, out, coord, xmin, xmax, ymin, ymax, zmin, zmax, csize, clip, dec, mdf): """ Create a grid using a defined area window within a larger grid. 
:param img: Image of input grid :param out: Name of output grid file :param coord: :ref:`IMU_WIND_COORD` :param xmin: Min. limit of window in X direction (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param xmax: Max. limit of window in X direction (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param ymin: Min. limit of window in Y direction (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param ymax: Max. limit of window in Y direction (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param zmin: Minimum Z data value in output grid (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param zmax: Maximum Z data value in output grid (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param csize: New grid cell size :param clip: :ref:`IMU_WIND_DUMMIES` :param dec: Decimation factor :param mdf: Name of .MDF file for data clipping :type img: GXIMG :type out: str :type coord: int :type xmin: float :type xmax: float :type ymin: float :type ymax: float :type zmin: float :type zmax: float :type csize: float :type clip: int :type dec: int :type mdf: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapIMU._grid_wind(GXContext._get_tls_geo(), img, out.encode(), coord, xmin, xmax, ymin, ymax, zmin, zmax, csize, clip, dec, mdf.encode()) @classmethod def grid_wind2(cls, img, out, xmin, xmax, ymin, ymax, zmin, zmax, clip): """ Window a grid. 
:param img: Image of input grid :param out: Name of output grid file :param xmin: Minimum X, ground units (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param xmax: Maximum X (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param ymin: Minimum Y (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param ymax: Maximum Y (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param zmin: Minimum Z (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param zmax: Maximum Z (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param clip: :ref:`IMU_WIND_DUMMIES` :type img: GXIMG :type out: str :type xmin: float :type xmax: float :type ymin: float :type ymax: float :type zmin: float :type zmax: float :type clip: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To change the cell size or work in a different projection, first inherit the `GXIMG <geosoft.gxapi.GXIMG>` by calling The windowed grid will be adjusted/expanded to include the defined area and line up on an even grid cell. """ gxapi_cy.WrapIMU._grid_wind2(GXContext._get_tls_geo(), img, out.encode(), xmin, xmax, ymin, ymax, zmin, zmax, clip) @classmethod def grid_xyz(cls, img, xyz, index, dec_x, dec_y, lab): """ Export a Grid image to an XYZ file. :param img: Image of the grid to export :param xyz: Name of new XYZ file :param index: :ref:`IMU_XYZ_INDEX` :param dec_x: X direction decimation factor :param dec_y: Y direction decimation factor :param lab: :ref:`IMU_XYZ_LABEL` :type img: GXIMG :type xyz: str :type index: int :type dec_x: int :type dec_y: int :type lab: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` (image) of the grid to export must be of type `GS_FLOAT <geosoft.gxapi.GS_FLOAT>`. If not, this method will terminate with an error. 
""" gxapi_cy.WrapIMU._grid_xyz(GXContext._get_tls_geo(), img, xyz.encode(), index, dec_x, dec_y, lab) @classmethod def grid_type(cls, grid): """ Reports the true data the of a grid (geosoft types) :param grid: Name of the Grid :type grid: str :returns: :ref:`GS_TYPES` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIMU._grid_type(GXContext._get_tls_geo(), grid.encode()) return ret_val @classmethod def make_mi_tab_file(cls, file): """ Make a MapInfo tab file for this grid :param file: Grid file name :type file: str .. versionadded:: 5.1.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapIMU._make_mi_tab_file(GXContext._get_tls_geo(), file.encode()) @classmethod def make_mi_tabfrom_grid(cls, file): """ Make a MapInfo tab file for this grid as rendered in a map :param file: Grid file name :type file: str .. versionadded:: 5.1.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapIMU._make_mi_tabfrom_grid(GXContext._get_tls_geo(), file.encode()) @classmethod def make_mi_tabfrom_map(cls, map): """ Make a MapInfo tab file from this map :param map: Map file name :type map: str .. versionadded:: 5.1.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapIMU._make_mi_tabfrom_map(GXContext._get_tls_geo(), map.encode()) @classmethod def mosaic(cls, grids, name, ipj, cell): """ Create a mosaic image of an image list. 
:param grids: Image names ('|' separated) :param name: Output image name ("" for a memory only image) :param ipj: Projection to use (0 to use the first grid's projection) :param cell: Cell size to use (rDummy to use first grid) :type grids: str :type name: str :type ipj: GXIPJ :type cell: float :returns: `GXIMG <geosoft.gxapi.GXIMG>` Object :rtype: GXIMG .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The images are simply placed on the output image, starting with the first image. Note that this function may require very large amounts of virtual memory. """ ret_val = gxapi_cy.WrapIMU._mosaic(GXContext._get_tls_geo(), grids.encode(), name.encode(), ipj, cell) return GXIMG(ret_val) @classmethod def peak_size(cls, grid, vv_x, vv_y, max, prec, v_vz): """ Define the sizes of all the peaks in an image. :param grid: Grid file name :param vv_x: Peaks' X :param vv_y: Peaks' Y :param max: Maximum target diameter (window) in # of cells :param prec: Precision factor (see note above) :param v_vz: Returned peak (anomaly) sizes in data units :type grid: str :type vv_x: GXVV :type vv_y: GXVV :type max: int :type prec: float :type v_vz: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Extending from the peak location of an anomaly to the inflection points of the grid values along each of the 8 directions results in 8 radii. Anomaly size is defined as the 2*mediam of the 8 radii. Precision factor is used to control definition of an inflection point. For points A,B, and C, B is an inflection point if (A+C)/2.0 > B. With the precision factor, B is an inflection point only when (A+C)/2.0 > B*(1.0+Precision factor). This factor must be within (-1.0,1.0). Note: `peak_size2 <geosoft.gxapi.GXIMU.peak_size2>` is probably a better routine... 
""" gxapi_cy.WrapIMU._peak_size(GXContext._get_tls_geo(), grid.encode(), vv_x, vv_y, max, prec, v_vz) @classmethod def peak_size2(cls, grid, vv_x, vv_y, max, v_vz): """ Define the sizes of all the peaks in an image - new algorithm :param grid: Grid file name :param vv_x: Peaks' X :param vv_y: Peaks' Y :param max: Maximum target diameter (window) in # of cells :param v_vz: Returned peak (anomaly) sizes in data units :type grid: str :type vv_x: GXVV :type vv_y: GXVV :type max: int :type v_vz: GXVV .. versionadded:: 5.1.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Extending from the peak location of an anomaly to the inflection points of the grid values along each of the 8 directions results in 8 radii. Anomaly size is defined as the 2*mediam of the 8 radii. This algorithm uses 4 successive points d1, d2, d3 and d4 in any direction. Given slopes m1 = d2-d1, m2 = d3-d2 and m3 = d4-d3, an inflection point occurs between d2 and d3 if m1>m2 and m2<m3. The location index is given as i3 - s2/(s2-s1), where i3 is the index of d3, and s1=m2-m1 and s2=m3-m2. This algorithm tends to give much smaller (and more reasonable) results than `peak_size <geosoft.gxapi.GXIMU.peak_size>`. """ gxapi_cy.WrapIMU._peak_size2(GXContext._get_tls_geo(), grid.encode(), vv_x, vv_y, max, v_vz) @classmethod def pigeon_hole(cls, img, vv_x, vv_y, put): """ Pigeon-hole and count points by location into a grid. :param img: Input grid :param vv_x: X locations :param vv_y: Y locations :param put: Number of points located in the grid. :type img: GXIMG :type vv_x: GXVV :type vv_y: GXVV :type put: int_ref .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** X and Y location VVs are input. 
If a point (X, Y) is located within one-half cell width from a location in the grid, then the value of the grid at that location is incremented by 1. The cells are inclusive at the minima, and exclusive at the maxima: e.g. if dDx = dDy = 1, and dXo = dYo = 0, then the corner cell would accept values -0.5 <= X < 0.5 and -0.5 <= Y < 0.5. The grid values should be set to 0 before calling this function. The number of points "pigeon-holed" is returned to the user. This function is useful, for instance, in determining the density of sample locations in a survey area. """ put.value = gxapi_cy.WrapIMU._pigeon_hole(GXContext._get_tls_geo(), img, vv_x, vv_y, put.value) @classmethod def pigeon_hole_color(cls, img, color_img, vv_x, vv_y, itr, put): """ Pigeon-hole and count points by location and color locations in another grid based on ITR information. :param img: Input grid :param color_img: Input color grid :param vv_x: X locations :param vv_y: Y locations :param itr: Input color transform :param put: Number of points located in the grid. :type img: GXIMG :type color_img: GXIMG :type vv_x: GXVV :type vv_y: GXVV :type itr: GXITR :type put: int_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** X and Y location VVs are input. If a point (X, Y) is located within one-half cell width from a location in the grid, then the value of the grid at that location is incremented by 1. The cells are inclusive at the minima, and exclusive at the maxima: e.g. if dDx = dDy = 1, and dXo = dYo = 0, then the corner cell would accept values -0.5 <= X < 0.5 and -0.5 <= Y < 0.5. The grid values should be set to 0 before calling this function. The color grid locations are coloured by the number of items at each location, with the colour being determined by the input ITR, which should map the integer count values 1, 2, 3, etc. onto individual colours. 
The number of points "pigeon-holed" is returned to the user. This function is useful, for instance, in determining the density of sample locations in a survey area. """ put.value = gxapi_cy.WrapIMU._pigeon_hole_color(GXContext._get_tls_geo(), img, color_img, vv_x, vv_y, itr, put.value) @classmethod def profile(cls, img, x1, y1, x2, y2, samsep, vv_z): """ Extract a profile from a grid. :param img: Input image :param x1: X1 :param y1: Y1 :param x2: X2 :param y2: Y2 :param samsep: Sample separation, if 0.0, use grid cell size :param vv_z: `GXVV <geosoft.gxapi.GXVV>` in which to place result :type img: GXIMG :type x1: float :type y1: float :type x2: float :type y2: float :type samsep: float :type vv_z: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Returned `GXVV <geosoft.gxapi.GXVV>` will start at X1,Y1 and will sample up to X2,Y2 at the specified separation. """ gxapi_cy.WrapIMU._profile(GXContext._get_tls_geo(), img, x1, y1, x2, y2, samsep, vv_z) @classmethod def profile_vv(cls, img, vv_x, vv_y, vv_z): """ Extract a `GXVV <geosoft.gxapi.GXVV>` profile from a grid. :param img: Input image :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` coordinates :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` coordinates :param vv_z: `GXVV <geosoft.gxapi.GXVV>` in which to place result :type img: GXIMG :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ .. 
seealso:: iGetPolyLine_DBE """ gxapi_cy.WrapIMU._profile_vv(GXContext._get_tls_geo(), img, vv_x, vv_y, vv_z) @classmethod def range_grids(cls, grids, ipj, min_x, min_y, max_x, max_y): """ Determine bounding rectangle for a set of grids :param grids: List of grid files, "|" delimited :param ipj: Projection for the range - see notes :param min_x: Min X - returned range in the projection :param min_y: Min Y :param max_x: Max X :param max_y: Max Y :type grids: str :type ipj: GXIPJ :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If an `GXIPJ <geosoft.gxapi.GXIPJ>` is IPJ_CS_UNKNOWN, the `GXIPJ <geosoft.gxapi.GXIPJ>` of the first grid in the list will be used and the `GXIPJ <geosoft.gxapi.GXIPJ>` will be returned in this setting. Otherwise, the range in the requested `GXIPJ <geosoft.gxapi.GXIPJ>` will be determined. """ min_x.value, min_y.value, max_x.value, max_y.value = gxapi_cy.WrapIMU._range_grids(GXContext._get_tls_geo(), grids.encode(), ipj, min_x.value, min_y.value, max_x.value, max_y.value) @classmethod def range_ll(cls, img, min_lat, min_lon, max_lat, max_lon): """ Determine the range in lat. and long. of a projected grid :param img: Input image :param min_lat: Min latitude :param min_lon: Min longitude :param max_lat: Max latitude :param max_lon: Max longitude :type img: GXIMG :type min_lat: float_ref :type min_lon: float_ref :type max_lat: float_ref :type max_lon: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This routine determines the latitude and longitudes along the edge of a grid and returns the minimal and maximal values. 
It scans each row and and column and finds the first non-dummy position at the start and end, and then determines the coordinates at those points. If the grid has no data, no `GXIPJ <geosoft.gxapi.GXIPJ>` object, or if the Source Type of the `GXIPJ <geosoft.gxapi.GXIPJ>` is not `IPJ_TYPE_PCS <geosoft.gxapi.IPJ_TYPE_PCS>` (projected coordinate system), then the returned values are dummies (`GS_R8DM <geosoft.gxapi.GS_R8DM>`). """ min_lat.value, min_lon.value, max_lat.value, max_lon.value = gxapi_cy.WrapIMU._range_ll(GXContext._get_tls_geo(), img, min_lat.value, min_lon.value, max_lat.value, max_lon.value) @classmethod def stat_window(cls, img, min_x, min_y, max_x, max_y, max, st): """ Calculate grid statistics in a window :param img: Name of the grid to get stats from :param min_x: Min X window :param min_y: Min Y window :param max_x: Max X window :param max_y: Max Y window :param max: Maximum values needed, 0 for all :param st: `GXST <geosoft.gxapi.GXST>` object, stats are accumulated :type img: GXIMG :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type max: int :type st: GXST .. versionadded:: 5.0.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The maximum values needed will beused to decimate the sampling of the grid in order to improve performance. 100000 is often a good number when absolute precision is not required. """ gxapi_cy.WrapIMU._stat_window(GXContext._get_tls_geo(), img, min_x, min_y, max_x, max_y, max, st) @classmethod def update_ply(cls, img, ply): """ Update the grid boundary in the grid metadata :param img: The Grid :param ply: `GXPLY <geosoft.gxapi.GXPLY>` containing the edges. :type img: GXIMG :type ply: GXPLY .. 
versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** You can call the GridEdgePLY function to get an edge, perhaps alter the edge, such as thin it to a reasonable resolution, then put set it as the grid boundary by calling this funtion. This is similar to the GridPLYEx function except that you get to alter the `GXPLY <geosoft.gxapi.GXPLY>` before it is placed back in the `GXIMG <geosoft.gxapi.GXIMG>`. """ gxapi_cy.WrapIMU._update_ply(GXContext._get_tls_geo(), img, ply) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXEDOC.rst .. _GXEDOC: GXEDOC class ================================== .. autoclass:: geosoft.gxapi.GXEDOC :members: .. _EDOC_PATH: EDOC_PATH constants ----------------------------------------------------------------------- Four forms .. autodata:: geosoft.gxapi.EDOC_PATH_FULL :annotation: .. autoattribute:: geosoft.gxapi.EDOC_PATH_FULL .. autodata:: geosoft.gxapi.EDOC_PATH_DIR :annotation: .. autoattribute:: geosoft.gxapi.EDOC_PATH_DIR .. autodata:: geosoft.gxapi.EDOC_PATH_NAME_EXT :annotation: .. autoattribute:: geosoft.gxapi.EDOC_PATH_NAME_EXT .. autodata:: geosoft.gxapi.EDOC_PATH_NAME :annotation: .. autoattribute:: geosoft.gxapi.EDOC_PATH_NAME .. _EDOC_TYPE: EDOC_TYPE constants ----------------------------------------------------------------------- Avaialable generic document types .. autodata:: geosoft.gxapi.EDOC_TYPE_GMS3D :annotation: .. autoattribute:: geosoft.gxapi.EDOC_TYPE_GMS3D .. autodata:: geosoft.gxapi.EDOC_TYPE_VOXEL :annotation: .. autoattribute:: geosoft.gxapi.EDOC_TYPE_VOXEL .. autodata:: geosoft.gxapi.EDOC_TYPE_VOXEL_INVERSION :annotation: .. autoattribute:: geosoft.gxapi.EDOC_TYPE_VOXEL_INVERSION .. 
autodata:: geosoft.gxapi.EDOC_TYPE_GMS2D :annotation: .. autoattribute:: geosoft.gxapi.EDOC_TYPE_GMS2D .. autodata:: geosoft.gxapi.EDOC_TYPE_GEOSURFACE :annotation: .. autoattribute:: geosoft.gxapi.EDOC_TYPE_GEOSURFACE .. _EDOC_UNLOAD: EDOC_UNLOAD constants ----------------------------------------------------------------------- What type of prompt .. autodata:: geosoft.gxapi.EDOC_UNLOAD_NO_PROMPT :annotation: .. autoattribute:: geosoft.gxapi.EDOC_UNLOAD_NO_PROMPT .. autodata:: geosoft.gxapi.EDOC_UNLOAD_PROMPT :annotation: .. autoattribute:: geosoft.gxapi.EDOC_UNLOAD_PROMPT .. _EDOC_WINDOW_POSITION: EDOC_WINDOW_POSITION constants ----------------------------------------------------------------------- Window Positioning Options .. autodata:: geosoft.gxapi.EDOC_WINDOW_POSITION_DOCKED :annotation: .. autoattribute:: geosoft.gxapi.EDOC_WINDOW_POSITION_DOCKED .. autodata:: geosoft.gxapi.EDOC_WINDOW_POSITION_FLOATING :annotation: .. autoattribute:: geosoft.gxapi.EDOC_WINDOW_POSITION_FLOATING .. _EDOC_WINDOW_STATE: EDOC_WINDOW_STATE constants ----------------------------------------------------------------------- Window State Options .. autodata:: geosoft.gxapi.EDOC_WINDOW_RESTORE :annotation: .. autoattribute:: geosoft.gxapi.EDOC_WINDOW_RESTORE .. autodata:: geosoft.gxapi.EDOC_WINDOW_MINIMIZE :annotation: .. autoattribute:: geosoft.gxapi.EDOC_WINDOW_MINIMIZE .. autodata:: geosoft.gxapi.EDOC_WINDOW_MAXIMIZE :annotation: .. autoattribute:: geosoft.gxapi.EDOC_WINDOW_MAXIMIZE .. _GMS3D_MODELTYPE: GMS3D_MODELTYPE constants ----------------------------------------------------------------------- Avaialable model types .. autodata:: geosoft.gxapi.GMS3D_MODELTYPE_DEPTH :annotation: .. autoattribute:: geosoft.gxapi.GMS3D_MODELTYPE_DEPTH .. autodata:: geosoft.gxapi.GMS3D_MODELTYPE_TIME :annotation: .. autoattribute:: geosoft.gxapi.GMS3D_MODELTYPE_TIME .. 
_GMS2D_MODELTYPE: GMS2D_MODELTYPE constants ----------------------------------------------------------------------- Avaialable model types .. autodata:: geosoft.gxapi.GMS2D_MODELTYPE_DEPTH :annotation: .. autoattribute:: geosoft.gxapi.GMS2D_MODELTYPE_DEPTH .. autodata:: geosoft.gxapi.GMS2D_MODELTYPE_TIME :annotation: .. autoattribute:: geosoft.gxapi.GMS2D_MODELTYPE_TIME <file_sep>/docs/GXIP.rst .. _GXIP: GXIP class ================================== .. autoclass:: geosoft.gxapi.GXIP :members: .. _IP_ARRAY: IP_ARRAY constants ----------------------------------------------------------------------- `GXIP <geosoft.gxapi.GXIP>` Array options .. autodata:: geosoft.gxapi.IP_ARRAY_DPDP :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_DPDP .. autodata:: geosoft.gxapi.IP_ARRAY_PLDP :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_PLDP .. autodata:: geosoft.gxapi.IP_ARRAY_PLPL :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_PLPL .. autodata:: geosoft.gxapi.IP_ARRAY_GRAD :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_GRAD .. autodata:: geosoft.gxapi.IP_ARRAY_WENNER :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_WENNER .. autodata:: geosoft.gxapi.IP_ARRAY_SCHLUMBERGER :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_SCHLUMBERGER .. autodata:: geosoft.gxapi.IP_ARRAY_UNKNOWN :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_UNKNOWN .. autodata:: geosoft.gxapi.IP_ARRAY_3D :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_3D .. autodata:: geosoft.gxapi.IP_ARRAY_3D_PLDP :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_3D_PLDP .. autodata:: geosoft.gxapi.IP_ARRAY_3D_PLPL :annotation: .. autoattribute:: geosoft.gxapi.IP_ARRAY_3D_PLPL .. _IP_CHANNELS: IP_CHANNELS constants ----------------------------------------------------------------------- Channels to display .. autodata:: geosoft.gxapi.IP_CHANNELS_DISPLAYED :annotation: .. autoattribute:: geosoft.gxapi.IP_CHANNELS_DISPLAYED .. 
autodata:: geosoft.gxapi.IP_CHANNELS_SELECTED :annotation: .. autoattribute:: geosoft.gxapi.IP_CHANNELS_SELECTED .. autodata:: geosoft.gxapi.IP_CHANNELS_ALL :annotation: .. autoattribute:: geosoft.gxapi.IP_CHANNELS_ALL .. _IP_DOMAIN: IP_DOMAIN constants ----------------------------------------------------------------------- Types of Domains .. autodata:: geosoft.gxapi.IP_DOMAIN_NONE :annotation: .. autoattribute:: geosoft.gxapi.IP_DOMAIN_NONE .. autodata:: geosoft.gxapi.IP_DOMAIN_TIME :annotation: .. autoattribute:: geosoft.gxapi.IP_DOMAIN_TIME .. autodata:: geosoft.gxapi.IP_DOMAIN_FREQUENCY :annotation: .. autoattribute:: geosoft.gxapi.IP_DOMAIN_FREQUENCY .. autodata:: geosoft.gxapi.IP_DOMAIN_BOTH :annotation: .. autoattribute:: geosoft.gxapi.IP_DOMAIN_BOTH .. _IP_DUPLICATE: IP_DUPLICATE constants ----------------------------------------------------------------------- How to handle duplicates .. autodata:: geosoft.gxapi.IP_DUPLICATE_APPEND :annotation: .. autoattribute:: geosoft.gxapi.IP_DUPLICATE_APPEND .. autodata:: geosoft.gxapi.IP_DUPLICATE_OVERWRITE :annotation: .. autoattribute:: geosoft.gxapi.IP_DUPLICATE_OVERWRITE .. _IP_FILTER: IP_FILTER constants ----------------------------------------------------------------------- Fraser Filters .. autodata:: geosoft.gxapi.IP_FILTER_PANTLEG :annotation: .. autoattribute:: geosoft.gxapi.IP_FILTER_PANTLEG .. autodata:: geosoft.gxapi.IP_FILTER_PANTLEGP :annotation: .. autoattribute:: geosoft.gxapi.IP_FILTER_PANTLEGP .. autodata:: geosoft.gxapi.IP_FILTER_PYRIAMID :annotation: .. autoattribute:: geosoft.gxapi.IP_FILTER_PYRIAMID .. autodata:: geosoft.gxapi.IP_FILTER_PYRIAMIDP :annotation: .. autoattribute:: geosoft.gxapi.IP_FILTER_PYRIAMIDP .. _IP_I2XIMPMODE: IP_I2XIMPMODE constants ----------------------------------------------------------------------- Interpext Import Mode .. autodata:: geosoft.gxapi.IP_I2XIMPMODE_REPLACE :annotation: .. autoattribute:: geosoft.gxapi.IP_I2XIMPMODE_REPLACE .. 
autodata:: geosoft.gxapi.IP_I2XIMPMODE_MERGE :annotation: .. autoattribute:: geosoft.gxapi.IP_I2XIMPMODE_MERGE .. _IP_I2XINV: IP_I2XINV constants ----------------------------------------------------------------------- Type of Inversion .. autodata:: geosoft.gxapi.IP_I2XINV_IMAGE :annotation: .. autoattribute:: geosoft.gxapi.IP_I2XINV_IMAGE .. autodata:: geosoft.gxapi.IP_I2XINV_ZONGE :annotation: .. autoattribute:: geosoft.gxapi.IP_I2XINV_ZONGE .. _IP_LINES: IP_LINES constants ----------------------------------------------------------------------- Lines to display .. autodata:: geosoft.gxapi.IP_LINES_DISPLAYED :annotation: .. autoattribute:: geosoft.gxapi.IP_LINES_DISPLAYED .. autodata:: geosoft.gxapi.IP_LINES_SELECTED :annotation: .. autoattribute:: geosoft.gxapi.IP_LINES_SELECTED .. autodata:: geosoft.gxapi.IP_LINES_ALL :annotation: .. autoattribute:: geosoft.gxapi.IP_LINES_ALL .. _IP_PLOT: IP_PLOT constants ----------------------------------------------------------------------- Type of Plot .. autodata:: geosoft.gxapi.IP_PLOT_PSEUDOSECTION :annotation: .. autoattribute:: geosoft.gxapi.IP_PLOT_PSEUDOSECTION .. autodata:: geosoft.gxapi.IP_PLOT_STACKEDSECTION :annotation: .. autoattribute:: geosoft.gxapi.IP_PLOT_STACKEDSECTION .. _IP_QCTYPE: IP_QCTYPE constants ----------------------------------------------------------------------- Type of Measurement .. autodata:: geosoft.gxapi.IP_QCTYPE_RESISTIVITY :annotation: .. autoattribute:: geosoft.gxapi.IP_QCTYPE_RESISTIVITY .. autodata:: geosoft.gxapi.IP_QCTYPE_IP :annotation: .. autoattribute:: geosoft.gxapi.IP_QCTYPE_IP .. _IP_STACK_TYPE: IP_STACK_TYPE constants ----------------------------------------------------------------------- Spacing Types .. autodata:: geosoft.gxapi.IP_STACK_TYPE_MAP :annotation: .. autoattribute:: geosoft.gxapi.IP_STACK_TYPE_MAP .. autodata:: geosoft.gxapi.IP_STACK_TYPE_EQUAL :annotation: .. autoattribute:: geosoft.gxapi.IP_STACK_TYPE_EQUAL .. 
autodata:: geosoft.gxapi.IP_STACK_TYPE_GEOGRAPHIC :annotation: .. autoattribute:: geosoft.gxapi.IP_STACK_TYPE_GEOGRAPHIC .. _IP_STNSCALE: IP_STNSCALE constants ----------------------------------------------------------------------- Station Scaling .. autodata:: geosoft.gxapi.IP_STNSCALE_NONE :annotation: .. autoattribute:: geosoft.gxapi.IP_STNSCALE_NONE .. autodata:: geosoft.gxapi.IP_STNSCALE_ASPACE :annotation: .. autoattribute:: geosoft.gxapi.IP_STNSCALE_ASPACE .. autodata:: geosoft.gxapi.IP_STNSCALE_VALUE :annotation: .. autoattribute:: geosoft.gxapi.IP_STNSCALE_VALUE .. autodata:: geosoft.gxapi.IP_STNSCALE_FILE :annotation: .. autoattribute:: geosoft.gxapi.IP_STNSCALE_FILE .. _IP_SYS: IP_SYS constants ----------------------------------------------------------------------- Instrument .. autodata:: geosoft.gxapi.IP_SYS_IPDATA :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_IPDATA .. autodata:: geosoft.gxapi.IP_SYS_IP2 :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_IP2 .. autodata:: geosoft.gxapi.IP_SYS_IP6 :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_IP6 .. autodata:: geosoft.gxapi.IP_SYS_IP10 :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_IP10 .. autodata:: geosoft.gxapi.IP_SYS_SYSCALR2 :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_SYSCALR2 .. autodata:: geosoft.gxapi.IP_SYS_IPR11 :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_IPR11 .. autodata:: geosoft.gxapi.IP_SYS_IPR12 :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_IPR12 .. autodata:: geosoft.gxapi.IP_SYS_PHOENIX :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_PHOENIX .. autodata:: geosoft.gxapi.IP_SYS_PHOENIX_V2 :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_PHOENIX_V2 .. autodata:: geosoft.gxapi.IP_SYS_ELREC_PRO :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_ELREC_PRO .. autodata:: geosoft.gxapi.IP_SYS_PROSYS_II :annotation: .. autoattribute:: geosoft.gxapi.IP_SYS_PROSYS_II .. 
_IP_UBC_CONTROL: IP_UBC_CONTROL constants ----------------------------------------------------------------------- Types of Domains .. autodata:: geosoft.gxapi.IP_UBC_CONTROL_NONE :annotation: .. autoattribute:: geosoft.gxapi.IP_UBC_CONTROL_NONE .. autodata:: geosoft.gxapi.IP_UBC_CONTROL_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.IP_UBC_CONTROL_DEFAULT .. autodata:: geosoft.gxapi.IP_UBC_CONTROL_FILE :annotation: .. autoattribute:: geosoft.gxapi.IP_UBC_CONTROL_FILE .. autodata:: geosoft.gxapi.IP_UBC_CONTROL_VALUE :annotation: .. autoattribute:: geosoft.gxapi.IP_UBC_CONTROL_VALUE .. autodata:: geosoft.gxapi.IP_UBC_CONTROL_LENGTH :annotation: .. autoattribute:: geosoft.gxapi.IP_UBC_CONTROL_LENGTH .. _IP_PLDP_CONV: IP_PLDP_CONV constants ----------------------------------------------------------------------- Types of Domains .. autodata:: geosoft.gxapi.IP_PLDP_CONV_CLOSE_RX :annotation: .. autoattribute:: geosoft.gxapi.IP_PLDP_CONV_CLOSE_RX .. autodata:: geosoft.gxapi.IP_PLDP_CONV_MID_RX :annotation: .. autoattribute:: geosoft.gxapi.IP_PLDP_CONV_MID_RX .. autodata:: geosoft.gxapi.IP_PLDP_CONV_DISTANT_RX :annotation: .. autoattribute:: geosoft.gxapi.IP_PLDP_CONV_DISTANT_RX <file_sep>/geosoft/gxpy/gx.py """ GX Context and related methods required for Geosoft Python. :Classes: :`GXpy`: the Geosoft GX context The GX context is a singleton, which is either created for stand-alone Python scripts, or is provided to the script for extensions to Geosoft Desktop applications. .. note:: Regression tests provide usage examples: `gx tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_gx.py>`_ """ import tkinter.ttk as ttk import pprint import os import shutil import datetime import atexit import tempfile import threading import geosoft import geosoft.gxapi as gxapi from . import utility as gxu from . 
import system as gxs __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class GXException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.gx`. .. versionadded:: 9.1 """ pass _singleton_getattr_default = object() _is_sphinx_build = os.environ.get('GEOSOFT_SPHINX_BUILD', '0') == '1' _tls = threading.local() def _get_gx_instance(): return GXpyContext._get_instance() def _have_gx(): return GXpyContext._get_instance() is not None class TLSGlobals: _res_id = 0 _res_heap = {} _max_resource_heap = 1000000 _stack_depth = 5 _max_warnings = 10 _NULL_ID = -1 def _get_tls_globals(): global _tls tls_globals = getattr(_tls, '_gxpy_tls_globals', None) if tls_globals is not None: return tls_globals if _tls is None: return None _tls._gxpy_tls_globals = TLSGlobals() return _tls._gxpy_tls_globals def _reset_tls_globals(): global _tls # Reset singlton wrappers _tls._gxpy_tls_globals = None def track_resource(resource_class, info): """ Track a resource. Resource tracking is useful for debugging resource leaks. If you create a class or resource that you expect to be removed before your script ends you can track it with this call. When you dispose of your resource call :meth:`pop_resource` to remove it from the tracking heap. On exit, any resource left on the tracked resource heap will be reported together with the call stack for each resource and the information you provided. :param resource_class: the resource class name :param info: some information about the resource :returns: resource_id, can be used with :meth:`pop_resource` .. 
versionadded:: 9.2 """ tls_globals = _get_tls_globals() if tls_globals._res_id < tls_globals._max_resource_heap: tls_globals._res_id += 1 rs = "{}:".format(resource_class) for i in range(tls_globals._stack_depth): f = gxs.func_name(i + 2) if f is None: break rs += '<{}'.format(gxs.func_name(i + 2)) rs += ' [{}]'.format(info) tls_globals._res_heap[tls_globals._res_id] = rs return tls_globals._res_id else: return tls_globals._NULL_ID def pop_resource(res_id): """ Pop a tracked resource off the resource stack. :param res_id: the resource id returned by :meth:`track_resource` .. versionadded:: 9.2 .. versionchanged:: 9.3.1 changed id to res_id to avoid built-in shadow """ tls_globals = _get_tls_globals() if res_id != tls_globals._NULL_ID: if len(tls_globals._res_heap): try: del (tls_globals._res_heap[res_id]) except KeyError: pass def _log_file_error(fnc, path, excinfo): if _have_gx(): gx = _get_gx_instance() if hasattr(gx, 'log'): gx.log(_t("error removing temporary file\n \"{}\"\nfunction \"{}\"\nexception\"{}\"\n") .format(path, str(fnc), str(excinfo))) def gx(): """Returns the current thread `GXpy` instance.""" if not _have_gx(): raise gxapi.GXAPIError("A GXpy instance has not been created for current thread yet, " "or the original context has been released.") return _get_gx_instance() def GXpy(name=__name__, version=__version__, parent_window=0, log=None, max_res_heap=10000000, res_stack=6, max_warnings=10, suppress_progress=False, key='Core', per_user_key=False, redist_override=False, redist_dir=None, user_dir=None, temp_dir=None): """ Instantiate a Geosoft GX context. There should be only one instance of this created per thread. To simplify usage, use this method to instantiaate the context and the :func:`.gxpy.gx.gx` methods instead to obtain the current thread instance. It is a good idea to use the with statement pattern to ensure timely cleanup of unmanaged resources. 
:parameters: :name: application name, default is the script name :version: application version number, default Geosoft version :parent_window: ID of the parent window. A parent window is required for GUI-dependent functions to work. Set `parent_window=-1` to create a Tkinter frame that provides a default parent window handle for GUI/Viewer functions. :log: name of a file to record logging information, or a call-back function that accepts a string. Specifying `log=''` will log to a default file named using the current date and time. If not provided calls to log() are ignored. :max_res_heap: If logging is on, open gxpy resources (like grids, or databases) are tracked. This is the maximum size of resource heap for tracking open resources. Set to 0 to not track resources. On exit, if any resources remain open a warning is logged together with a list of the open resources, each with a call stack to help find the function that created the resources. :res_stack: Depth of the call-stack to report for open-resource warning. :max_warnings: Maximum number of resource warnings to report. :suppress_progress: True to suppress progress reporting (default False) :key: Default Geosoft registry key to use (in absence of geosoft.key file) to discover GX developer common redistributables or Desktop Applications software (default 'Core') :per_user_key: Use per-user registry instead of local machine (default False) :redist_override: Override registry mechanism to discover redistributables with redist_dir, user_dir and temp_dir parameters. (default False) :redist_dir: Path containing the redistributable files, i.e. containing bin, csv and other folders. Only used if redist_override is True (default None) :user_dir: Writable path to directory containing the user redistributable files. Only used if redist_override is True (default None). If redist_override is True and user_dir is None a unique folder in system temp will be used for this purpose. :temp_dir: Path to use for temporary files. 
Only used if redist_override is True (default None) If redist_override is True and temp_dir is None a unique folder in system temp will be used for this purpose. .. seealso:: Class :class:`.gxpy.gx.GXpyContext` """ return GXpyContext(name, version, parent_window, log, max_res_heap, res_stack, max_warnings, suppress_progress, key, per_user_key, redist_override, redist_dir, user_dir, temp_dir) class GXpyContext: """ Geosoft GX context. There should be only one instance of this created per thread. To simplify usage, use the :func:`.gxpy.gx.GXpy` and :func:`.gxpy.gx.gx` methods instead of instantiating this class directly. This class does not need to be instantiated by the main thread in Oasis montaj desktop extension scripts, since the context is instantiated prior to entering the rungx method. If called, the desktop context is returned. It is a good idea to use the with statement pattern to ensure timely cleanup of unmanaged resources. :parameters: :name: application name, default is the script name :version: application version number, default Geosoft version :parent_window: ID of the parent window. A parent window is required for GUI-dependent functions to work. Set `parent_window=-1` to create a Tkinter frame that provides a default parent window handle for GUI/Viewer functions. :log: name of a file to record logging information, or a call-back function that accepts a string. Specifying `log=''` will log to a default file named using the current date and time. If not provided calls to log() are ignored. :max_res_heap: If logging is on, open gxpy resources (like grids, or databases) are tracked. This is the maximum size of resource heap for tracking open resources. Set to 0 to not track resources. On exit, if any resources remain open a warning is logged together with a list of the open resources, each with a call stack to help find the function that created the resources. :res_stack: Depth of the call-stack to report for open-resource warning. 
:max_warnings: Maximum number of resource warnings to report. :suppress_progress: True to suppress progress reporting (default False) :key: Default Geosoft registry key to use (in absence of geosoft.key file) to discover GX developer common redistributables or Desktop Applications software (default 'Core') :per_user_key: Use per-user registry instead of local machine (default False) :redist_override: Override registry mechanism to discover redistributables with redist_dir, user_dir and temp_dir parameters. (default False) :redist_dir: Path containing the redistributable files, i.e. containing bin, csv and other folders. Only used if redist_override is True (default None) :user_dir: Writable path to directory containing the user redistributable files. Only used if redist_override is True (default None). If redist_override is True and user_dir is None a unique folder in system temp will be used for this purpose. :temp_dir: Path to use for temporary files. Only used if redist_override is True (default None) If redist_override is True and temp_dir is None a unique folder in system temp will be used for this purpose. :Properties: :gxapi: GX context to be used to call geosoft.gxapi methods :tkframe: tkframe for UI applications. Will be None if a the context was created from a window application. :gid: User's Geosoft ID :current_date: date at start-up :current_utc_date: UTC date at start-up :current_time: time at start-up :current_utc_time: UTC time at start-up :folder_workspace: Geosoft workspace folder :folder_temp: Geosoft temporary folder :folder_user: Geosoft Desktop installation 'user' folder :raises: :GXException(): if unable to create context .. versionadded:: 9.1 .. versionchanged:: 9.2 | * `parent_window=-1` creates a Tkinter frame as a parent for scripts that call UI functions. | * Added `log` argument to support `log()`. | * Made environment dictionary properties, deprecated environment. 
""" def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return "GID: {}, class: {}".format(self.gid, self.license_class) def __enter__(self): return self def __exit__(self, _type, _value, _traceback): self.__del__() def __del__(self): self._delete() _gxpy_deleted = True def _delete(self): if not getattr(self, '_gxpy_deleted', True): self._gxpy_deleted = True tls_globals = _get_tls_globals() if tls_globals is None: return self.log('\nGX closing') temp_folder = self.temp_folder() if temp_folder and (temp_folder != gxu.folder_temp()): shutil.rmtree(temp_folder, ignore_errors=False, onerror=_log_file_error) if len(tls_globals._res_heap): # resources were created but not deleted or removed self.log(_t('Warning - cleaning up resources that are still open:')) i = 0 for s in tls_globals._res_heap.values(): if i == tls_globals._max_warnings: self.log(_t(' and there are {} more (change GXpy(max_warnings=) to see more)...' .format(len(tls_globals._res_heap) - i))) break self.log(' ', s) i += 1 self.close_log() self._clean_redist_folders() del self._tkframe self._gxapi.__del__() del self._gxapi GXpyContext._set_instance(None) def __init__(self, name=__name__, version=__version__, parent_window=0, log=None, max_res_heap=10000000, res_stack=6, max_warnings=10, suppress_progress=False, key='Core', per_user_key=False, redist_override=False, redist_dir=None, user_dir=None, temp_dir=None): if _have_gx(): raise gxapi.GXAPIError("A GXpy instance has already been created for current thread.") tls_globals = _get_tls_globals() # Reset testing UUID base with every init gxu.d_uuid_count = 1 if log is None: tls_globals._max_resource_heap = 0 else: tls_globals._max_resource_heap = max_res_heap tls_globals._stack_depth = max(0, res_stack) tls_globals._max_warnings = max(0, max_warnings) self._enter_count = 0 self._redist_dir = redist_dir self._redist_user_dir = user_dir self._redist_user_dir_cleanup = False self._redist_temp_dir = temp_dir 
self._redist_temp_dir_cleanup = False # create a Tkinter parent frame for the viewers if not parent_window == 0: try: import pythoncom except ImportError: raise ImportError(_t( 'Unable to import the pythoncom module, which is needed for GUI APIs to work.')) self._tkframe = None if parent_window == -1: self._tkframe = ttk.Frame(master=None) parent_window = self._tkframe.winfo_id() self._parent_window = parent_window try: flags = 0 if suppress_progress: if self._parent_window: flags = 128 else: flags = 64 if redist_override: if self._redist_dir is None: raise GXException('redist_dir needs to be defined with redist_override.') geodist_path = os.path.normpath(os.path.join(self._redist_dir, 'bin', 'geodist.dll')) if not os.path.exists(geodist_path): raise GXException('redist_dir needs to point to directory containing Geosoft redistributables. ' '(Could not find {})).'.format(geodist_path)) if self._redist_temp_dir is None: self._redist_temp_dir = tempfile.mkdtemp() self._redist_temp_dir_cleanup = True elif not os.path.exists(self._redist_temp_dir): raise GXException('temp_dir needs to point an existing directory (or pass None to use ' 'automatic temporary folder).') if self._redist_user_dir is None: self._redist_user_dir = tempfile.mkdtemp() self._redist_user_dir_cleanup = True elif not os.path.exists(self._redist_user_dir): raise GXException('user_dir needs to point an existing directory (or pass None to use ' 'automatic temporary folder).') self._gxapi = gxapi.GXContext.create(name, version, self._parent_window, flags, key=key, per_user_key=per_user_key, redist_override=redist_override, redist_dir=self._redist_dir, user_dir=self._redist_user_dir, temp_dir=self._redist_temp_dir) except gxapi.GXAPIError as e: self._gxapi = None raise GXException(_t('GX services are not available.\n{}'.format(e))) user = gxapi.str_ref() company = gxapi.str_ref() gxapi.GXSYS.get_licensed_user(user, company) self.gid = user.value self._temp_file_folder = None self._keep_temp_files = True 
self._start = datetime.datetime.utcnow() self._gxid = gxu.uuid() self._entitlements = None # general properties self.current_date = gxapi.GXSYS.date() self.current_utc_date = gxapi.GXSYS.utc_date() self.current_time = gxapi.GXSYS.time() self.current_utc_time = gxapi.GXSYS.utc_time() self.folder_workspace = gxu.folder_workspace() self.folder_temp = gxu.folder_temp() self.folder_user = gxu.folder_user() # determine license try: # test if we can create a GXST2 instance, which requires a minimal license gxapi.GXST2.create() self._entitled = True except gxapi.GXAPIError: self._entitled = False # create a log file if log is None: self._logf = None self._log_it = None else: if callable(log): self._log_it = log self._logf = None else: if len(log) == 0: dts = "{}-{}-{}({}_{}_{}_{})" \ .format(self._start.year, str(self._start.month).zfill(2), str(self._start.day).zfill(2), str(self._start.hour).zfill(2), str(self._start.minute).zfill(2), str(self._start.second).zfill(2), str(self._start.microsecond // 1000).zfill(3)) log = "_gx_" + dts + ".log" self._logf = open(log, "wb") self._log_it = self._log_to_file self.log('\n') self.log('-' * 80) self.log('UTC: {}'.format(self._start)) self.log('Script: {}'.format(gxs.app_name())) self.log('GX API: {}'.format(__version__)) self.log('Core API: {}'.format(self.geosoft_version_label)) self.log('BIN_PATH: {}'.format(os.environ.get('GX_GEOSOFT_BIN_PATH', 'default'))) self.log('Project: {}'.format(gxu.folder_workspace())) self.log('GID: {}'.format(self.gid)) self.log('Entitled: {}'.format(self.entitled)) self.log('-' * 80) self.log('\n') self.log('\nGX open') self._gxpy_deleted = False GXpyContext._set_instance(self) _tls_instance_name = '_GXpyContext_tls_instance' @classmethod def _set_instance(cls, instance): global _tls setattr(_tls, cls._tls_instance_name, instance) @classmethod def _get_instance(cls): global _tls return getattr(_tls, cls._tls_instance_name, None) def _log_to_file(self, *args): now = datetime.datetime.now() dts = 
"{}-{}-{} {}:{}:{}:{} ".format(now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(now.hour).zfill(2), str(now.minute).zfill(2), str(now.second).zfill(2), str(now.microsecond // 1000).zfill(3)) for log_str in args: for l in str(log_str).split('\n'): logstr = dts + l + os.linesep self._logf.write(logstr.encode('utf-8')) @property def gxapi(self): """gxapi context for calls to geosoft.gxapi""" return self._gxapi @property def tkframe(self): """tkframe if created fro this context, None if not created""" return self._tkframe @property def parent_window(self): """parent window for this context""" return self._parent_window @property def version(self): """ API version description .. versionadded:: 9.3 """ return __version__ @property def profile_name(self): """ Geosoft ID profile use name. .. versionadded:: 9.4 """ sr = gxapi.str_ref() gxapi.GXSYS.get_profile_name(sr) return sr.value @property def profile_url(self): """ Geosoft ID profile url in My Geosoft portal. .. versionadded:: 9.4 """ sr = gxapi.str_ref() gxapi.GXSYS.get_profile_url(sr) return sr.value @property def main_wind_id(self): """ The main window ID (HWND cast to unsigned for Windows). .. versionadded:: 9.1 """ if self._parent_window == 0: return self._gxapi.get_main_wnd_id() else: return self._parent_window @property def active_wind_id(self): """ The active window ID (HWND cast to unsigned for Windows). .. versionadded:: 9.1 """ return self._gxapi.get_active_wnd_id() @property def geosoft_name(self): """ Geosoft installed product name .. versionadded:: 9.3.2 """ i = gxapi.str_ref() gxapi.GXSYS.get_sys_info(gxapi.SYS_INFO_PRODUCTNAME, i) return i.value @property def geosoft_build_label(self): """ Geosoft build label. .. versionadded:: 9.3.2 """ i = gxapi.str_ref() gxapi.GXSYS.get_sys_info(gxapi.SYS_INFO_BUILD_LABEL, i) return i.value @property def geosoft_build_number(self): """ Geosoft build numberl. .. 
versionadded:: 9.3.2 """ i = gxapi.str_ref() gxapi.GXSYS.get_sys_info(gxapi.SYS_INFO_BUILD_NUMBER, i) return int(i.value) @property def geosoft_version_label(self): """ Geosoft version label. .. versionadded:: 9.3.2 """ i = gxapi.str_ref() gxapi.GXSYS.get_sys_info(gxapi.SYS_INFO_VERSION_LABEL, i) return i.value @property def geosoft_version_major(self): """ Geosoft major version number. .. versionadded:: 9.3.2 """ i = gxapi.str_ref() gxapi.GXSYS.get_sys_info(gxapi.SYS_INFO_VERSION_MAJOR, i) return int(i.value) @property def geosoft_version_minor(self): """ Geosoft minor version number. .. versionadded:: 9.3.2 """ i = gxapi.str_ref() gxapi.GXSYS.get_sys_info(gxapi.SYS_INFO_VERSION_MINOR, i) return int(i.value) @property def geosoft_version_micro(self): """ Geosoft micro version number. .. versionadded:: 9.3.2 """ i = gxapi.str_ref() gxapi.GXSYS.get_sys_info(gxapi.SYS_INFO_VERSION_SP, i) return int(i.value) def remove_stale_temporary_files(self, age=24 * 60 * 60): """ Removes stale temporary files from the current instance temporary file folder. :param age: files older than this age is seconds are removed. The default is 24 * 60 * 60. Many classes that depend on a persistent file will support the creation of a class instance without providing a specific file name, in which case a temporary file is created in the temporary folder for this running GX instance. Upon loss of GX context all temporary files will be removed, but for a long-running process, such as a GX instnce that supports a web application, it can be useful to use this function to remove stale files and free valuable disk space. Folders, if any, are not removed, but stale-dated files within folders will be removed. .. 
versionadded:: 9.3.2 """ def remove_all_files(folder): for filename in list(os.listdir(folder)): ff = os.path.join(folder, filename) if os.path.isdir(ff): remove_all_files(ff) else: if not gxu.is_file_locked(ff, age=age): gxu.delete_file(ff) remove_all_files(self.temp_folder()) def disable_app(self): """ Disables application windows to allow modal Python UI. Call before opening your own UI window. .. versionadded:: 9.1 """ self._gxapi.enable_application_windows(False) def enable_app(self): """ Enables application windows to allow modal Python UI. Call before returning control to OM. .. versionadded:: 9.1 """ self._gxapi.enable_application_windows(True) def entitlements(self): """ :returns: The current user entitlements as a dictionary. .. versionadded:: 9.1 """ if not self._entitlements: lst = gxapi.GXLST.create(1000) gxapi.GXSYS.get_entitlement_rights(lst) self._entitlements = gxu.dict_from_lst(lst) return self._entitlements def has_entitlement(self, ent): """ Returns True if the user has this entitlement. 
:param ent: Entitlement number or descriptive name (case sensitive) | Partial list of entitlements as of 9.3 platform (subject to change): | 1000: "Oasis montaj™ Base" | 10000: "Oasis montaj™ Mapping and Processing System" | 100010: "Geosoft - Virtual Computer License" | 10100: "Geophysics" | 10101: "Geochemistry" | 10102: "Drillhole Plotting" | 10103: "Induced Polarization" | 10104: "Geophysics Levelling" | 10105: "MAGMAP Filtering" | 10106: "Grav/Mag Interpretation" | 10107: "Airborne Quality Control" | 10108: "256-Channel Radiometric Processing" | 10109: "Gravity and Terrain Correction" | 10110: "GridKnit" | 10111: "UXO Land" | 10114: "UXO Marine" | 10500: "montaj plus™ Modeling Lite (PotentQ)" | 10520: "GM-SYS Basic Profile Modeling" | 10521: "GM-SYS Intermediate Profile Modeling" | 10522: "GM-SYS Advanced Profile Modeling" | 10523: "GM-SYS 3D Modeling" | 10524: "Depth to Basement" | 10525: "Isostatic Residual" | 10540: "montaj plus™ Grav/Mag Filtering" | 10541: "montaj plus™ Compudrape" | 10550: "montaj plus™ Praga Radiometric Processing System" | 10560: "montaj plus™ CET Grid Analysis" | 10561: "montaj plus™ CET Porphyry Analysis" | 2000: "ArcGIS" | 3000: "MapInfo" | 30000: "Target™ Surface and Drillhole Mapping" | 30101: "Target™ Geochemistry" | 40000: "Target™ for ArcGIS Surface and Drillhole Mapping" | 41000: "Geochemistry for ArcGIS" | 5104: "montaj™ Geophysics Leveling - Basic" | 5106: "montaj™ Grav/Mag Interpretation - Basic" .. versionadded:: 9.3 """ ent = str(ent) if ent in self.entitlements().keys(): return True if ent in self.entitlements().values(): return True return False @property def entitled(self): """ True if this user has a minimal Geosoft desktop licence/entitlement .. versionadded:: 9.3 """ return self._entitled @property def license_class(self): """ The user's license class. .. versionadded:: 9.1 """ lc = gxapi.str_ref() gxapi.GXSYS.get_license_class(lc) return lc.value def run_gx(self, gx): """ Runs a GX. 
:param gx: GX name to run :returns: success, cancelled, exit_val, error_list, warning_list .. versionadded:: 9.6 """ exit_val = gxapi.int_ref() ret = gxapi.GXSYS.run_gx_ex(gx, exit_val) success = ret == 0 cancelled = ret == -1 error_list = [] warning_list = [] for i in range(0, gxapi.GXSYS.num_errors_ap()): err_no = gxapi.GXSYS.get_error_ap(i) err = gxapi.str_ref() gxapi.GXSYS.get_error_message_ap(i, err) if err_no < 0: warning_list.append(err.value) else: error_list.append(err.value) gxapi.GXSYS.clear_err_ap() return success, cancelled, exit_val.value, error_list, warning_list def temp_folder(self): """ Return the GX temporary folder path. Each GX instance will create an instance-specific temporary folder as a child in the Geosoft temporary folder. Placing temporary files in the GX-specific temporary folder will ensure temporary file names will not collide with other running GX-based programs, and that all temporarty files are removed on termination of this GX. Call `keep_temp_folder` to prevent deletion of the temporary files, which can be useful when debugging. .. versionadded:: 9.2 """ if self._temp_file_folder is None: # create a temporary folder for this GX instance path = gxu.folder_temp() uuid = "_gx_" + self._gxid self._temp_file_folder = os.path.join(path, uuid) try: os.makedirs(self._temp_file_folder, exist_ok=True) self._keep_temp_files = False except OSError: self._temp_file_folder = path self._keep_temp_files = True return self._temp_file_folder def _clean_redist_folders(self): if hasattr(self, '_redist_user_dir_cleanup') and self._redist_user_dir_cleanup: shutil.rmtree(self._redist_user_dir, ignore_errors=True) if hasattr(self, '_redist_user_temp_cleanup') and self._redist_temp_dir_cleanup: shutil.rmtree(self._redist_temp_dir, ignore_errors=True) def keep_temp_folder(self, keep=True): """ Keep temporary file folder setting. :param keep: True to keep the temporary file folder, False to remove .. 
versionadded:: 9.2 """ self._keep_temp_files = keep def temp_file(self, ext=''): """ Return a unique temporary file name as a full path. The temporary file is created in the instance temporary folder and will be deleted when this GXpy instance is deleted. :param ext: optional extension :returns: uuid-based file name in the instance temporary folder. .. versionadded:: 9.2 """ if ext and ext[0] != '.': ext = '.' + ext return os.path.join(self.temp_folder(), gxu.uuid() + ext) def environment(self, formated_indent=-1): """ .. deprecated:: 9.2 replaced by properties. """ info = {'gid': self.gid, 'current_date': gxapi.GXSYS.date(), 'current_utc_date': gxapi.GXSYS.utc_date(), 'current_time': gxapi.GXSYS.time(), 'current_utc_time': gxapi.GXSYS.utc_time(), 'license_class': self.license_class(), 'folder_workspace': gxu.folder_workspace(), 'folder_temp': gxu.folder_temp(), 'folder_user': gxu.folder_user(), } if formated_indent >= 0: pp = pprint.PrettyPrinter(indent=formated_indent) return pp.pformat(info) else: return info def log(self, *args): """ Log a string to the log file or log call-back as defined when creating :class:`GXpy` instance. :param args: arguments to log, each will be converted to a str() If logging to a file each line is preceded by the date and time: .. code:: 2016-12-25 12:34:16.175 log_str_line_1 2016-12-25 12:34:16.175 log_str_line_2 .. versionadded:: 9.2 """ log_it = getattr(self, '_log_it', None) if log_it: log_it(*args) def close_log(self): """close logging""" self.log('GX closed') logf = getattr(self, '_logf', None) if logf: logf.close() def elapsed_seconds(self, tag='', log=False): """ Return the elapsed seconds since this GX instance started. The elapsed time is logged if logging is on. :param log: True to log, which also requires logging to be on :param tag: optional string to add to the log :returns: elapsed time in seconds .. 
versionadded:: 9.2 """ elapsed = datetime.datetime.now() - self._start elapsed_seconds = elapsed.seconds + elapsed.microseconds / 1000000.0 if log: if tag: tag = '{}> '.format(tag) self.log('{}Elapsed seconds: {} ({} minutes, {}.{} seconds)'. format(tag, elapsed_seconds, elapsed.seconds // 60, elapsed.seconds % 60, str(elapsed.microseconds).zfill(6))) return elapsed_seconds #################################################### # deprecated def folder_workspace(self): """ .. deprecated:: 9.2 use :meth:`geosoft.gxpy.utility.folder_workspace` """ if self: return gxu.folder_workspace() def folder_temp(self): """ .. deprecated:: 9.2 use :meth:`geosoft.gxpy.utility.folder_temp` """ if self: return gxu.folder_temp() def folder_user(self): """ .. deprecated:: 9.2 use :meth:`geosoft.gxpy.utility.folder_user` """ if self: return gxu.folder_user() <file_sep>/geosoft/gxapi/GXARCDB.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXDAT import GXDAT ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXARCDB(gxapi_cy.WrapARCDB): """ GXARCDB class. The `GXARCDB <geosoft.gxapi.GXARCDB>` class is used in ArcGIS to access table contents from data sources and layers. """ def __init__(self, handle=0): super(GXARCDB, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXARCDB <geosoft.gxapi.GXARCDB>` :returns: A null `GXARCDB <geosoft.gxapi.GXARCDB>` :rtype: GXARCDB """ return GXARCDB() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def create_dat(self, x_field, y_field, d_field): """ Create a handle to a ARCGIS table `GXDAT <geosoft.gxapi.GXDAT>` 2D object :param x_field: Name of X field in table :param y_field: Name of Y field in table :param d_field: Name of Data field in table :type x_field: str :type y_field: str :type d_field: str :returns: `GXDAT <geosoft.gxapi.GXDAT>`, terminates if creation fails :rtype: GXDAT .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._create_dat(x_field.encode(), y_field.encode(), d_field.encode()) return GXDAT(ret_val) def create_dat_3d(self, x_field, y_field, z_field, d_field): """ Create a handle to a ARCGIS table `GXDAT <geosoft.gxapi.GXDAT>` 3D object :param x_field: Name of X field in table :param y_field: Name of Y field in table :param z_field: Name of Z field in table :param d_field: Name of Data field in table :type x_field: str :type y_field: str :type z_field: str :type d_field: str :returns: `GXDAT <geosoft.gxapi.GXDAT>`, terminates if creation fails :rtype: GXDAT .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._create_dat_3d(x_field.encode(), y_field.encode(), z_field.encode(), d_field.encode()) return GXDAT(ret_val) @classmethod def current(cls): """ This method return a handle to the current table :returns: `GXARCDB <geosoft.gxapi.GXARCDB>` Handle, `ARCDB_NULL <geosoft.gxapi.ARCDB_NULL>` if no table selected :rtype: GXARCDB .. 
versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapARCDB._current(GXContext._get_tls_geo())
        return GXARCDB(ret_val)

    def export_to_db(self, db, temp, line):
        """
        Export data from an `GXARCDB <geosoft.gxapi.GXARCDB>` table into a group in a Geosoft GDB using a template.

        :param db:    Database
        :param temp:  Import template name
        :param line:  Oasis montaj line name to create (overrides template value)
        :type  db:    GXDB
        :type  temp:  str
        :type  line:  str

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. The import template can be in the local directory or the GEOSOFT directory.
        2. If the line already exists, the data will overwrite the existing data.
        """
        self._export_to_db(db, temp.encode(), line.encode())

    def field_lst(self, lst):
        """
        Place the list of field names in a `GXLST <geosoft.gxapi.GXLST>`.

        :type lst: GXLST

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If Z or M values are supported by the table geometry the strings "<Z Values>" and "<M Values>" will be added accordingly.
        """
        self._field_lst(lst)

    @classmethod
    def from_i_unknown(cls, unknown):
        """
        This method attempts to make a table handle from an IUnknown pointer. Returns `GXARCDB <geosoft.gxapi.GXARCDB>` Handle, `ARCDB_NULL <geosoft.gxapi.ARCDB_NULL>` if not successful

        :param unknown:  IUnknown pointer
        :type  unknown:  int
        :rtype:          GXARCDB

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapARCDB._from_i_unknown(GXContext._get_tls_geo(), unknown)
        return GXARCDB(ret_val)

    def get_ipj(self, ipj):
        """
        Get georeference information from a table.
:param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` to fill in :type ipj: GXIPJ .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the table does not have an `GXIPJ <geosoft.gxapi.GXIPJ>`, the `GXIPJ <geosoft.gxapi.GXIPJ>` that is returned will have an unknown projection. """ self._get_ipj(ipj) def exist_field(self, field): """ This method checks to see if the specified field exists in the table. :param field: Name of Field :type field: str :returns: 0 - Field does not exist 1 - Field Exists :rtype: int .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._exist_field(field.encode()) return ret_val def get_i_unknown(self): """ This method gets the IUnknown pointer :returns: IUnknown pointer :rtype: int .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_i_unknown() return ret_val def import_chem_database_wizard(self, temp, type): """ Template creation for importing geochem data. :param temp: Template to make :param type: :ref:`IMPCH_TYPE` :type temp: str :type type: int :returns: 0-OK 1-Cancel :rtype: int .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._import_chem_database_wizard(temp.encode(), type) return ret_val @classmethod def sel_tbl_ex_gui(cls, table_type): """ Select table `GXGUI <geosoft.gxapi.GXGUI>` with table type. :param table_type: :ref:`ARC_SELTBL_TYPE` :type table_type: int_ref :returns: Handle to the table (Terminate on Error) :rtype: GXARCDB .. 
versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val, table_type.value = gxapi_cy.WrapARCDB._sel_tbl_ex_gui(GXContext._get_tls_geo(), table_type.value)
        return GXARCDB(ret_val)

    @classmethod
    def sel_tbl_gui(cls):
        """
        Select table `GXGUI <geosoft.gxapi.GXGUI>`.

        :returns: Handle to the table
        :rtype:   GXARCDB

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Terminates with Cancel on cancel, returns `ARCDB_NULL <geosoft.gxapi.ARCDB_NULL>` if there are no valid tables in current document.
        """
        ret_val = gxapi_cy.WrapARCDB._sel_tbl_gui(GXContext._get_tls_geo())
        return GXARCDB(ret_val)

### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend

### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer<file_sep>/examples/stand-alone/chanadd.py
"""
   Add a constant value to a channel on all selected lines.

   This is a sample Python program that illustrates how to connect to the GX
   developer environment from a stand-alone program.  Following are the basic steps:

        1. Get (create) a GX Object handle.
        2. Collect command-line parameters
        3. Open the database
        4. Process the data

"""

import os
import sys
import argparse as argp

import geosoft.gxpy as gxpy


def rungx():
    raise Exception("This is not an extension. Please use a python interpreter.")


def process_database(db, channel_name, add_value):
    """
    Process all selected lines in a database by adding a constant value to a channel.
    The data is processed in-place.
""" # work through the data a line at a time - get a list of selected lines print('Processing selected lines...') lines = db.list_lines() # for each line, get the data, add a value, return the data to the line for l in lines: # print to the console to reflect progress print('line {}...'.format(str(l))) # get the data and determine the dummy to the data type data, ch, fid = db.read_line(l, channels=channel_name) dummy = gxpy.utility.gx_dummy(data.dtype) # make a dummy mask so we can replace dummies after processing dMask = gxpy.utility.dummy_mask(data) # process - add the value, then replace the dummies sum = data + add_value sum[dMask] = dummy # write the data back to the database db.write_channel(l, channel_name, sum, fid) if __name__ == "__main__": gxpy.utility.check_version('9.2') # get (create) a GX context with gxpy.gx.GXpy() as gxp: # get the current gx context # The GX_GEOSOFT_BIN_PATH Environment variable should contain a path with geodist.dll print("GX_GEOSOFT_BIN_PATH: {}".format(os.getenv("GX_GEOSOFT_BIN_PATH"))) print('Working directory: ' + os.path.abspath(os.curdir)) print('User: {}'.format(gxp.gid)) # get command line parameters parser = argp.ArgumentParser(description="Add a constant to a Geosoft database channel") parser.add_argument("sDB", help="Geosoft database") parser.add_argument("sCh", help="channel to process") parser.add_argument("-v", "--value", type=float, default=1.0, help="value to add, default is 1.0") args = parser.parse_args() # echo parameters print("\nDatabase = "+args.sDB) print("Channel = "+args.sCh) print("Value to add = {}\n".format(args.value)) # open the database with gxpy.gdb.Geosoft_gdb.open(args.sDB) as db: # process the data process_database(db, args.sCh, args.value) <file_sep>/geosoft/gxpy/vox_display.py """ Geosoft vox display handling, which manages the rendering of a `geosoft.gxpy.vox.Vox` in a 3d view. 
:Classes: :`VoxDisplay`: 3D visualization of a vox, which can be placed `geosoft.gxpy.view.View_3d` :Constants: :ZONE_DEFAULT: 0 :ZONE_LINEAR: 1 :ZONE_NORMAL: 2 :ZONE_EQUALAREA: 3 :ZONE_SHADE: 4 :ZONE_LOGLINEAR: 5 :ZONE_LAST: 6 :RENDER_FILL: 0 :RENDER_EDGE: 1 :RENDER_FILL_EDGE: 2 :RENDER_SMOOTH: 3 .. seealso:: `geosoft.gxpy.vox.Vox`, `geosoft.gxpy.view.View_3d`, `geosoft.gxapi.GXVOXD` .. note:: Regression tests provide usage examples: `vox_display tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_vox_display.py>`_ .. versionadded:: 9.3.1 """ import os import geosoft import geosoft.gxapi as gxapi from . import gx from . import view as gxview from . import group as gxgroup from . import vox as gxvox from . import map as gxmap __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class VoxDisplayException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.vox_display`. .. versionadded:: 9.2 """ pass ZONE_DEFAULT = 0 ZONE_LINEAR = 1 ZONE_NORMAL = 2 ZONE_EQUALAREA = 3 ZONE_SHADE = 4 ZONE_LOGLINEAR = 5 ZONE_LAST = 6 RENDER_FILL = 0 RENDER_EDGE = 1 RENDER_FILL_EDGE = 2 RENDER_SMOOTH = 3 class VoxDisplay: """ Creation and handling of vox displays. Vox displays can be placed into a 3D view for display. :Constructors: :`solid`: create as a solid, each cell colored from a `geosoft.gxpy.group.Color_map` :`vector`: create as a vector voxel as vectors colored from a `geosoft.gxpy.group.Color_map` :`gxapi_gxvoxd`: create from an existing `geosoft.gxapi.GXVOXD` instance .. 
versionadded:: 9.3.1 """ def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return self.name def __enter__(self): return self def __exit__(self, _type, _value, _traceback): self.__del__() def __del__(self): if hasattr(self, '_close'): self._close() def _close(self): if hasattr(self, '_open'): if self._open: gx.pop_resource(self._open) self._gxvoxd = None self._vox = None self._open = None def __init__(self, vox, name=None): self._gxvoxd = None self._vox = vox if name is None: if vox is not None: name = vox.name self._name = name self._vector = False self._vector_cone_specs = (1., 4., 0.25, 5000) self._open = gx.track_resource(self.__class__.__name__, name) @classmethod def solid(cls, vox, color_map=None, zone=ZONE_DEFAULT, contour=None): """ Create a solid colored vox_display from a `geosoft.gxpy.vox.Vox` instance. :param vox: `geosoft.gxpy.vox.Vox` instance :param color_map: `gxpy.group.Color_map` instance, or the name of a file, which may be `.tbl`, `.zon`, `.itr`, or `.agg`. :param zone: Colour distribution method: =================== ================================================== ZONE_DEFAULT as set by user global default settings ZONE_LINEAR linearly distributed ZONE_NORMAL normal (Gaussian) distribution ZONE_EQUALAREA each color will occupy an equal area on the image ZONE_LOGLINEAR logarithmic linear distribution ZONE_LAST last used coloring for this vox =================== ================================================== :param contour: break colours on even multiples of contour .. 
versionadded:: 9.3.1 """ voxd = cls(vox) if (color_map is None) or (isinstance(color_map, str)): color_map = geosoft.gxpy.group.Color_map(color_map) color_map_file = color_map.save_file() if contour is None: contour = gxapi.rDUMMY voxd._gxvoxd = gxapi.GXVOXD.create(vox.gxvox, color_map_file, zone, contour) return voxd @classmethod def vector(cls, vox, vector_cone_specs=(1., 4., 0.25, 5000), color_map=None, zone=ZONE_DEFAULT, contour=None): """ Create a vector symbol vox_display from a `geosoft.gxpy.vox.Vox` instance. :param vox: `geosoft.gxpy.vox.Vox` instance :param vector_cone_specs: Vector plotting specs (scale_cell_ratio, height_base_ratio, base_cell_ratio, max_cones). Default is (1., 4., 0.25, 5000). See `vector_cone_specs` property. :param color_map: `gxpy.group.Color_map` instance, or the name of a file, which may be `.tbl`, `.zon`, `.itr`, or `.agg`. :param zone: Colour distribution method: :: ZONE_DEFAULT as set by user global default settings ZONE_LINEAR linearly distributed ZONE_NORMAL normal (Gaussian) distribution ZONE_EQUALAREA each color will occupy an equal area on the image ZONE_LOGLINEAR logarithmic linear distribution ZONE_LAST last used coloring for this vox :param contour: break colours on even multiples of contour .. versionadded:: 9.3.1 """ if not vox.is_vectorvox: raise VoxDisplayException(_t('vox must be a vectorvoxel to create a vector swarm')) voxd = VoxDisplay.solid(vox, color_map, zone, contour) voxd._vector = True voxd._vector_cone_specs = vector_cone_specs return voxd @classmethod def gxapi_gxvoxd(cls, gxapi_voxd, name=None): """ Create a VoxDisplay instance from a `geosoft.gxapi.GXVOXD` or a `geosoft.gxapi.GXVECTOR3D` instance. :param gxapi_voxd: `geosoft.gxapi.VOXD` or `geosoft.gxapi.GXVECTOR3D` instance :param name: name of the voxel, required for a vector voxel. .. 
versionadded 9.3.1 """ if isinstance(gxapi_voxd, gxapi.GXVOXD): if name is None: name = gxapi.str_ref() gxapi_voxd.get_name(name) name = name.value else: if not name: raise VoxDisplayException(_t('a name is required to open a GXVECTOR3D object')) try: vox = gxvox.Vox.open(name) except Exception: vox = None name = os.path.splitext(os.path.basename(name))[0] voxd = cls(vox, name=name) voxd._gxvoxd = gxapi_voxd return voxd @property def vox(self): """ `geosoft.gxpy.vox.Vox` instance""" return self._vox @property def name(self): """ instance name, same as the contained Vox name""" return self._name @property def unit_of_measure(self): """Unit of data measurement for the contained vox data.""" return self.color_map.unit_of_measure @property def is_vector(self): """True if this is a vector style display""" return self._vector @property def vector_cone_specs(self): """ Vector plotting specs: (scale_cell_ratio, height_base_ratio, base_cell_ratio, max_cones). Can be set. scale_cell_ratio scales the maximum cone length to the size of the smallest cell. If None, default is 1. height_base_ratio is the ration of the cone height to the base size. If None, default is 4. base_cell_ratio is the maximum base size relative to the minimum cell size. If None, default is 0.25. max_cones is the maximum number of cones to draw. Voxel is decimated to limit the cones. None to plot all cones, though typically this is limited to about 2000 to improve display performance. .. versionadded:: 9.3.1 """ return self._vector_cone_specs @vector_cone_specs.setter def vector_cone_specs(self, specs): sc, hb, bc, mx = specs if sc is None or sc <= 0.: sc = 1.0 if hb is None or hb <= 0.: hb = 4. if bc is None or bc <= 0.: bc = 0.25 if mx is not None and mx <= 0: mx = None self._vector_cone_specs = (sc, hb, bc, mx) @property def draw_controls(self): """ Vox drawing settings, returned as a tuple: (box_on, opacity, extent) as (boolean, float, (min_x, min_y, min_z, max_x, max_y, max_z)) Can be set. .. 
versionadded:: 9.3.1 """ if self.is_vector: return None, None, None box = gxapi.int_ref() trans = gxapi.float_ref() x0 = gxapi.float_ref() x1 = gxapi.float_ref() y0 = gxapi.float_ref() y1 = gxapi.float_ref() z0 = gxapi.float_ref() z1 = gxapi.float_ref() self.gxvoxd.get_draw_controls(box, trans, x0, y0, z0, x1, y1, z1) return bool(box.value), trans.value, (x0.value, y0.value, z0.value, x1.value, y1.value, z1.value) @draw_controls.setter def draw_controls(self, controls): if self.is_vector: raise VoxDisplayException(_t('cannot set draw controls for a vector display')) box, trans, extent = controls x0, y0, z0, x1, y1, z1 = extent self.gxvoxd.set_draw_controls(box, trans, x0, y0, z0, x1, y1, z1) @property def render_mode(self): rm = gxapi.int_ref() self.gxvoxd.get_render_mode(rm) return rm.value @render_mode.setter def render_mode(self, mode): if mode not in (RENDER_FILL, RENDER_EDGE, RENDER_FILL_EDGE, RENDER_SMOOTH): raise VoxDisplayException(_t('Invalid render mode {}').format(mode)) self.gxvoxd.set_render_mode(mode) @property def gxvoxd(self): """The :class:`geosoft.gxapi.GXVOXD` instance handle, None for a vector display.""" return self._gxvoxd @property def is_thematic(self): """True if this is a thematic vox display""" if self.is_vector: return False return bool(self.gxvoxd.is_thematic()) @property def opacity(self): """Opacity between 0. (invisible) and 1. (opaque) can be set.""" return self.draw_controls[1] @opacity.setter def opacity(self, t): controls = list(self.draw_controls) controls[1] = t self.draw_controls = controls @property def color_map(self): """Return the colour map for this vox""" itr = gxapi.GXITR.create() self.gxvoxd.get_itr(itr) cmap = geosoft.gxpy.group.Color_map(itr) cmap.title = self.name if self.vox: cmap.unit_of_measure = self.vox.unit_of_measure return cmap @property def shell_limits(self): """ The data limits of the visible data shell for scalar data. Can be set. 
returns: (min, max) limits, data outside this range is transparent, None for no limit .. versionadded 9.3.1 """ vmin = gxapi.float_ref() vmax = gxapi.float_ref() self.gxvoxd.get_shell_controls(vmin, vmax) vmin = vmin.value vmax = vmax.value if vmin == gxapi.rDUMMY: vmin = None if vmax == gxapi.rDUMMY: vmax = None return vmin, vmax @shell_limits.setter def shell_limits(self, limits): vmin, vmax = limits if vmin is None: vmin = gxapi.rDUMMY if vmax is None: vmax = gxapi.rDUMMY self.gxvoxd.set_shell_controls(vmin, vmax) def view_3d(self, file_name=None, overwrite=True, plane_2d=False): """ Create a 3d view (`geosoft.gxpy.view.View_3d`) from the instance. :param file_name: the name of a file for the 3d view. If None a temporary 3d view created. :param overwrite: True to overwrite existing file :param plane_2d: True to keep the 2D plane. Only keep it if you intend to draw on it otherwise a grey plane will appear in the view. .. versionadded:: 9.3 """ v3d = gxview.View_3d.new(file_name, overwrite=overwrite) gxgroup.VoxDisplayGroup.new(v3d, self) if not plane_2d: v3d.delete_plane(0) return v3d def figure_map(self, file_name=None, overwrite=True, title=None, legend_label=None, features=('LEGEND', 'NEATLINE'), **kwargs): """ Create a figure view file from the instance. :param file_name: the name of a file for the 3d view. If None a temporary 3d view created. :param overwrite: True to overwrite existing file :param title: Title added to the image :param legend_label: If plotting a legend make this the legned title. The default is the title in the first aggregate layer colour map. :param features: list of features to place on the map, default is ('SCALE', 'LEGEND', 'NEATLINE') =========== ========================================= 'LEGEND' show the colour legend 'NEATLINE' draw a neat-line around the image =========== ========================================= :param kwargs: passed to `geosoft.gxpy.map.Map.new` .. 
versionadded:: 9.3 """ # uppercase features, use a dict so we pop things we use and report error if isinstance(features, str): features = (features,) feature_list = {} if features is not None: for f in features: feature_list[f.upper()] = None features = list(feature_list.keys()) # setup margins if not ('margins' in kwargs): bottom_margin = 1.0 if title: bottom_margin += len(title.split('\n')) * 1.0 right_margin = 1 if 'LEGEND' in feature_list: right_margin += 3.5 kwargs['margins'] = (1, right_margin, bottom_margin, 1) gmap = gxmap.Map.figure((0, 0, 100, 100), file_name=file_name, features=features, title=title, overwrite=overwrite, **kwargs) with gxview.View.open(gmap, "data") as v: if 'LEGEND' in features: gxgroup.legend_color_bar(v, 'legend', title=legend_label, location=(1, 0), cmap=self.color_map) area = gxview.View.open(gmap, gmap.current_data_view).extent_map_cm() area = (area[0] * 10., area[1] * 10., area[2] * 10., area[3] * 10.) gmap.create_linked_3d_view(self.view_3d(), area_on_map=area) return gmap <file_sep>/docs/GXVOX.rst .. _GXVOX: GXVOX class ================================== .. autoclass:: geosoft.gxapi.GXVOX :members: .. _VOX_DIR: VOX_DIR constants ----------------------------------------------------------------------- Voxel direction .. autodata:: geosoft.gxapi.VOX_DIR_XY :annotation: .. autoattribute:: geosoft.gxapi.VOX_DIR_XY .. autodata:: geosoft.gxapi.VOX_DIR_XZ :annotation: .. autoattribute:: geosoft.gxapi.VOX_DIR_XZ .. autodata:: geosoft.gxapi.VOX_DIR_YZ :annotation: .. autoattribute:: geosoft.gxapi.VOX_DIR_YZ .. _VOX_DIRECTION: VOX_DIRECTION constants ----------------------------------------------------------------------- Voxel export direction .. autodata:: geosoft.gxapi.VOX_3D_DIR_XYZ :annotation: .. autoattribute:: geosoft.gxapi.VOX_3D_DIR_XYZ .. autodata:: geosoft.gxapi.VOX_3D_DIR_YXZ :annotation: .. autoattribute:: geosoft.gxapi.VOX_3D_DIR_YXZ .. autodata:: geosoft.gxapi.VOX_3D_DIR_XZY :annotation: .. 
autoattribute:: geosoft.gxapi.VOX_3D_DIR_XZY .. autodata:: geosoft.gxapi.VOX_3D_DIR_YZX :annotation: .. autoattribute:: geosoft.gxapi.VOX_3D_DIR_YZX .. autodata:: geosoft.gxapi.VOX_3D_DIR_ZXY :annotation: .. autoattribute:: geosoft.gxapi.VOX_3D_DIR_ZXY .. autodata:: geosoft.gxapi.VOX_3D_DIR_ZYX :annotation: .. autoattribute:: geosoft.gxapi.VOX_3D_DIR_ZYX .. _VOX_FILTER3D: VOX_FILTER3D constants ----------------------------------------------------------------------- Voxel filter type .. autodata:: geosoft.gxapi.VOX_FILTER3D_FILE :annotation: .. autoattribute:: geosoft.gxapi.VOX_FILTER3D_FILE .. autodata:: geosoft.gxapi.VOX_FILTER3D_SMOOTHING :annotation: .. autoattribute:: geosoft.gxapi.VOX_FILTER3D_SMOOTHING .. autodata:: geosoft.gxapi.VOX_FILTER3D_LAPLACE :annotation: .. autoattribute:: geosoft.gxapi.VOX_FILTER3D_LAPLACE .. autodata:: geosoft.gxapi.VOX_FILTER3D_X_GRADIENT :annotation: .. autoattribute:: geosoft.gxapi.VOX_FILTER3D_X_GRADIENT .. autodata:: geosoft.gxapi.VOX_FILTER3D_Y_GRADIENT :annotation: .. autoattribute:: geosoft.gxapi.VOX_FILTER3D_Y_GRADIENT .. autodata:: geosoft.gxapi.VOX_FILTER3D_Z_GRADIENT :annotation: .. autoattribute:: geosoft.gxapi.VOX_FILTER3D_Z_GRADIENT .. autodata:: geosoft.gxapi.VOX_FILTER3D_TOTAL_GRADIENT :annotation: .. autoattribute:: geosoft.gxapi.VOX_FILTER3D_TOTAL_GRADIENT .. _VOX_GOCAD_ORIENTATION: VOX_GOCAD_ORIENTATION constants ----------------------------------------------------------------------- GOCAD Orientations .. autodata:: geosoft.gxapi.VOX_GOCAD_ORIENTATIONS_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.VOX_GOCAD_ORIENTATIONS_NORMAL .. autodata:: geosoft.gxapi.VOX_GOCAD_ORIENTATIONS_INVERTED :annotation: .. autoattribute:: geosoft.gxapi.VOX_GOCAD_ORIENTATIONS_INVERTED .. autodata:: geosoft.gxapi.VOX_GOCAD_ORIENTATIONS_NORMAL_ZFIRST :annotation: .. autoattribute:: geosoft.gxapi.VOX_GOCAD_ORIENTATIONS_NORMAL_ZFIRST .. autodata:: geosoft.gxapi.VOX_GOCAD_ORIENTATIONS_INVERTED_ZFIRST :annotation: .. 
autoattribute:: geosoft.gxapi.VOX_GOCAD_ORIENTATIONS_INVERTED_ZFIRST .. _VOX_GRID_LOGOPT: VOX_GRID_LOGOPT constants ----------------------------------------------------------------------- Voxel log gridding options .. autodata:: geosoft.gxapi.VOX_GRID_LOGOPT_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.VOX_GRID_LOGOPT_LINEAR .. autodata:: geosoft.gxapi.VOX_GRID_LOGOPT_LOG_SAVELINEAR :annotation: .. autoattribute:: geosoft.gxapi.VOX_GRID_LOGOPT_LOG_SAVELINEAR .. autodata:: geosoft.gxapi.VOX_GRID_LOGOPT_LOGLINEAR_SAVELINEAR :annotation: .. autoattribute:: geosoft.gxapi.VOX_GRID_LOGOPT_LOGLINEAR_SAVELINEAR .. autodata:: geosoft.gxapi.VOX_GRID_LOGOPT_LOG_SAVELOG :annotation: .. autoattribute:: geosoft.gxapi.VOX_GRID_LOGOPT_LOG_SAVELOG .. autodata:: geosoft.gxapi.VOX_GRID_LOGOPT_LOGLINEAR_SAVELOG :annotation: .. autoattribute:: geosoft.gxapi.VOX_GRID_LOGOPT_LOGLINEAR_SAVELOG .. _VOX_ORIGIN: VOX_ORIGIN constants ----------------------------------------------------------------------- Voxel origin .. autodata:: geosoft.gxapi.VOX_ORIGIN_BOTTOM :annotation: .. autoattribute:: geosoft.gxapi.VOX_ORIGIN_BOTTOM .. autodata:: geosoft.gxapi.VOX_ORIGIN_TOP :annotation: .. autoattribute:: geosoft.gxapi.VOX_ORIGIN_TOP .. _VOX_SLICE_MODE: VOX_SLICE_MODE constants ----------------------------------------------------------------------- Voxel export direction .. autodata:: geosoft.gxapi.VOX_SLICE_MODE_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.VOX_SLICE_MODE_LINEAR .. autodata:: geosoft.gxapi.VOX_SLICE_MODE_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.VOX_SLICE_MODE_NEAREST .. _VOX_VECTORVOX_IMPORT: VOX_VECTORVOX_IMPORT constants ----------------------------------------------------------------------- Voxel direction .. autodata:: geosoft.gxapi.VOX_VECTORVOX_XYZ :annotation: .. autoattribute:: geosoft.gxapi.VOX_VECTORVOX_XYZ .. autodata:: geosoft.gxapi.VOX_VECTORVOX_UVW :annotation: .. autoattribute:: geosoft.gxapi.VOX_VECTORVOX_UVW .. 
autodata:: geosoft.gxapi.VOX_VECTORVOX_AID :annotation: .. autoattribute:: geosoft.gxapi.VOX_VECTORVOX_AID <file_sep>/geosoft/gxapi/GXSQLSRV.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSQLSRV(gxapi_cy.WrapSQLSRV): """ GXSQLSRV class. SQL Server and MSDE utility functions """ def __init__(self, handle=0): super(GXSQLSRV, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSQLSRV <geosoft.gxapi.GXSQLSRV>` :returns: A null `GXSQLSRV <geosoft.gxapi.GXSQLSRV>` :rtype: GXSQLSRV """ return GXSQLSRV() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def attach_mdf(cls, server, user, password, db, mdf_file_name, ldf_file_name): """ Attaches an MDF SQL server file to a server. :param server: SQL server to use :param user: User name (if blank assume NT Integrated Security) :param password: <PASSWORD> :param db: `GXDB <geosoft.gxapi.GXDB>` name :param mdf_file_name: MDF name :param ldf_file_name: LDF name (if blank, tries single db attach) :type server: str :type user: str :type password: str :type db: str :type mdf_file_name: str :type ldf_file_name: str :returns: 0 - OK 1 - `GXDB <geosoft.gxapi.GXDB>` Operation Canceled Terminates on Error :rtype: int .. 
versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The file's path need to be visible as local files on the server. Network drives and substitutes may not work. """ ret_val = gxapi_cy.WrapSQLSRV._attach_mdf(GXContext._get_tls_geo(), server.encode(), user.encode(), password.encode(), db.encode(), mdf_file_name.encode(), ldf_file_name.encode()) return ret_val @classmethod def detach_db(cls, server, user, password, db): """ Detaches a SQL Server database from a server. :param server: SQL server to use :param user: User name (if blank assume NT Integrated Security) :param password: <PASSWORD> :param db: `GXDB <geosoft.gxapi.GXDB>` name :type server: str :type user: str :type password: str :type db: str :returns: 0 - OK 1 - `GXDB <geosoft.gxapi.GXDB>` Operation Canceled :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSQLSRV._detach_db(GXContext._get_tls_geo(), server.encode(), user.encode(), password.encode(), db.encode()) return ret_val @classmethod def get_database_languages_lst(cls, lst, server, user, password, win_auth): """ Get a list of the languages into `GXLST <geosoft.gxapi.GXLST>` :param server: SQL server to use :param user: <NAME> :param password: <PASSWORD> :param win_auth: 0 - SQL authentication, 1 - NT integrated securty :type lst: GXLST :type server: str :type user: str :type password: str :type win_auth: int :returns: Number of languages :rtype: int .. 
versionadded:: 5.1.8

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        ret_val = gxapi_cy.WrapSQLSRV._get_database_languages_lst(GXContext._get_tls_geo(), lst, server.encode(), user.encode(), password.encode(), win_auth)
        return ret_val

    @classmethod
    def get_databases_lst(cls, lst, server, user, password, win_auth):
        """
        Get a list of the databases into `GXLST <geosoft.gxapi.GXLST>`

        :param server:    SQL server to use
        :param user:      User name
        :param password:  Password
        :param win_auth:  0 - SQL authentication, 1 - NT integrated security
        :type  lst:       GXLST
        :type  server:    str
        :type  user:      str
        :type  password:  str
        :type  win_auth:  int

        :returns: Number of databases
        :rtype:   int

        .. versionadded:: 5.1.8

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        ret_val = gxapi_cy.WrapSQLSRV._get_databases_lst(GXContext._get_tls_geo(), lst, server.encode(), user.encode(), password.encode(), win_auth)
        return ret_val

    @classmethod
    def get_login_gui(cls, server, user, password, mode, win_auth):
        """
        Get/Test login information to SQL Server

        :param server:    SQL server to use
        :param user:      User name (default & returned)
        :param password:  Password (default & returned)
        :param mode:      :ref:`MFCSQL_DRIVER`
        :param win_auth:  Windows Authentication (default & returned)
        :type  server:    str
        :type  user:      str_ref
        :type  password:  str_ref
        :type  mode:      int
        :type  win_auth:  int_ref

        ..
versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ user.value, password.value, win_auth.value = gxapi_cy.WrapSQLSRV._get_login_gui(GXContext._get_tls_geo(), server.encode(), user.value.encode(), password.value.encode(), mode, win_auth.value) @classmethod def get_servers_lst(cls, lst): """ Get a list of the visible servers into `GXLST <geosoft.gxapi.GXLST>` :type lst: GXLST :returns: Number of servers :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSQLSRV._get_servers_lst(GXContext._get_tls_geo(), lst) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXTR.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXTR(gxapi_cy.WrapTR): """ GXTR class. The `GXTR <geosoft.gxapi.GXTR>` object contains trend information about a grid or grid pager. Currently, it is used only in conjunction with the `GXIMG.get_tr <geosoft.gxapi.GXIMG.get_tr>`, `GXIMG.set_tr <geosoft.gxapi.GXIMG.set_tr>`, and `GXPGU.trend <geosoft.gxapi.GXPGU.trend>` functions. 
""" def __init__(self, handle=0): super(GXTR, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXTR <geosoft.gxapi.GXTR>` :returns: A null `GXTR <geosoft.gxapi.GXTR>` :rtype: GXTR """ return GXTR() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, order): """ Creates a Trend object :param order: Trend order (must >=0 and <=3) :type order: int :returns: `GXTR <geosoft.gxapi.GXTR>` Object :rtype: GXTR .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapTR._create(GXContext._get_tls_geo(), order) return GXTR(ret_val) def copy(self, t_rs): """ This method copies a table resource to another trend table resource. :param t_rs: Source Trend Object to Copy :type t_rs: GXTR .. versionadded:: 8.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(t_rs) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXGMSYS.rst .. _GXGMSYS: GXGMSYS class ================================== .. autoclass:: geosoft.gxapi.GXGMSYS :members: <file_sep>/examples/tutorial/Coordinate systems/assign_coordinate_system.py import geosoft.gxpy.gx as gx import geosoft.gxpy.grid as gxgrd gxc = gx.GXpy() # create a memory grid as an example of a spatial object for this exercise grid = gxgrd.Grid.new(properties=({'nx': 10, 'ny': 10})) print(grid.coordinate_system) # define by a Geosoft-style coordinate system name. 
Parameters are derived from internal Geosoft tables. grid.coordinate_system = "NAD83 / UTM zone 17N" print(grid.coordinate_system) print(grid.coordinate_system.gxf) print(grid.coordinate_system.coordinate_dict()) # example use of GXF strings to change the datum to NAD27. Here we remove the name and local datum transform # and allow the Coordinate_system class to complete parameters for NAD27 from the tables. gxf = grid.coordinate_system.gxf gxf[0] = '' gxf[1] = "NAD27" gxf[4] = '' grid.coordinate_system = gxf print('gxf:', grid.coordinate_system.gxf) # fully explicit definition of UTM zone 17N on NAD27 datum using GXF string. grid.coordinate_system = ['', 'NAD27', '"Transverse Mercator",0,-87,0.9996,500000,0', 'm,1', '"*local_datum",-8,160,176,0,0,0,0'] print('gxf:', grid.coordinate_system.gxf) # ... from a json string. Note how to properly escape a string embedded in a string. js = '{"units": "m,1", "datum": "NAD27", "projection": "\\"Transverse Mercator\\",0,-87,0.9996,500000,0"}' grid.coordinate_system = js print('json:', grid.coordinate_system.gxf) # ... from an ESRI WKT string wkt = 'PROJCS["NAD_1927_UTM_Zone_16N",' + \ 'GEOGCS["GCS_North_American_1927",' + \ 'DATUM["D_North_American_1927",' + \ 'SPHEROID["Clarke_1866",6378206.4,294.9786982]],' + \ 'PRIMEM["Greenwich",0.0],' + \ 'UNIT["Degree",0.0174532925199433]],' + \ 'PROJECTION["Transverse_Mercator"],' + \ 'PARAMETER["False_Easting",500000.0],' + \ 'PARAMETER["False_Northing",0.0],' + \ 'PARAMETER["Central_Meridian",-87.0],' + \ 'PARAMETER["Scale_Factor",0.9996],' + \ 'PARAMETER["Latitude_Of_Origin",0.0],' + \ 'UNIT["Meter",1.0],' + \ 'AUTHORITY["EPSG",26716]]' grid.coordinate_system = wkt print('from wkt:', grid.coordinate_system.esri_wkt) <file_sep>/geosoft/gxapi/GXHGD.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXHGD(gxapi_cy.WrapHGD): """ GXHGD class. High Performance Grid. Designed to place grid data on a DAP server. It produces a multi-resolution compressed object that supports multi-threading and allows for high-speed extraction of data at any resolution. """ def __init__(self, handle=0): super(GXHGD, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXHGD <geosoft.gxapi.GXHGD>` :returns: A null `GXHGD <geosoft.gxapi.GXHGD>` :rtype: GXHGD """ return GXHGD() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, name): """ Create a handle to an `GXHGD <geosoft.gxapi.GXHGD>` object :param name: File Name :type name: str :returns: `GXHGD <geosoft.gxapi.GXHGD>` handle, terminates if creation fails :rtype: GXHGD .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapHGD._create(GXContext._get_tls_geo(), name.encode()) return GXHGD(ret_val) def export_img(self, name): """ Export all layers of this `GXHGD <geosoft.gxapi.GXHGD>` into grid files. :param name: Name of grids (each layers adds _Number to the name) :type name: str .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_img(name.encode()) def get_meta(self, meta): """ Get the metadata of a grid. 
:param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to save `GXHGD <geosoft.gxapi.GXHGD>`'s meta to :type meta: GXMETA .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_meta(meta) @classmethod def h_create_img(cls, img, name): """ Make an `GXHGD <geosoft.gxapi.GXHGD>` from an `GXIMG <geosoft.gxapi.GXIMG>` :param img: Image Handle :param name: Name of `GXHGD <geosoft.gxapi.GXHGD>` object :type img: GXIMG :type name: str :returns: `GXHGD <geosoft.gxapi.GXHGD>` Object :rtype: GXHGD .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapHGD._h_create_img(GXContext._get_tls_geo(), img, name.encode()) return GXHGD(ret_val) def set_meta(self, meta): """ Set the metadata of a grid. :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to add to `GXHGD <geosoft.gxapi.GXHGD>`'s meta :type meta: GXMETA .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta(meta) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXGIS.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXIPJ import GXIPJ ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXGIS(gxapi_cy.WrapGIS): """ GXGIS class. 
The `GXGIS <geosoft.gxapi.GXGIS>` class is used for the import, export, and interrogation of `GXGIS <geosoft.gxapi.GXGIS>` Data stored in external formats, such as MapInfo® TAB files. """ def __init__(self, handle=0): super(GXGIS, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXGIS <geosoft.gxapi.GXGIS>` :returns: A null `GXGIS <geosoft.gxapi.GXGIS>` :rtype: GXGIS """ return GXGIS() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, file, info, type): """ Creates a `GXGIS <geosoft.gxapi.GXGIS>` Object :param file: Data source (file) :param info: Data qualifying information if required. :param type: :ref:`GIS_TYPE` :type file: str :type info: str :type type: int :returns: `GXGIS <geosoft.gxapi.GXGIS>` Object :rtype: GXGIS .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapGIS._create(GXContext._get_tls_geo(), file.encode(), info.encode(), type) return GXGIS(ret_val) def create_map_2d(self, map, map_scale, ipj, map2_d): """ `create_map_2d <geosoft.gxapi.GXGIS.create_map_2d>` Create a new 2D map for `GXGIS <geosoft.gxapi.GXGIS>` imports. :param map: Map name :param map_scale: Map scale (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param ipj: Projection (no orientation) :param map2_d: :ref:`GIS_MAP2D` :type map: str :type map_scale: float :type ipj: GXIPJ :type map2_d: int .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function was created to minimize duplication in creation of new maps with 2D views. 
""" self._create_map_2d(map.encode(), map_scale, ipj, map2_d) def get_bpr_models_lst(self, file, lst): """ Get a `GXLST <geosoft.gxapi.GXLST>` of block models contained in a Gemcom BPR or BRP2 file :param file: BPR or BPR2 file :param lst: Returned `GXLST <geosoft.gxapi.GXLST>` of block models :type file: str :type lst: GXLST .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Returned `GXLST <geosoft.gxapi.GXLST>` has items in the following format: Name: If there is only one sub-directory with models, then only the block model name "Rock Type_5" is required to ensure uniqueness. If there is more than one sub-directory, then the name is set to (.e.g.) "[Standard]Rock Type_5" Value: Sub-directory file path "Standard\\Rock Type_5.BLK", (includes the extension). The Gemcom BPR and BPR2 files keep their block models in one or more sub-directories, identified in the ``*.CAT`` file located beside the input BPR or BPR2. """ self._get_bpr_models_lst(file.encode(), lst) def get_ipj(self): """ Get the `GXGIS <geosoft.gxapi.GXGIS>` `GXIPJ <geosoft.gxapi.GXIPJ>` :returns: `GXIPJ <geosoft.gxapi.GXIPJ>` handle NULL if error :rtype: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is your copy, you must destroy it. If the `GXGIS <geosoft.gxapi.GXGIS>` does not have an `GXIPJ <geosoft.gxapi.GXIPJ>`, an `GXIPJ <geosoft.gxapi.GXIPJ>` with no warp and UNKNOWN projection is returned. """ ret_val = self._get_ipj() return GXIPJ(ret_val) def get_meta(self, meta): """ Get the `GXGIS <geosoft.gxapi.GXGIS>` `GXMETA <geosoft.gxapi.GXMETA>` :param meta: Meta object to store `GXGIS <geosoft.gxapi.GXGIS>` meta information :type meta: GXMETA .. 
versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_meta(meta) def get_range(self, x_min, x_max, y_min, y_max, z_min, z_max): """ Get the range of data in the `GXGIS <geosoft.gxapi.GXGIS>` :param x_min: X min :param x_max: X max :param y_min: Y min :param y_max: Y max :param z_min: Z min :param z_max: Z max :type x_min: float_ref :type x_max: float_ref :type y_min: float_ref :type y_max: float_ref :type z_min: float_ref :type z_max: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x_min.value, x_max.value, y_min.value, y_max.value, z_min.value, z_max.value = self._get_range(x_min.value, x_max.value, y_min.value, y_max.value, z_min.value, z_max.value) @classmethod def datamine_type(cls, file): """ Returns the type of a Datamine file. :param file: Name of input datamine file :type file: str :returns: Datamine file types - bitwise AND of types. :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Terminates if file is not a Datamine file. A datamine file can contain fields from a multitude of types, so use `GXMATH.and_ <geosoft.gxapi.GXMATH.and_>` or `GXMATH.or_ <geosoft.gxapi.GXMATH.or_>` to determine if the file contains the required data. """ ret_val = gxapi_cy.WrapGIS._datamine_type(GXContext._get_tls_geo(), file.encode()) return ret_val def get_file_name(self, name): """ Get the file name :param name: Returned file name :type name: str_ref .. 
versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._get_file_name(name.value.encode()) @classmethod def is_mi_map_file(cls, map): """ Returns TRUE if file is a MapInfo MAP file. :param map: Name of input map file :type map: str :returns: 0 if not a MapInfo MAP file 1 if it is. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** It is important not to overwrite a MapInfo MAP file with a Geosoft one. Use this function to test the MAP file (looks at the first few bytes). """ ret_val = gxapi_cy.WrapGIS._is_mi_map_file(GXContext._get_tls_geo(), map.encode()) return ret_val @classmethod def is_mi_raster_tab_file(cls, tab): """ Returns TRUE if file is a MapInfo Raster TAB file. :param tab: Name of input tab file :type tab: str :returns: 0 if not a MapInfo Raster TAB file 1 if it is. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapGIS._is_mi_raster_tab_file(GXContext._get_tls_geo(), tab.encode()) return ret_val @classmethod def is_mi_rotated_raster_tab_file(cls, tab): """ Returns TRUE if file is a rotated MapInfo Raster TAB file. :param tab: Name of input tab file :type tab: str :returns: 0 if not a rotated MapInfo Raster TAB file 1 if it is (see conditions below). :rtype: int .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns 1 if: a) This is a MapInfo RASTER file b) A three-point warp is defined. c) The warp requires a rotation in order to exactly map the input and output warp points. The rotation must be at least 1.e-6 radians. 
This function will register an error (and return 0) if problems are encountered opening or reading the TAB file. """ ret_val = gxapi_cy.WrapGIS._is_mi_rotated_raster_tab_file(GXContext._get_tls_geo(), tab.encode()) return ret_val def is_shp_file_3d(self): """ Returns TRUE if an ArcView `GXSHP <geosoft.gxapi.GXSHP>` file is type POINTZ, ARCZ, POLYGONZ or MULTIPOINTZ :returns: 0 if the `GXSHP <geosoft.gxapi.GXSHP>` file is 2D 1 if the `GXSHP <geosoft.gxapi.GXSHP>` file is of type POINTZ, ARCZ, POLYGONZ or MULTIPOINTZ :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** `GXSHP <geosoft.gxapi.GXSHP>` files come in 2D and 3D forms. Fails if not `GIS_TYPE_ARCVIEW <geosoft.gxapi.GIS_TYPE_ARCVIEW>`. """ ret_val = self._is_shp_file_3d() return ret_val def is_shp_file_point(self): """ Returns TRUE if an ArcView `GXSHP <geosoft.gxapi.GXSHP>` file is type POINT or POINTZ :returns: 0 if the `GXSHP <geosoft.gxapi.GXSHP>` file is not points if the `GXSHP <geosoft.gxapi.GXSHP>` file is of type POINT or POINTZ :rtype: int .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Fails if not `GIS_TYPE_ARCVIEW <geosoft.gxapi.GIS_TYPE_ARCVIEW>`. """ ret_val = self._is_shp_file_point() return ret_val def num_attribs(self): """ The number of attribute fields in the `GXGIS <geosoft.gxapi.GXGIS>` dataset :returns: The number of attribute fields :rtype: int .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_attribs() return ret_val def num_shapes(self): """ The number of shape entities in the `GXGIS <geosoft.gxapi.GXGIS>` dataset :returns: The number of shape entities :rtype: int .. 
versionadded:: 7.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._num_shapes()
        return ret_val



    @classmethod
    def scan_mi_raster_tab_file(cls, tab, file, ipj):
        """
        Scan and set up a MapInfo RASTER.

        :param tab:   Name of input file
        :param file:  Name of Raster file (an `GXIMG <geosoft.gxapi.GXIMG>` `GXDAT <geosoft.gxapi.GXDAT>`)
        :param ipj:   Projection
        :type  tab:   str
        :type  file:  str_ref
        :type  ipj:   GXIPJ

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This will create a GI file for the raster image.
        """
        file.value = gxapi_cy.WrapGIS._scan_mi_raster_tab_file(GXContext._get_tls_geo(), tab.encode(), file.value.encode(), ipj)




    def load_ascii(self, wa):
        """
        Save `GXGIS <geosoft.gxapi.GXGIS>` attribute table information (string fields) into a `GXWA <geosoft.gxapi.GXWA>`.

        :param wa:  `GXWA <geosoft.gxapi.GXWA>` object
        :type  wa:  GXWA

        .. versionadded:: 7.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** All string fields (excluding X/Y and numerical fields) will be saved into the `GXWA <geosoft.gxapi.GXWA>` columns.
        The field names are saved in the first line, followed by a blank line.
        The field columns are separated by a tab (delimited character).
        """
        self._load_ascii(wa)




    def load_gdb(self, db):
        """
        Load `GXGIS <geosoft.gxapi.GXGIS>` table information into a GDB.

        :param db:  Database
        :type  db:  GXDB

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** All fields of the database will be loaded into the group.

        Channels will use the same name (or an allowable alias) as the `GXGIS <geosoft.gxapi.GXGIS>` field name.
If a channel does not exist, it will be created based on the characteristics of the `GXGIS <geosoft.gxapi.GXGIS>` field.
        If a channel exists, it will be used as-is.
        """
        self._load_gdb(db)




    def load_map(self, mview):
        """
        Load `GXGIS <geosoft.gxapi.GXGIS>` table drawing into a `GXMVIEW <geosoft.gxapi.GXMVIEW>`.

        :param mview:  View in which to place `GXGIS <geosoft.gxapi.GXGIS>` drawing.
        :type  mview:  GXMVIEW

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The `GXGIS <geosoft.gxapi.GXGIS>` drawing will be drawn in the current group.
        """
        self._load_map(mview)




    def load_map_ex(self, map, view_name):
        """
        Load `GXGIS <geosoft.gxapi.GXGIS>` table drawing into a `GXMAP <geosoft.gxapi.GXMAP>`.

        :param map:        Map handle
        :param view_name:  Name of existing data view
        :type  map:        GXMAP
        :type  view_name:  str

        .. versionadded:: 7.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The `GXGIS <geosoft.gxapi.GXGIS>` drawing will be drawn in the current group.
        """
        self._load_map_ex(map, view_name.encode())




    def load_meta_groups_map(self, mview, meta, ph_object, prefix, name_field):
        """
        Load `GXGIS <geosoft.gxapi.GXGIS>` table drawing into a `GXMVIEW <geosoft.gxapi.GXMVIEW>`.

        :param mview:       View in which to place `GXGIS <geosoft.gxapi.GXGIS>` drawing.
        :param ph_object:   Class
        :param prefix:      Group Name prefix
        :param name_field:  Name field (Empty to use ID of entity)
        :type  mview:       GXMVIEW
        :type  meta:        GXMETA
        :type  ph_object:   int
        :type  prefix:      str
        :type  name_field:  str

        .. versionadded:: 5.1.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The `GXGIS <geosoft.gxapi.GXGIS>` drawing will be drawn in the current group.
A group will be created for every entity and data items containing an entity's field
        will be added to the Meta information of every group into the class specified. Note that the map may grow
        very large for big datasets.
        """
        self._load_meta_groups_map(mview, meta, ph_object, prefix.encode(), name_field.encode())




    def load_ply(self, ply):
        """
        Load `GXGIS <geosoft.gxapi.GXGIS>` table drawing into a Multi-Polygon object.

        :param ply:  Polygon object in which to place `GXGIS <geosoft.gxapi.GXGIS>` shapes.
        :type  ply:  GXPLY

        .. versionadded:: 5.1.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._load_ply(ply)




    def load_shapes_gdb(self, db):
        """
        Load `GXGIS <geosoft.gxapi.GXGIS>` shapes table information into separate lines in a GDB.

        :param db:  Database
        :type  db:  GXDB

        .. versionadded:: 7.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** All fields of the database will be loaded into the group.

        Channels will use the same name (or an allowable alias) as the `GXGIS <geosoft.gxapi.GXGIS>` field name.
        If a channel does not exist, it will be created based on the characteristics of the `GXGIS <geosoft.gxapi.GXGIS>` field.
        If a channel exists, it will be used as-is.
        The shape ID will be used as the line numbers.
        """
        self._load_shapes_gdb(db)




    def set_dm_wireframe_pt_file(self, file):
        """
        Specify the wireframe point file corresponding to the input file.

        :param file:  Name of the wireframe point file
        :type  file:  str

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Datamine wireframe models are specified by pairs of files,
        the first is the triangle node file, and the second gives the XYZ locations
        of the node points.
This function allows you to specify the latter when reading the first, so that the full model can be decoded. """ self._set_dm_wireframe_pt_file(file.encode()) def set_ipj(self, ipj): """ Save the `GXIPJ <geosoft.gxapi.GXIPJ>` back to `GXGIS <geosoft.gxapi.GXGIS>` file :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` to save :type ipj: GXIPJ .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_ipj(ipj) def set_lst(self, lst): """ Save a `GXLST <geosoft.gxapi.GXLST>` of items inside the `GXGIS <geosoft.gxapi.GXGIS>` object for special use. :param lst: `GXLST <geosoft.gxapi.GXLST>` object to save to `GXGIS <geosoft.gxapi.GXGIS>` `GXLST <geosoft.gxapi.GXLST>`. :type lst: GXLST .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the `GXGIS <geosoft.gxapi.GXGIS>` `GXLST <geosoft.gxapi.GXLST>` object already exists, it is destroyed and recreated to match the size of the input `GXLST <geosoft.gxapi.GXLST>`, before the input `GXLST <geosoft.gxapi.GXLST>` is copied to it. """ self._set_lst(lst) def set_meta(self, meta): """ Save the `GXMETA <geosoft.gxapi.GXMETA>` back to `GXGIS <geosoft.gxapi.GXGIS>` :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to save to `GXGIS <geosoft.gxapi.GXGIS>` meta :type meta: GXMETA .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta(meta) def set_triangulation_object_index(self, i_toi): """ Set the triangulation object index (Micromine) :param i_toi: Triangulation object index :type i_toi: int .. 
versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_triangulation_object_index(i_toi) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/om-extensions/test_project.py #import pydevd #pydevd.settrace('localhost', port=34765, stdoutToServer=True, stderrToServer=True) import json import geosoft.gxpy.project as gxprj ############################################################################################### def test_get_user_input(): gxprj.user_message('TEST', 'Starting get_user_info test') gxprj.pause('Testing pause\nSome descriptive text\nLine 2\nLine 3\nThis tests the ability for DGW to properly resize a a dialog..........') gxprj.pause('Testing pause with cancel',cancel=True) ret = gxprj.get_user_input('Testing string input','String is a bit long',default='test') print('string return: {}'.format(ret)) ret = gxprj.get_user_input('Testing float', 'Float', kind='float', default=1.5) print('float return: {}'.format(ret)) ret = gxprj.get_user_input('Testing int\nThis must be an integer value.', 'Int', kind='int', default=7) print('int return: {}'.format(ret)) ret = gxprj.get_user_input('Testing a list', 'List', kind='list', default='maki', items='maki, rider, explorer') print('list return: {}'.format(ret)) input("Simple inputs test finished, press enter to continue...") def test_file(): ret = gxprj.get_user_input('Testing a file', 'Any file', kind='file', default='anyfile.dat') print('file return: {}'.format(ret)) ret = gxprj.get_user_input('Testing a file', 'New file', kind='newfile', default='new.dat') print('new file return: {}'.format(ret)) ret = gxprj.get_user_input('Testing a file', 'Old file', kind='oldfile') print('old file return: 
{}'.format(ret)) ret = gxprj.get_user_input('Testing a file *.grd,*.gdb', 'Some file', kind='file', default='maki.dat', filemask="*.grd,*.gdb") print('grid file return: {}'.format(ret)) input("File inputs test finished, press enter to continue...") def test_multifile(): ret = gxprj.get_user_input('Testing a multi-file string default', 'Multiple files:', kind='file', default='maki.dat,mak2.dat;yamma.grd', filemask="**") print('multifile from string default return: {}'.format(ret)) ret = gxprj.get_user_input('Testing a multi-file list default', 'Multiple files:', kind='file', default=['maki.dat', 'list.', '4.5'], filemask="**") print('multifile from list default return: {}'.format(ret)) ret = gxprj.get_user_input('Testing a multi-file *.grd', 'Multiple grids:', kind='file', filemask="**,*.grd") print('multifile grid return: {}'.format(ret)) ret = gxprj.get_user_input('Testing a multi-file *.map,*.gdb', 'Multiple maps/gdb:', kind='file', filemask=["**", "*.map", "*.gdb"]) print('multifile map/gdb return: {}'.format(ret)) input("File inputs test finished, press enter to continue...") def test_scripting(): import os import geosoft.gxapi as gxa py_file = 'py_file.py' gs_file = 'project.gs' with open(py_file, 'w+') as f: f.write('import geosoft\n') f.write('def rungx():\n') f.write(' pass\n') gxprj.user_message('SCRIPTING TEST', 'Enter "project" in the next browse dialog.') gxa.GXSYS.do_command("[ID] ID_GX_RECORD") gxa.GXSYS.do_command("[GX] {}".format(py_file)) gxa.GXSYS.do_command("[ID] ID_GX_ENDRECORD") with open(gs_file, 'r') as f: script = f.read() gxprj.user_message('SCRIPTING TEST RESULT', 'The following should contain "GX py_file.py"\n{}'.format(script)) # run script gxa.GXSYS.set_interactive(0) gxa.GXSYS.run_gs(os.path.normpath(gs_file)) gxa.GXSYS.set_interactive(1) os.remove(gs_file) os.remove(py_file) def test_project(): project = gxprj.Geosoft_project() # open project in debugger and verify content of properties. 
pass def rungx(): gxprj.user_message('Running:', __file__) test_project() test_get_user_input() test_file() test_multifile() test_scripting() gxprj.user_message('Project test', "test finished") <file_sep>/geosoft/gxapi/GXVM.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVM(gxapi_cy.WrapVM): """ GXVM class. In-memory vector data methods The `GXVM <geosoft.gxapi.GXVM>` class will store vector (array) data in a memory buffer which can be accessed using the `GXVM <geosoft.gxapi.GXVM>` methods. The main use for the `GXVM <geosoft.gxapi.GXVM>` class is to store data in a single physical memory location. This memory can then be accessed by a user DLL using the `GXGEO.get_ptr_vm <geosoft.gxapi.GXGEO.get_ptr_vm>` function defined in gx_extern.h. `GXVM <geosoft.gxapi.GXVM>` memory can be any size, but a `GXVM <geosoft.gxapi.GXVM>` is intended for handling relatively small sets of data compared to a `GXVV <geosoft.gxapi.GXVV>`, which can work efficiently with very large volumes of data. The acceptable maximum `GXVM <geosoft.gxapi.GXVM>` size depends on the operating system and the performance requirements of an application. The best performance is achieved when all `GXVM <geosoft.gxapi.GXVM>` memory can be stored comfortably within the the available system RAM. If all `GXVM <geosoft.gxapi.GXVM>` memory will not fit in the system RAM, the operating system virtual memory manager will be used to swap memory to the operations systems virtual memory paging file. 
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref

### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
# Thin Python wrapper over the in-memory vector (VM) API exposed by
# gxapi_cy.WrapVM.  Every method delegates straight to the generated
# Cython layer; no additional state is kept on the Python side.
# NOTE(review): docstring typo fixes below ("the the", "operations
# systems") should also be upstreamed to the code-generator templates,
# otherwise the next regeneration will reintroduce them.
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXVM(gxapi_cy.WrapVM):
    """
    GXVM class.

    In-memory vector data methods
    The `GXVM <geosoft.gxapi.GXVM>` class will store vector (array)
    data in a memory buffer which can be accessed using the
    `GXVM <geosoft.gxapi.GXVM>` methods.
    The main use for the `GXVM <geosoft.gxapi.GXVM>` class is to store data in
    a single physical memory location.  This memory can then be accessed by a
    user DLL using the `GXGEO.get_ptr_vm <geosoft.gxapi.GXGEO.get_ptr_vm>`
    function defined in gx_extern.h.
    `GXVM <geosoft.gxapi.GXVM>` memory can be any size, but a
    `GXVM <geosoft.gxapi.GXVM>` is intended for handling relatively small sets
    of data compared to a `GXVV <geosoft.gxapi.GXVV>`, which can work
    efficiently with very large volumes of data.  The acceptable maximum
    `GXVM <geosoft.gxapi.GXVM>` size depends on the operating system and the
    performance requirements of an application.  The best performance is
    achieved when all `GXVM <geosoft.gxapi.GXVM>` memory can be stored
    comfortably within the available system RAM.
    If all `GXVM <geosoft.gxapi.GXVM>` memory will not fit in the system RAM,
    the operating system virtual memory manager will be used to swap memory
    to the operating system's virtual memory paging file.  Note that the
    operating system virtual memory manager is much slower than the manager
    used by Geosoft when working with very large arrays in a
    `GXVV <geosoft.gxapi.GXVV>`.
    See `GXVV <geosoft.gxapi.GXVV>` for methods to move data between a
    `GXVM <geosoft.gxapi.GXVM>` and a `GXVV <geosoft.gxapi.GXVV>`.
    """

    def __init__(self, handle=0):
        # handle 0 means "null instance"; the TLS geo context binds the
        # wrapper to the current thread's Geosoft session.
        super(GXVM, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXVM <geosoft.gxapi.GXVM>`

        :returns: A null `GXVM <geosoft.gxapi.GXVM>`
        :rtype:   GXVM
        """
        return GXVM()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    @classmethod
    def create(cls, type, elements):
        """
        Create a `GXVM <geosoft.gxapi.GXVM>`.

        :param type:      :ref:`GEO_VAR`
        :param elements:  `GXVM <geosoft.gxapi.GXVM>` length (less than 16777215)
        :type  type:      int
        :type  elements:  int

        :returns:         `GXVM <geosoft.gxapi.GXVM>` Object
        :rtype:           GXVM

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The `GXVM <geosoft.gxapi.GXVM>` elements are initialized to dummies.
        """
        # NOTE(review): 'type' shadows the Python builtin; the name is
        # fixed by the generated API surface and must not be changed here.
        ret_val = gxapi_cy.WrapVM._create(GXContext._get_tls_geo(), type, elements)
        return GXVM(ret_val)

    @classmethod
    def create_ext(cls, type, elements):
        """
        Create a `GXVM <geosoft.gxapi.GXVM>`, using one of the :ref:`GS_TYPES` special data types.

        :param type:      :ref:`GS_TYPES`
        :param elements:  `GXVM <geosoft.gxapi.GXVM>` length (less than 16777215)
        :type  type:      int
        :type  elements:  int

        :returns:         `GXVM <geosoft.gxapi.GXVM>` Object
        :rtype:           GXVM

        .. versionadded:: 6.4.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The `GXVM <geosoft.gxapi.GXVM>` elements are initialized to dummies.
        """
        ret_val = gxapi_cy.WrapVM._create_ext(GXContext._get_tls_geo(), type, elements)
        return GXVM(ret_val)

    def get_int(self, element):
        """
        Get an integer element from a `GXVM <geosoft.gxapi.GXVM>`.

        :param element:  Element wanted
        :type  element:  int

        :returns:        Element wanted, or `iDUMMY <geosoft.gxapi.iDUMMY>`
                         if the value is dummy or outside of the range of data.
        :rtype:          int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_int(element)
        return ret_val

    def get_string(self, element, str_val):
        """
        Get a string element from a `GXVM <geosoft.gxapi.GXVM>`.

        :param element:  Element wanted
        :param str_val:  String in which to place element
        :type  element:  int
        :type  str_val:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Returns element wanted, or blank string
        if the value is dummy or outside of the range of data.

        Type conversions are performed if necessary.  Dummy values
        are converted to "*" string.
        """
        # str_ref out-parameter: result is written back into str_val.value
        str_val.value = self._get_string(element, str_val.value.encode())

    def length(self):
        """
        Returns current `GXVM <geosoft.gxapi.GXVM>` length.

        :returns: # of elements in the `GXVM <geosoft.gxapi.GXVM>`.
        :rtype:   int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._length()
        return ret_val

    def re_size(self, newsize):
        """
        Re-set the size of a `GXVM <geosoft.gxapi.GXVM>`.

        :param newsize:  New size (number of elements)
        :type  newsize:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If increasing the `GXVM <geosoft.gxapi.GXVM>` size, new elements are set to dummies.
        """
        self._re_size(newsize)

    def get_double(self, element):
        """
        Get a real element from a `GXVM <geosoft.gxapi.GXVM>`.

        :param element:  Element wanted
        :type  element:  int

        :returns:        Element wanted, or `rDUMMY <geosoft.gxapi.rDUMMY>`
                         if the value is dummy or outside of the range of data.
        :rtype:          float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_double(element)
        return ret_val

    def set_int(self, element, value):
        """
        Set an integer element in a `GXVM <geosoft.gxapi.GXVM>`.

        :param element:  Element to set
        :param value:    Value to set
        :type  element:  int
        :type  value:    int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Element being set cannot be < 0.
        If the element is > current `GXVM <geosoft.gxapi.GXVM>` length, the
        `GXVM <geosoft.gxapi.GXVM>` length is increased.
        Reallocating `GXVM <geosoft.gxapi.GXVM>` lengths can lead
        to fragmented memory and should be avoided if possible.
        """
        self._set_int(element, value)

    def set_double(self, element, value):
        """
        Set a real element in a `GXVM <geosoft.gxapi.GXVM>`.

        :param element:  Element to set
        :param value:    Value to set
        :type  element:  int
        :type  value:    float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Element being set cannot be < 0.
        If the element is > current `GXVM <geosoft.gxapi.GXVM>` length, the
        `GXVM <geosoft.gxapi.GXVM>` length is increased.
        Reallocating `GXVM <geosoft.gxapi.GXVM>` lengths can lead
        to fragmented memory and should be avoided if possible.
        """
        self._set_double(element, value)

    def set_string(self, element, value):
        """
        Set a string element in a `GXVM <geosoft.gxapi.GXVM>`.

        :param element:  Element to set
        :param value:    String to set
        :type  element:  int
        :type  value:    str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Element being set cannot be < 0.
        If the element is > current `GXVM <geosoft.gxapi.GXVM>` length, the
        `GXVM <geosoft.gxapi.GXVM>` length is increased.
        Reallocating `GXVM <geosoft.gxapi.GXVM>` lengths can lead
        to fragmented memory and should be avoided if possible.
        """
        self._set_string(element, value.encode())


### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
:rtype: bool """ return self._internal_handle() == 0 # Plot Jobs @classmethod def convert_ubcip_2d_to_grid(cls, file, pg, vv_x, vv_z, x, z, cx, cz, reciprocal): """ Convert a UBC 2D model to a regular grid. :param file: Output grid file name :param pg: Model data :param vv_x: Model cells sizes (input) :param vv_z: Model cells sizes (input) :param x: Top-left corner X :param z: Top-left corner Z :param cx: Output grid cell size in X :param cz: Output grid cell size in Z :param reciprocal: Output reciprocal of values (0:No, 1:Yes) for resistivity? :type file: str :type pg: GXPG :type vv_x: GXVV :type vv_z: GXVV :type x: float :type z: float :type cx: float :type cz: float :type reciprocal: int .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Uses `GXTIN <geosoft.gxapi.GXTIN>` gridding to sample the model. By setting the final value, a resistivity grid can be created from conductivity data. """ gxapi_cy.WrapIP._convert_ubcip_2d_to_grid(GXContext._get_tls_geo(), file.encode(), pg, vv_x, vv_z, x, z, cx, cz, reciprocal) def create_default_job(self, ini, type): """ Create a default job from scratch. :param ini: File name of the INI file to create (forces correct suffix) :param type: :ref:`IP_PLOT` :type ini: str :type type: int .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._create_default_job(ini.encode(), type) def export_ubcip3(self, db, line, chan, error_chan, obs, topo, version): """ Export of `GXIP <geosoft.gxapi.GXIP>` data to UBC format. 
:param db: `GXDB <geosoft.gxapi.GXDB>` object :param line: Output line name :param chan: Output `GXIP <geosoft.gxapi.GXIP>` channel name :param error_chan: Output error channel name ("" for none) :param obs: Output OBS file name :param topo: Output TOPO file name :param version: Version number (3 or 5) :type db: GXDB :type line: str :type chan: str :type error_chan: str :type obs: str :type topo: str :type version: float .. versionadded:: 8.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Outputs a ``*.DAT`` file of the survey data for use in the UBC 2D inversion program IPINV2D. Include error channel output and version-specific formatting. """ self._export_ubcip3(db, line.encode(), chan.encode(), error_chan.encode(), obs.encode(), topo.encode(), version) @classmethod def export_ubcip_control(cls, control, n_iter, i_rest, chi_factor, obs, cond, mesh, topo, initial, ref_mod, alphas, wts): """ Export a control file for using in the UBC IPINV2D program. :param control: Output control file name :param n_iter: niter :param i_rest: irest :param chi_factor: chifact :param obs: `GXIP <geosoft.gxapi.GXIP>` obs file :param cond: Conductivity file :param mesh: Mesh file :param topo: Topography file :param initial: Initial model file :param ref_mod: Reference model :param alphas: Alphas :param wts: Weights file :type control: str :type n_iter: int :type i_rest: int :type chi_factor: float :type obs: str :type cond: str :type mesh: str :type topo: str :type initial: str :type ref_mod: str :type alphas: str :type wts: str .. versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** UBC Version 3 Control file. Outputs a control file for use in the UBC 2D `GXIP <geosoft.gxapi.GXIP>` inversion program IPINV2D. 
""" gxapi_cy.WrapIP._export_ubcip_control(GXContext._get_tls_geo(), control.encode(), n_iter, i_rest, chi_factor, obs.encode(), cond.encode(), mesh.encode(), topo.encode(), initial.encode(), ref_mod.encode(), alphas.encode(), wts.encode()) @classmethod def export_ubcip_control_v5(cls, control, n_iter, chi_factor, obs, topo, cond_selection, cond, mesh_selection, mesh, initial_selection, initial, reference_selection, ref_cond, alphas_selection, alphas, wts): """ Export a control file for using in the UBC IPINV2D program. :param control: Output control file name :param n_iter: niter :param chi_factor: chifact :param obs: RES obs file :param topo: Topography file (required) :param cond_selection: Conductivity type :ref:`IP_UBC_CONTROL` FILE or VALUE :param cond: Conductivity file (can be "") or value :param mesh_selection: Mesh type :ref:`IP_UBC_CONTROL` FILE, VALUE or DEFAULT :param mesh: Mesh file (can be "") or value :param initial_selection: Initial model type :ref:`IP_UBC_CONTROL` FILE, VALUE or DEFAULT :param initial: Initial model file (can be "") or value :param reference_selection: Reference model type :ref:`IP_UBC_CONTROL` FILE, VALUE or DEFAULT :param ref_cond: Reference model file (can be "") or value( :param alphas_selection: Alphas type :ref:`IP_UBC_CONTROL` FILE, VALUE, LENGTH or DEFAULT :param alphas: Alphas file (can be ""), value or length :param wts: Weights file :type control: str :type n_iter: int :type chi_factor: float :type obs: str :type topo: str :type cond_selection: int :type cond: str :type mesh_selection: int :type mesh: str :type initial_selection: int :type initial: str :type reference_selection: int :type ref_cond: str :type alphas_selection: int :type alphas: str :type wts: str .. versionadded:: 8.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** UBC Version 5 Control file. 
""" gxapi_cy.WrapIP._export_ubcip_control_v5(GXContext._get_tls_geo(), control.encode(), n_iter, chi_factor, obs.encode(), topo.encode(), cond_selection, cond.encode(), mesh_selection, mesh.encode(), initial_selection, initial.encode(), reference_selection, ref_cond.encode(), alphas_selection, alphas.encode(), wts.encode()) def export_ubc_res3(self, db, line, voltage_chan, current_chan, error_chan, obs, topo, version): """ Export of `GXIP <geosoft.gxapi.GXIP>` Resistivity data to UBC format. :param db: `GXDB <geosoft.gxapi.GXDB>` object :param line: Output line name :param voltage_chan: Output voltage channel name :param current_chan: Output current channel name :param error_chan: Output error channel name ("" for none) :param obs: Output OBS file name :param topo: Output TOPO file name :param version: Version number (3 or 5) :type db: GXDB :type line: str :type voltage_chan: str :type current_chan: str :type error_chan: str :type obs: str :type topo: str :type version: float .. versionadded:: 8.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Outputs a ``*.DAT`` file of the survey data for use in the UBC 2D inversion program DCINV2D. Voltage and current channels should be in units such that V/I gives volts/amp (or mV/mA). """ self._export_ubc_res3(db, line.encode(), voltage_chan.encode(), current_chan.encode(), error_chan.encode(), obs.encode(), topo.encode(), version) @classmethod def export_ubc_res_control(cls, control, n_iter, i_rest, chi_factor, obs, mesh, topo, initial, ref_cond, alphas, wts): """ Export a control file for using in the UBC DCINV2D program. 
:param control: Output control file name :param n_iter: niter :param i_rest: irest :param chi_factor: chifact :param obs: RES obs file :param mesh: Mesh file :param topo: Topography file (required) :param initial: Initial model file (can be "" or "NULL") :param ref_cond: Reference model conductivity :param alphas: Alphas :param wts: Weights file :type control: str :type n_iter: int :type i_rest: int :type chi_factor: float :type obs: str :type mesh: str :type topo: str :type initial: str :type ref_cond: float :type alphas: str :type wts: str .. versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** UBC Version 3. Outputs a control file for use in the UBC 2D resistivity inversion program DCINV2D. """ gxapi_cy.WrapIP._export_ubc_res_control(GXContext._get_tls_geo(), control.encode(), n_iter, i_rest, chi_factor, obs.encode(), mesh.encode(), topo.encode(), initial.encode(), ref_cond, alphas.encode(), wts.encode()) @classmethod def export_ubc_res_control_v5(cls, control, n_iter, chi_factor, obs, topo, mesh_selection, mesh, initial_selection, initial, reference_selection, ref_cond, alphas_selection, alphas, wts): """ Export a control file for using in the UBC DCINV2D program. 
:param control: Output control file name :param n_iter: niter :param chi_factor: chifact :param obs: RES obs file :param topo: Topography file (required) :param mesh_selection: Mesh type :ref:`IP_UBC_CONTROL` FILE, VALUE or DEFAULT :param mesh: Mesh file (can be "") or value :param initial_selection: Initial model type :ref:`IP_UBC_CONTROL` FILE, VALUE or DEFAULT :param initial: Initial model file (can be "") or value :param reference_selection: Reference model type :ref:`IP_UBC_CONTROL` FILE, VALUE or DEFAULT :param ref_cond: Reference model file (can be "") or value( :param alphas_selection: Alphas type :ref:`IP_UBC_CONTROL` FILE, VALUE, LENGTH or DEFAULT :param alphas: Alphas file (can be ""), value or length :param wts: Weights file :type control: str :type n_iter: int :type chi_factor: float :type obs: str :type topo: str :type mesh_selection: int :type mesh: str :type initial_selection: int :type initial: str :type reference_selection: int :type ref_cond: str :type alphas_selection: int :type alphas: str :type wts: str .. versionadded:: 8.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** UBC Version 5. Outputs a control file for use in the UBC 2D resistivity inversion program DCINV2D. """ gxapi_cy.WrapIP._export_ubc_res_control_v5(GXContext._get_tls_geo(), control.encode(), n_iter, chi_factor, obs.encode(), topo.encode(), mesh_selection, mesh.encode(), initial_selection, initial.encode(), reference_selection, ref_cond.encode(), alphas_selection, alphas.encode(), wts.encode()) def export_data_to_ubc_3d(self, db, line_lst, locations_only, include_z, chan, error_chan, mask_chan, ip_type, comments, obs): """ Export of `GXIP <geosoft.gxapi.GXIP>` data to UBC 3D `GXIP <geosoft.gxapi.GXIP>` format. :param db: `GXDB <geosoft.gxapi.GXDB>` object :param line_lst: Lines to export (Name, Symbol) :param locations_only: Locations only (0: No, 1: Yes)? 
:param include_z: Include Z values (0: No, 1: Yes)? :param chan: `GXIP <geosoft.gxapi.GXIP>` channel name (can be "" if exporting locations only) :param error_chan: Error channel name (can be "" if exporting locations only) :param mask_chan: Mask channel name (can be "") :param ip_type: IPTYPE (1: Vp, 2: Chargeability) :param comments: Comments (can be "") :param obs: Output OBS file name :type db: GXDB :type line_lst: GXLST :type locations_only: int :type include_z: int :type chan: str :type error_chan: str :type mask_chan: str :type ip_type: int :type comments: str :type obs: str .. versionadded:: 9.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Outputs a ``*.DAT`` file of the survey data for use in the UBC `GXIP <geosoft.gxapi.GXIP>` 3D inversion programs. """ self._export_data_to_ubc_3d(db, line_lst, locations_only, include_z, chan.encode(), error_chan.encode(), mask_chan.encode(), ip_type, comments.encode(), obs.encode()) @classmethod def import_ubc2_dmod(cls, file, type): """ Import a MOD file from the UBC IPINV2D program. :param file: UBC MOD file name to import :param type: 0 - CON, 1 - CHG :type file: str :type type: int :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Imports the MOD file values to a `GXPG <geosoft.gxapi.GXPG>` object. The CON/CHG selection is necessary because the import sets padding values to dummies based on the type of file. """ ret_val = gxapi_cy.WrapIP._import_ubc2_dmod(GXContext._get_tls_geo(), file.encode(), type) return GXPG(ret_val) @classmethod def import_ubc2_dmsh(cls, file, x, z, vv_x, vv_z): """ Import a MSH file from the UBC IPINV2D program. 
:param file: UBC MSH file to import :param x: Returned origin X (top left corner) :param z: Returned origin Z (top left corner) :param vv_x: Cell widths (left to right) (real) :param vv_z: Cell heights (top down) (real) :type file: str :type x: float_ref :type z: float_ref :type vv_x: GXVV :type vv_z: GXVV .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Imports the MSH file geometry. """ x.value, z.value = gxapi_cy.WrapIP._import_ubc2_dmsh(GXContext._get_tls_geo(), file.encode(), x.value, z.value, vv_x, vv_z) @classmethod def import_ubc_2d_topo(cls, file, elev0, vv_x, vv_z): """ Import a Topography file from the UBC IPINV2D program. :param file: UBC Topo file to import :param elev0: Returned top of mesh elevation :param vv_x: Topography X values :param vv_z: Topography Z values (elevations) :type file: str :type elev0: float_ref :type vv_x: GXVV :type vv_z: GXVV .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Imports the maximum elevation (top of mesh) as well as the topo (X, Z) values. """ elev0.value = gxapi_cy.WrapIP._import_ubc_2d_topo(GXContext._get_tls_geo(), file.encode(), elev0.value, vv_x, vv_z) def open_job(self, job, type): """ Open a `GXIP <geosoft.gxapi.GXIP>` plotting job :param job: Job file name :param type: Job type :ref:`IP_PLOT` :type job: str :type type: int .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._open_job(job.encode(), type) def save_job(self, job, type): """ Save a `GXIP <geosoft.gxapi.GXIP>` plotting job :param job: Job file name :param type: Job type :ref:`IP_PLOT` :type job: str :type type: int .. 
versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._save_job(job.encode(), type) @classmethod def trim_ubc_2d_model(cls, pg, trim_xl, trim_xr, trim_z, vv_x, vv_z, x): """ Trim the padding cells from the UBC IPINV2D Model. :param pg: Input model (unchanged) :param trim_xl: Cells to remove on left :param trim_xr: Cells to remove on right :param trim_z: Cells to remove on the bottom :param vv_x: Column widths (modified) :param vv_z: Row heights (modified) :param x: Top left corner X (modified) :type pg: GXPG :type trim_xl: int :type trim_xr: int :type trim_z: int :type vv_x: GXVV :type vv_z: GXVV :type x: float_ref :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The cells are removed from the left, right and bottom. The returned `GXPG <geosoft.gxapi.GXPG>` is the trimmed version. The input cell size VVs are also trimmed to match, and the origin is updated (still upper left corner). """ ret_val, x.value = gxapi_cy.WrapIP._trim_ubc_2d_model(GXContext._get_tls_geo(), pg, trim_xl, trim_xr, trim_z, vv_x, vv_z, x.value) return GXPG(ret_val) def write_distant_electrodes(self, db): """ Write distant electrode locations to channels :param db: `GXDB <geosoft.gxapi.GXDB>` object :type db: GXDB .. versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Writes values for ALL lines. 
    def write_distant_electrodes_lst(self, db, lst):
        """
        Write distant electrode locations to channels for a `GXLST <geosoft.gxapi.GXLST>` of lines

        :param db:   `GXDB <geosoft.gxapi.GXDB>` object
        :param lst:  Lines to write out
        :type  db:   GXDB
        :type  lst:  GXLST

        .. versionadded:: 6.4.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Writes values for lines in the input `GXLST <geosoft.gxapi.GXLST>`.
        """
        self._write_distant_electrodes_lst(db, lst)



# Miscellaneous


    def average_duplicates_qc(self, db, chan, qc_chan, out):
        """
        Average duplicate samples in a database.

        :param db:       Database to export from
        :param chan:     Mask or reference channel (required)
        :param qc_chan:  QC channel (can be left blank)
        :param out:      :ref:`IP_DUPLICATE`
        :type  db:       GXDB
        :type  chan:     str
        :type  qc_chan:  str
        :type  out:      int

        .. versionadded:: 7.3

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Averages all values with shared station and N values,
        as long as the mask channel is defined at that FID.
        Previous averaged values (IP_DATA_AVG) are overwritten according to
        the overwrite flag.
        If the QC channel is selected, only those rows of data where the QC
        channel value is "1" will be included in the average.
        """
        # channel names cross the C boundary as bytes, hence the .encode()
        self._average_duplicates_qc(db, chan.encode(), qc_chan.encode(), out)

    @classmethod
    def create(cls):
        """
        Create `GXIP <geosoft.gxapi.GXIP>`.

        :returns: `GXIP <geosoft.gxapi.GXIP>` Object
        :rtype:   GXIP

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = gxapi_cy.WrapIP._create(GXContext._get_tls_geo())
        return GXIP(ret_val)
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._export_ipdata(db, chan.encode(), title.encode()) def export_ipdata_dir(self, db, chan, title, dir): """ Exports data in the Geosoft IPDATA format in the specified directory :param db: Database to export from :param chan: Channel to export :param title: Title for IPDATA files :param dir: Directory for IPDATA files :type db: GXDB :type chan: str :type title: str :type dir: str .. versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._export_ipdata_dir(db, chan.encode(), title.encode(), dir.encode()) def export_ipred(self, db, title, chan, suffix, filter, wts, stn1, stn2, max_n): """ Exports pseudo-section in the Geosoft IPRED format. :param db: Database to export from :param title: Title for first line of file :param chan: Channel to process :param suffix: File suffix (type) :param filter: :ref:`IP_FILTER` :param wts: The Fraser Filter weights :param stn1: First Station position (`rDUMMY <geosoft.gxapi.rDUMMY>` for default) :param stn2: Last Station position (`rDUMMY <geosoft.gxapi.rDUMMY>` for default) :param max_n: Maximum n spacing :type db: GXDB :type title: str :type chan: str :type suffix: str :type filter: int :type wts: str :type stn1: float :type stn2: float :type max_n: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The Fraser Filter weights apply to each N expansion above, and are listed as w1,w2,w3,... Unspecified values beyond the list's end are set to 1.0. 
""" self._export_ipred(db, title.encode(), chan.encode(), suffix.encode(), filter, wts.encode(), stn1, stn2, max_n) def export_ipred_dir(self, db, title, chan, suffix, filter, wts, stn1, stn2, max_n, dir): """ Exports pseudo-section in the Geosoft IPRED format in the specified directory :param db: Database to export from :param title: Title for first line of file :param chan: Channel to process :param suffix: File suffix (type) :param filter: :ref:`IP_FILTER` :param wts: The Fraser Filter weights :param stn1: First Station position (`rDUMMY <geosoft.gxapi.rDUMMY>` for default) :param stn2: Last Station position (`rDUMMY <geosoft.gxapi.rDUMMY>` for default) :param max_n: Maximum n spacing :param dir: Directory to export to :type db: GXDB :type title: str :type chan: str :type suffix: str :type filter: int :type wts: str :type stn1: float :type stn2: float :type max_n: int :type dir: str .. versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The Fraser Filter weights apply to each N expansion above, and are listed as w1,w2,w3,... Unspecified values beyond the list's end are set to 1.0. """ self._export_ipred_dir(db, title.encode(), chan.encode(), suffix.encode(), filter, wts.encode(), stn1, stn2, max_n, dir.encode()) def export_line_ipdata(self, db, line, chan, title): """ Exports one line of data in the Geosoft IPDATA format. :param db: Database to export from :param line: Line to export :param chan: Channel to export :param title: Title for IPDATA files :type db: GXDB :type line: str :type chan: str :type title: str .. 
versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._export_line_ipdata(db, line.encode(), chan.encode(), title.encode()) def export_sgdf(self, db, file, chan, chan2): """ Exports data to a Scintrex Geophysical Data Format file. :param db: Database to export from :param file: SGDF file to create :param chan: Time Domain channel or Frequency Amplitude Channel :param chan2: Frequency Domain Phase channel (optional) :type db: GXDB :type file: str :type chan: str :type chan2: str .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._export_sgdf(db, file.encode(), chan.encode(), chan2.encode()) def get_n_value_lst(self, db, lst): """ Fill a list with unique N values in selected lines. :param db: Database :param lst: `GXLST <geosoft.gxapi.GXLST>` object :type db: GXDB :type lst: GXLST .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_n_value_lst(db, lst) def get_topo_line(self, db, line, x_min, x_max, x_inc, vv): """ Get topography values for a line. :param db: Database to import data to :param line: Line name :param x_min: Starting "X" (station) value (`rDUMMY <geosoft.gxapi.rDUMMY>` for default) :param x_max: Ending "X" (station) value (`rDUMMY <geosoft.gxapi.rDUMMY>` for default) :param x_inc: "X" increment along the line (`rDUMMY <geosoft.gxapi.rDUMMY>` for default = half "A" separation) :param vv: Returned topography values :type db: GXDB :type line: str :type x_min: float :type x_max: float :type x_inc: float :type vv: GXVV .. 
versionadded:: 6.4.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If topography info is available, returns values calculated for the input line. If no topography is available, returned values will be dummies. Values between actual data are interpolated using the Akima spline. Ends are extrapolated using the end data points. """ self._get_topo_line(db, line.encode(), x_min, x_max, x_inc, vv) def get_chan_domain(self, db, chan): """ Is this channel registered as a Time or Frequency domain channel? :param db: Database :param chan: Channel to check :type db: GXDB :type chan: str :returns: :ref:`IP_DOMAIN` :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._get_chan_domain(db, chan.encode()) return ret_val @classmethod def get_chan_label(cls, chan, label, units): """ Get the default label and units for a given channel. :param chan: Input channel :param label: Returned label :param units: Returned units :type chan: str :type label: str_ref :type units: str_ref .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ label.value, units.value = gxapi_cy.WrapIP._get_chan_label(GXContext._get_tls_geo(), chan.encode(), label.value.encode(), units.value.encode()) def get_channel_info(self, db, chan, domain, delay, n_windows, vv): """ Time Windows or Frequency info from a channel. 
        :param db:        (GXDB) Database
        :param chan:      (str) Channel to check
        :param domain:    (int_ref) :ref:`IP_DOMAIN`
        :param delay:     (float_ref) Delay or Base Frequency
        :param n_windows: (int_ref) Number of time windows or frequencies
        :param vv:        (GXVV) Time windows or frequencies

        .. versionadded:: 8.1

        **License:** Geosoft Extended End-User License
        """
        domain.value, delay.value, n_windows.value = self._get_channel_info(db, chan.encode(), domain.value, delay.value, n_windows.value, vv)

    def set_channel_info(self, db, chan, domain, delay, n_windows, vv):
        """
        Set Time Windows or Frequency info for a channel.

        :param db:        (GXDB) Database
        :param chan:      (str) Channel to check
        :param domain:    (int) :ref:`IP_DOMAIN`
        :param delay:     (float) Delay or Base Frequency
        :param n_windows: (int) Number of time windows or frequencies
        :param vv:        (GXVV) Time windows or frequencies

        .. versionadded:: 8.1

        **License:** Geosoft Extended End-User License
        """
        self._set_channel_info(db, chan.encode(), domain, delay, n_windows, vv)

    def import_dump(self, ip_sys, db, dump_file):
        """
        Imports data from an `GXIP <geosoft.gxapi.GXIP>` instrument dump file.

        :param ip_sys:    (int) :ref:`IP_SYS`
        :param db:        (GXDB) `GXDB <geosoft.gxapi.GXDB>` Handle
        :param dump_file: (str) Dump file name

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License
        """
        self._import_dump(ip_sys, db, dump_file.encode())

    def import_grid(self, db, grid, chan):
        """
        Imports data from a grid

        :param db:   (GXDB) Database to import data to
        :param grid: (str) The name of the grid file, with decorations
        :param chan: (str) The name of the channel to import to

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** Data is imported to the specified channel. The values are
        interpolated at each row's X and Y positions.
        """
        self._import_grid(db, grid.encode(), chan.encode())

    def import_i2_x(self, db, file, line, res_data, ip_data, res_model, ip_model, res_synth, ip_synth, res_poly, ip_poly, mode):
        """
        Imports an Interpex RESIX I2X format file to a line.

        :param db:        (GXDB) Database to import to
        :param file:      (str) Name of file to import
        :param line:      (str) Line to import to
        :param res_data:  (str) Resistivity (data) channel
        :param ip_data:   (str) `GXIP <geosoft.gxapi.GXIP>` (data) channel (can be "")
        :param res_model: (str) Image model resistivity channel (can be "")
        :param ip_model:  (str) Image model `GXIP <geosoft.gxapi.GXIP>` channel (can be "")
        :param res_synth: (str) Image model synthetic resistivity channel (can be "")
        :param ip_synth:  (str) Image model synthetic `GXIP <geosoft.gxapi.GXIP>` channel (can be "")
        :param res_poly:  (str) Resistivity (polygon) channel (can be "")
        :param ip_poly:   (str) `GXIP <geosoft.gxapi.GXIP>` (polygon) channel (can be "")
        :param mode:      (int) :ref:`IP_I2XIMPMODE`

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** Imports a single ".I2X" file to a specified line.
        If the line does not exist, it will be created.
        """
        self._import_i2_x(db, file.encode(), line.encode(), res_data.encode(), ip_data.encode(), res_model.encode(), ip_model.encode(), res_synth.encode(), ip_synth.encode(), res_poly.encode(), ip_poly.encode(), mode)

    def import_i2_x_ex(self, db, file, line, res_data, ip_data, res_model, ip_model, res_synth, ip_synth, res_poly, ip_poly, res_zonge, ip_zonge, mode):
        """
        Same as `import_i2_x <geosoft.gxapi.GXIP.import_i2_x>`, with Zonge data imported as well.

        :param db:        (GXDB) Database to import to
        :param file:      (str) Name of file to import
        :param line:      (str) Line to import to
        :param res_data:  (str) Resistivity (data) channel
        :param ip_data:   (str) `GXIP <geosoft.gxapi.GXIP>` (data) channel (can be "")
        :param res_model: (str) Image model resistivity channel (can be "")
        :param ip_model:  (str) Image model `GXIP <geosoft.gxapi.GXIP>` channel (can be "")
        :param res_synth: (str) Image model synthetic resistivity channel (can be "")
        :param ip_synth:  (str) Image model synthetic `GXIP <geosoft.gxapi.GXIP>` channel (can be "")
        :param res_poly:  (str) Resistivity (polygon) channel (can be "")
        :param ip_poly:   (str) `GXIP <geosoft.gxapi.GXIP>` (polygon) channel (can be "")
        :param res_zonge: (str) Zonge Resistivity channel (can be "")
        :param ip_zonge:  (str) Zonge `GXIP <geosoft.gxapi.GXIP>` channel (can be "")
        :param mode:      (int) :ref:`IP_I2XIMPMODE`

        .. versionadded:: 6.1

        **License:** Geosoft Extended End-User License

        **Note:** Imports a single ".I2X" file to a specified line.
        If the line does not exist, it will be created.
        """
        self._import_i2_x_ex(db, file.encode(), line.encode(), res_data.encode(), ip_data.encode(), res_model.encode(), ip_model.encode(), res_synth.encode(), ip_synth.encode(), res_poly.encode(), ip_poly.encode(), res_zonge.encode(), ip_zonge.encode(), mode)

    def import_instrumentation_gdd(self, db, file):
        """
        Imports an Instrumentation GDD format file.

        :param db:   (GXDB) Database to import to
        :param file: (str) GDD file name

        .. versionadded:: 8.0

        **License:** Geosoft Extended End-User License
        """
        self._import_instrumentation_gdd(db, file.encode())

    def import_ipdata(self, db, file, chan):
        """
        Imports data in the Geosoft IPDATA format.

        :param db:   (GXDB) Database to import to
        :param file: (str) IPDATA file name
        :param chan: (str) Channel to import to

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License
        """
        self._import_ipdata(db, file.encode(), chan.encode())

    def import_ipdata2(self, db, file, chan, chan2):
        """
        Imports data in the Geosoft IPDATA format - up to two arrays.

        :param db:    (GXDB) Database to import to
        :param file:  (str) IPDATA file name
        :param chan:  (str) Channel to import to (default is "`GXIP <geosoft.gxapi.GXIP>`")
        :param chan2: (str) (optional) Second channel to import to

        ..
        versionadded:: 5.1

        **License:** Geosoft Extended End-User License

        **Note:** The second channel may be specified for frequency domain data
        sets with two array channels; e.g. amplitude and phase, or real and
        imaginary parts. If the second channel is specified, and no time or
        frequency information is specified in the header (using the T= or F=
        fields) then the import is assumed to be frequency domain.
        """
        self._import_ipdata2(db, file.encode(), chan.encode(), chan2.encode())

    def import_ipred(self, db, file, chan):
        """
        Imports data from the Geosoft IPRED format.

        :param db:   (GXDB) Database to import to
        :param file: (str) File to import from
        :param chan: (str) Channel to import

        .. versionadded:: 5.1

        **License:** Geosoft Extended End-User License

        **Note:** This import produces a limited `GXIP <geosoft.gxapi.GXIP>` data
        set with no Current "I", Voltage "Vp" or Apparent Resistivity "ResApp"
        values.
        """
        self._import_ipred(db, file.encode(), chan.encode())

    def import_merge_ipred(self, db, file, chan):
        """
        Imports IPRED data to an existing line.

        :param db:   (GXDB) Database to import to
        :param file: (str) File to import from
        :param chan: (str) Channel to import

        .. versionadded:: 5.1.8

        **License:** Geosoft Extended End-User License

        **Note:** Exits with error if the line does not exist.
        Data is merged on basis of Stn and N value.
        """
        self._import_merge_ipred(db, file.encode(), chan.encode())

    def import_sgdf(self, db, file):
        """
        Imports data from a Scintrex Geophysical Data Format file.

        :param db:   (GXDB) Database to import to
        :param file: (str) SGDF file name

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License
        """
        self._import_sgdf(db, file.encode())

    def import_topo_csv(self, db, csv):
        """
        Imports topography data from a CSV line-station file

        :param db:  (GXDB) Database to calculate topography for
        :param csv: (str) The name of CSV file

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** The elevation of each point in the current database is
        interpolated from the input topography values.
        """
        self._import_topo_csv(db, csv.encode())

    def import_topo_grid(self, db, grid):
        """
        Imports topography data from a grid

        :param db:   (GXDB) Database to calculate topography for
        :param grid: (str) The name of the grid file, with decorations

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** The elevation of each point in the current database is
        interpolated from the input topography grid.
        """
        self._import_topo_grid(db, grid.encode())

    def import_zonge_avg(self, db, file, line, scale, mult):
        """
        Imports a Zonge AVG format file.

        :param db:    (GXDB) Database to import to
        :param file:  (str) FLD file name
        :param line:  (float) Line number (will be scaled if applicable)
        :param scale: (int) :ref:`IP_STNSCALE`
        :param mult:  (float) Line, station multiplier (for `IP_STNSCALE_VALUE <geosoft.gxapi.IP_STNSCALE_VALUE>`)

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** See `import_zonge_fld <geosoft.gxapi.GXIP.import_zonge_fld>`
        """
        self._import_zonge_avg(db, file.encode(), line, scale, mult)

    def import_zonge_fld(self, db, file, scale, mult):
        """
        Imports a Zonge FLD format file.

        :param db:    (GXDB) Database to import to
        :param file:  (str) FLD file name
        :param scale: (int) :ref:`IP_STNSCALE`
        :param mult:  (float) Line, station multiplier (for `IP_STNSCALE_VALUE <geosoft.gxapi.IP_STNSCALE_VALUE>`)

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** The Zonge Line and Station numbers may not be the X or Y
        position values, and a conversion is required. The line direction is
        taken from the `GXIP <geosoft.gxapi.GXIP>` setup values.
        """
        self._import_zonge_fld(db, file.encode(), scale, mult)

    def new_xy_database(self, db, new_db, chan_vv, mask, pr_n_val):
        """
        Create a subset database using a mask channel, "N" value

        :param db:       (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param new_db:   (GXDB) New `GXDB <geosoft.gxapi.GXDB>` object
        :param chan_vv:  (GXVV) Channel list
        :param mask:     (str) Mask channel
        :param pr_n_val: (float) "N" Value

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** A mask channel can be used to select a subset of the data.
        A single N value can also be selected (Dummy for all).
        """
        self._new_xy_database(db, new_db, chan_vv, mask.encode(), pr_n_val)

    def pseudo_plot(self, db, ini_file, cur_line, map):
        """
        Create pseudo-sections of a single line using a control file.
        :param db:       (GXDB) Database
        :param ini_file: (str) "IPPLOT" INI file name
        :param cur_line: (str) Current line name
        :param map:      (str) Map name to create

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** The control file is created using the IPPLTCON GX. It may then
        be modified by hand as required.
        """
        self._pseudo_plot(db, ini_file.encode(), cur_line.encode(), map.encode())

    def pseudo_plot2(self, db, ini_file, cur_line, tag, map):
        """
        Same as `pseudo_plot <geosoft.gxapi.GXIP.pseudo_plot>`, but specify a tag for grids created.

        :param db:       (GXDB) Database
        :param ini_file: (str) "IPPLOT" INI file name
        :param cur_line: (str) Current line name
        :param tag:      (str) Tag for created grids
        :param map:      (str) Map name to create

        .. versionadded:: 5.1.8

        **License:** Geosoft Extended End-User License

        **Note:** The control file is created using the IPPLTCON GX. It may then
        be modified by hand as required.
        """
        self._pseudo_plot2(db, ini_file.encode(), cur_line.encode(), tag.encode(), map.encode())

    def pseudo_plot2_dir(self, db, ini_file, cur_line, tag, map, dir):
        """
        Same as `pseudo_plot2 <geosoft.gxapi.GXIP.pseudo_plot2>`, but with directory specified.

        :param db:       (GXDB) Database
        :param ini_file: (str) "IPPLOT" INI file name
        :param cur_line: (str) Current line name
        :param tag:      (str) Tag for created grids
        :param map:      (str) Map name to create
        :param dir:      (str) Directory to create files

        .. versionadded:: 6.4

        **License:** Geosoft Extended End-User License

        **Note:** The control file is created using the IPPLTCON GX. It may then
        be modified by hand as required.
        """
        self._pseudo_plot2_dir(db, ini_file.encode(), cur_line.encode(), tag.encode(), map.encode(), dir.encode())

    def ps_stack(self, db, chan, con_file, map):
        """
        Create a stacked pseudo-section plot using a control file.

        :param db:       (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param chan:     (str) Channel to plot
        :param con_file: (str) "IPPLOT" INI file name
        :param map:      (str) Map name to create

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** The control file is created using the IPSTAKCON GX. It may then
        be modified by hand as required.
        """
        self._ps_stack(db, chan.encode(), con_file.encode(), map.encode())

    def ps_stack2(self, db, chan, con_file, type, map):
        """
        As `ps_stack <geosoft.gxapi.GXIP.ps_stack>`, but select section spacing option.

        :param db:       (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param chan:     (str) Channel to plot
        :param con_file: (str) "IPPLOT" INI file name
        :param type:     (int) :ref:`IP_STACK_TYPE`
        :param map:      (str) Map name to create

        .. versionadded:: 5.1

        **License:** Geosoft Extended End-User License
        """
        self._ps_stack2(db, chan.encode(), con_file.encode(), type, map.encode())

    def ps_stack2_dir(self, db, chan, con_file, type, map, dir):
        """
        Same as `pseudo_plot2 <geosoft.gxapi.GXIP.pseudo_plot2>`, but with directory specified.

        :param db:       (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param chan:     (str) Channel to plot
        :param con_file: (str) "IPPLOT" INI file name
        :param type:     (int) :ref:`IP_STACK_TYPE`
        :param map:      (str) Map name to create
        :param dir:      (str) Directory to create files

        .. versionadded:: 6.4

        **License:** Geosoft Extended End-User License
        """
        self._ps_stack2_dir(db, chan.encode(), con_file.encode(), type, map.encode(), dir.encode())

    def qc_chan_lst(self, db, lst):
        """
        Fill a list with QC channels.

        :param db:  (GXDB) Database
        :param lst: (GXLST) `GXLST <geosoft.gxapi.GXLST>` object to fill

        .. versionadded:: 7.3

        **License:** Geosoft Extended End-User License

        **Note:** Searches for the following QC channels existing in a database:
        QC, QC_RES.
        """
        self._qc_chan_lst(db, lst)

    def recalculate(self, db):
        """
        Recalculate derived channel values.

        :param db: (GXDB) Database

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** This function recalculates "derived" channel values from
        "core" data.

        1. Recalculates the "STN" and "N" channels, using the TX1, TX2, RX1 and
           RX2 channels (depending on the system).
        2. Recalculates the apparent resistivity "ResCalc", average "IP_Avg" and
           metal factor "MF" channels
        3. Recalculates the "X" and "Y" channels. One of these will be equal to
           "STN", the other to the internally stored line number for the current
           line.
        4. Recalculate the "Z" channel, based on the current "Topo" channel, and
           the "N" values.

        Warning: If you make a change to an electrode location, you would have
        to call `recalculate <geosoft.gxapi.GXIP.recalculate>`, then recalculate
        "Topo" (since the X and Y values would have changed), then call
        `recalculate_z <geosoft.gxapi.GXIP.recalculate_z>`, since "Z" values are
        based on "Topo" values.
        """
        self._recalculate(db)

    def recalculate_ex(self, db, recalculate_xyz):
        """
        Recalculate derived channel values, with option for including/excluding location calculations.

        :param db:              (GXDB) Database
        :param recalculate_xyz: (int) Recalculate XYZ locations (TRUE or FALSE)?

        .. versionadded:: 8.0

        **License:** Geosoft Extended End-User License

        **Note:** See `recalculate <geosoft.gxapi.GXIP.recalculate>`. This
        version allows you to suppress the recalculation of the current X, Y and
        Z channel values from the station locations.
        """
        self._recalculate_ex(db, recalculate_xyz)

    def recalculate_derived_data(self, db, line_handle, channel_list, recalculate_xyz):
        """
        Recalculate derived channel values.

        :param db:              (GXDB) Database
        :param line_handle:     (int) line handle [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param channel_list:    (GXLST) `GXLST <geosoft.gxapi.GXLST>` object - channel name overrides
        :param recalculate_xyz: (int) Recalculate XYZ locations (TRUE or FALSE)?

        .. versionadded:: 9.10

        **License:** Geosoft Extended End-User License

        **Note:** This function recalculates "derived" channel values from
        "core" data. It duplicates and extends the functionality of
        RecalateEx_IP by allowing for the input/output channel names to be
        overridden, with extra control on what is calculated.

        1. Recalculates the "STN" and "N" channels (depending on the system).
        2.
        Recalculates the apparent resistivity "ResCalc", average "IP_Avg" and
           metal factor "MF" channels
        3. Recalculates the "X" and "Y" channels. One of these will be equal to
           "STN", the other to the internally stored line number for the current
           line.
        4. Recalculate the "Z" channel, based on the current "Topo" channel, and
           the "N" values.

        Channel overrides are passed via a LST object, with the channel key
        (type) passed in the LST_ITEM_NAME part, and the channel name passed in
        the LST_ITEM_VALUE part.

        The following channel overrides supported (NOTE: Different behaviours
        for 2D and 3D arrays)

        3D Electrode location channels (IP_ARRAY_3D_XXX):
        "R1X", "R1Y", "R1Z", "R2X", "R2Y", "R2Z",
        "T1X", "T1Y", "T1Z", "T2X", "T2Y", "T2Z"
        (electrodes not included or set to "" are not read)

        or IN-LINE arrays (DPDP, PLDP, PLPL, GRAD)
        "R1X", "R2X", "T1X", "T2X" (In-line locations)
        "R1Y", "R2Y", "T1Y", "T2Y" (Across-line locations)
        "R1Z", "R2Z", "T1Z", "T2Z" (Z)
        (for any electrode not include or set to "" the default channel name
        For the array line direction, or the defined distant electrode location
        is used)

        Other input channels overridden if defined:
        "Vp" (primary voltage - must be in mV)
        "I" (current - must be in A)

        Other output channels overridden if defined (if you DON'T want the
        various output channels modified, then set the override values to ""):
        "MF" (metal factor) - formulation defined in settings,
        "AvgIP" (average IP)
        "AppRes" (apparent resistivity)
        "N" (Pseudo-section pseudo-depth)
        "Stn" (Station value)
        "X" (Station "X" value)
        "Y" (Station "Y" value)
        "Z" (Station "Z" value)
        "Topo" (Ground elevation at station location)

        Recalculating XYZ will result in any channel grid makers from the
        IPIMPGRID.GX being re-run, then if a maker exists for the Topo channel
        it is re-run, and finally the Z channel is recalculated
        (see "RecalculateZ_IP").
        """
        self._recalculate_derived_data(db, line_handle, channel_list, recalculate_xyz)

    def recalculate_z(self, db):
        """
        Recalculate Z channel values.

        :param db: (GXDB) Database

        .. versionadded:: 5.1.1

        **License:** Geosoft Extended End-User License

        **Note:** The "Z" channel values are calculated as follows:
        If the "Topo" value is defined, then Z = Topo - 0.5*N*A, where "N" is
        the N-spacing, and A is the A-spacing. If the Topography is not defined,
        then it is assumed to be equal to 0.

        .. seealso:: `recalculate <geosoft.gxapi.GXIP.recalculate>`
        """
        self._recalculate_z(db)

    def set_import_line(self, line):
        """
        Set the line name for some imports.

        :param line: (str) Line name

        .. versionadded:: 9.3

        **License:** Geosoft Extended End-User License

        **Note:** For some imports, no line name is derivable from the import
        itself.
        """
        self._set_import_line(line.encode())

    def set_import_mode(self, append):
        """
        When importing data to a line, set append/overwrite mode.

        :param append: (int) 0: Overwrite, 1: Append

        .. versionadded:: 6.3

        **License:** Geosoft Extended End-User License

        **Note:** By default, importing data overwrites existing data. Call this
        function before doing the import in order to append imported data to
        existing data. "Short" data channels will be dummied to the existing
        data length before the new data is appended.
        """
        self._set_import_mode(append)

    def window(self, db, va_chan, chan, windows):
        """
        Window an `GXIP <geosoft.gxapi.GXIP>` array channel to produce a normal channel.

        :param db:      (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param va_chan: (str) `GXVA <geosoft.gxapi.GXVA>` channel to use
        :param chan:    (str) New channel
        :param windows: (str) Window list

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License

        **Note:** The array channels cannot be used directly to produce
        sections. `window <geosoft.gxapi.GXIP.window>` allows the user to select
        one or more of the windows and create a new channel. In time domain, if
        more than one channel is selected a weighted sum is performed, according
        to window widths. In frequency domain a simple sum is performed.
        Window List Syntax:
        """
        self._window(db, va_chan.encode(), chan.encode(), windows.encode())

    @classmethod
    def winnow_chan_list(cls, lst):
        """
        Removes obviously non-pseudo-section type channels from list.

        :param lst: (GXLST) List of channels

        .. versionadded:: 5.0

        **License:** Geosoft Extended End-User License
        """
        gxapi_cy.WrapIP._winnow_chan_list(GXContext._get_tls_geo(), lst)

    @classmethod
    def winnow_chan_list2(cls, lst, db):
        """
        Same as `winnow_chan_list <geosoft.gxapi.GXIP.winnow_chan_list>`, but removes current X,Y,Z.

        :param lst: (GXLST) List of channels
        :param db:  (GXDB) Database

        .. versionadded:: 5.1.3

        **License:** Geosoft Extended End-User License
        """
        gxapi_cy.WrapIP._winnow_chan_list2(GXContext._get_tls_geo(), lst, db)

    def is_valid_line(self, db, line):
        """
        See if a given database line is registered for the `GXIP <geosoft.gxapi.GXIP>` system

        :param db:   (GXDB) Database
        :param line: (str) Line name
        :returns:    1 if the line is a valid `GXIP <geosoft.gxapi.GXIP>` line, 0 if not
        :rtype:      int

        .. versionadded:: 8.1

        **License:** Geosoft Extended End-User License
        """
        ret_val = self._is_valid_line(db, line.encode())
        return ret_val

    def line_array_type(self, db, line):
        """
        Return the type of `GXIP <geosoft.gxapi.GXIP>` array for the input line.
        If necessary, first imports the specified line into the `GXIP <geosoft.gxapi.GXIP>` object

        :param db:   (GXDB) Database
        :param line: (str) Line name
        :returns:    :ref:`IP_ARRAY`
        :rtype:      int

        .. versionadded:: 8.1

        **License:** Geosoft Extended End-User License
        """
        ret_val = self._line_array_type(db, line.encode())
        return ret_val

    def a_spacing(self, db, line):
        """
        Return the A-Spacing for the input line. If necessary, first imports
        the specified line into the `GXIP <geosoft.gxapi.GXIP>` object.

        :param db:   (GXDB) Database
        :param line: (str) Line name
        :returns:    The A-Spacing value. If there are multiple A-Spacings, the
                     base or smallest value. This value could be
                     `rDUMMY <geosoft.gxapi.rDUMMY>` for some arrays (such as
                     3D) where no A-Spacing is explicitly defined.
        :rtype:      float

        .. versionadded:: 8.1

        **License:** Geosoft Extended End-User License
        """
        ret_val = self._a_spacing(db, line.encode())
        return ret_val

    def pldp_convention(self):
        """
        Return the user's plot point convention for pole-dipole arrays.

        :returns: The user's PLDP plot point convention :ref:`IP_PLDP_CONV`
        :rtype:   int

        ..
        versionadded:: 8.1

        **License:** Geosoft Extended End-User License
        """
        ret_val = self._pldp_convention()
        return ret_val

    def get_electrode_locations_and_mask_values(self, db, line, tx_rx, v_vx, v_vy, v_vm1, v_vm2):
        """
        Get unique electrodes, along with current mask info.

        :param db:    (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param line:  (str) Line name ("" for all selected lines)
        :param tx_rx: (int) Electrode type. 0:Tx, 1:Rx
        :param v_vx:  (GXVV) X locations
        :param v_vy:  (GXVV) Y locations
        :param v_vm1: (GXVV) `GXIP <geosoft.gxapi.GXIP>` QC channel values ("QC" or "QC_IP")
        :param v_vm2: (GXVV) Resistivity QC channel values ("QC_RES")

        .. versionadded:: 9.0

        **License:** Geosoft Extended End-User License

        **Note:** The mask values are determined from the first row where a
        given electrode is found. Values returned for all currently selected
        lines.
        """
        self._get_electrode_locations_and_mask_values(db, line.encode(), tx_rx, v_vx, v_vy, v_vm1, v_vm2)

    def get_electrode_locations_and_mask_values2(self, db, line, tx_rx, v_vx, v_vy, v_vm1, v_vm2, v_vlines):
        """
        Get unique electrodes, along with current mask info.

        :param db:       (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param line:     (str) Line name ("" for all selected lines)
        :param tx_rx:    (int) Electrode type. 0:Tx, 1:Rx
        :param v_vx:     (GXVV) X locations
        :param v_vy:     (GXVV) Y locations
        :param v_vm1:    (GXVV) `GXIP <geosoft.gxapi.GXIP>` QC channel values ("QC" or "QC_IP")
        :param v_vm2:    (GXVV) Resistivity QC channel values ("QC_RES")
        :param v_vlines: (GXVV) Line symbol values (`GS_INT <geosoft.gxapi.GS_INT>`)

        .. versionadded:: 9.2

        **License:** Geosoft Extended End-User License

        **Note:** The mask values are determined from the first row where a
        given electrode is found. Values returned for all currently selected
        lines.
        """
        self._get_electrode_locations_and_mask_values2(db, line.encode(), tx_rx, v_vx, v_vy, v_vm1, v_vm2, v_vlines)

    def set_electrode_mask_values(self, db, line, tx_rx, v_vx, v_vy, v_vm1, v_vm2):
        """
        Set unique electrodes, along with current mask info.

        :param db:    (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param line:  (str) Line name ("" for all selected lines)
        :param tx_rx: (int) Electrode type. 0:Tx, 1:Rx
        :param v_vx:  (GXVV) X locations
        :param v_vy:  (GXVV) Y locations
        :param v_vm1: (GXVV) `GXIP <geosoft.gxapi.GXIP>` QC channel values ("QC" or "QC_IP")
        :param v_vm2: (GXVV) Resistivity QC channel values ("QC_RES")

        .. versionadded:: 9.0

        **License:** Geosoft Extended End-User License

        **Note:** Mask values are set for all included electrode locations,
        currently selected lines.
        """
        self._set_electrode_mask_values(db, line.encode(), tx_rx, v_vx, v_vy, v_vm1, v_vm2)

    def set_electrode_mask_values_single_qc_channel(self, db, line, tx_rx, qc_type, v_vx, v_vy, v_vm):
        """
        Set unique electrodes, along with current mask info.

        :param db:      (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param line:    (str) Line name ("" for all selected lines)
        :param tx_rx:   (int) Electrode type. 0:Tx, 1:Rx
        :param qc_type: (int) QC channel type. :ref:`IP_QCTYPE`
        :param v_vx:    (GXVV) X locations
        :param v_vy:    (GXVV) Y locations
        :param v_vm:    (GXVV) QC channel values ("QC")

        .. versionadded:: 9.2

        **License:** Geosoft Extended End-User License

        **Note:** Mask values are set for all included electrode locations,
        currently selected lines.
        """
        self._set_electrode_mask_values_single_qc_channel(db, line.encode(), tx_rx, qc_type, v_vx, v_vy, v_vm)

    @classmethod
    def get_qc_channel(cls, db, qc_type, chan):
        """
        Get the QC channel handle, if it exists.

        :param db:      (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param qc_type: (int) QC channel type. :ref:`IP_QCTYPE`
        :param chan:    (str_ref) String to place name into
        :returns:       Channel handle, `NULLSYMB <geosoft.gxapi.NULLSYMB>` if not found
        :rtype:         int

        .. versionadded:: 9.2

        **License:** Geosoft Extended End-User License

        **Note:** For `GXIP <geosoft.gxapi.GXIP>`, looks for "QC_IP", then
        "QC_OffTime", then "QC". For Resistivity, looks for "QC_Res", then
        "QC_OnTime" (case insensitive).
        """
        ret_val, chan.value = gxapi_cy.WrapIP._get_qc_channel(GXContext._get_tls_geo(), db, qc_type, chan.value.encode())
        return ret_val

    @classmethod
    def locate_contributing_electrodes(cls, db, map, rx1x, rx1y, rx2x, rx2y, tx1x, tx1y, tx2x, tx2y, sym_size):
        """
        Locate on a map electrodes selected in a database row.

        :param db:       (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param map:      (str) The current map
        :param rx1x:     (str) Rx1 X channel
        :param rx1y:     (str) Rx1 Y channel
        :param rx2x:     (str) Rx2 X channel
        :param rx2y:     (str) Rx2 Y channel
        :param tx1x:     (str) Tx1 X channel
        :param tx1y:     (str) Tx1 Y channel
        :param tx2x:     (str) Tx2 X channel
        :param tx2y:     (str) Tx2 Y channel
        :param sym_size: (float) Symbol size (mm)

        .. versionadded:: 9.4

        **License:** Geosoft Extended End-User License

        **Note:** Sets up an EXT object in the database that captures row/line
        change events and plots the electrodes for the selected row on an
        accompanying map. The EXT object is removed by running
        LaunchRemoveContributingElectrodesEXTTool_IPGUI. This EXT is not
        serialized, so it is also removed if the database is closed (since this
        is not the normal behaviour expected from a database).
        """
        gxapi_cy.WrapIP._locate_contributing_electrodes(GXContext._get_tls_geo(), db, map.encode(), rx1x.encode(), rx1y.encode(), rx2x.encode(), rx2y.encode(), tx1x.encode(), tx1y.encode(), tx2x.encode(), tx2y.encode(), sym_size)

    @classmethod
    def locate_contributing_electrodes_3d(cls, db, map, rx1x, rx1y, rx1z, rx2x, rx2y, rx2z, tx1x, tx1y, tx1z, tx2x, tx2y, tx2z, sym_size):
        """
        Locate on a 3D view electrodes selected in a database row.

        :param db:       (GXDB) `GXDB <geosoft.gxapi.GXDB>` object
        :param map:      (str) The current map
        :param rx1x:     (str) Rx1 X channel (required)
        :param rx1y:     (str) Rx1 Y channel (required)
        :param rx1z:     (str) Rx1 Z channel (assume zero elevation if not specified)
        :param rx2x:     (str) Rx2 X channel
        :param rx2y:     (str) Rx2 Y channel
        :param rx2z:     (str) Rx2 Z channel (assume zero elevation if not specified)
        :param tx1x:     (str) Tx1 X channel
        :param tx1y:     (str) Tx1 Y channel
        :param tx1z:     (str) Tx1 Z channel (assume zero elevation if not specified)
        :param tx2x:     (str) Tx2 X channel
        :param tx2y:     (str) Tx2 Y channel
        :param tx2z:     (str) Tx2 Z channel (assume zero elevation if not specified)
        :param sym_size: (float) Symbol size (ground units)

        ..
versionadded:: 9.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Sets up an EXT object in the database that captures row/line change events and plots the electrodes for the selected row on an accompanying 3D view. The EXT object is removed by running LaunchRemoveContributingElectrodesEXTTool_IPGUI. This EXT is not serialized, so it is also removed if the database is closed (since this is not the normal behaviour expected from a database). """ gxapi_cy.WrapIP._locate_contributing_electrodes_3d(GXContext._get_tls_geo(), db, map.encode(), rx1x.encode(), rx1y.encode(), rx1z.encode(), rx2x.encode(), rx2y.encode(), rx2z.encode(), tx1x.encode(), tx1y.encode(), tx1z.encode(), tx2x.encode(), tx2y.encode(), tx2z.encode(), sym_size) def get_grids_vv(self): """ Get a VV populated with grids created making pseudosections by this IP object :returns: `GXVV <geosoft.gxapi.GXVV>` Object :rtype: GXVV .. versionadded:: 9.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** You can use this list (for instance) to add the created grids to the project list. """ ret_val = self._get_grids_vv() return GXVV(ret_val) def get_line_data(self, db, line, vv_R1X, vv_R1Y, vv_R2X, vv_R2Y, vv_T1X, vv_T1Y, vv_T2X, vv_T2Y, vv_QC_IP, vv_QC_Res, data, vv_data): """ Get electrodes, data and mask values for a single line. 
:param db: `GXDB <geosoft.gxapi.GXDB>` object :param line: Line name ("" for all selected lines) :param vv_R1X: RX1 x locations (returned) :param vv_R1Y: RX1 y locations (returned) :param vv_R2X: RX2 x locations (returned) :param vv_R2Y: RX2 y locations (returned) :param vv_T1X: TX1 x locations (returned) :param vv_T1Y: TX1 y locations (returned) :param vv_T2X: TX2 x locations (returned) :param vv_T2Y: TX2 y locations (returned) :param vv_QC_IP: `GXIP <geosoft.gxapi.GXIP>` QC channel values ("QC" or "QC_IP") (returned) :param vv_QC_Res: Resistivity QC channel values ("QC_RES") (returned) :param data: data channel (optional) :param vv_data: data channel values (returned) :type db: GXDB :type line: str :type vv_R1X: GXVV :type vv_R1Y: GXVV :type vv_R2X: GXVV :type vv_R2Y: GXVV :type vv_T1X: GXVV :type vv_T1Y: GXVV :type vv_T2X: GXVV :type vv_T2Y: GXVV :type vv_QC_IP: GXVV :type vv_QC_Res: GXVV :type data: str :type vv_data: GXVV .. versionadded:: 9.10 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** True XY locations are returned for 2D arrays. Distant Electrode locations may be dummies. """ self._get_line_data(db, line.encode(), vv_R1X, vv_R1Y, vv_R2X, vv_R2Y, vv_T1X, vv_T1Y, vv_T2X, vv_T2Y, vv_QC_IP, vv_QC_Res, data.encode(), vv_data) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXPLY.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPLY(gxapi_cy.WrapPLY): """ GXPLY class. The `GXPLY <geosoft.gxapi.GXPLY>` object contains the definitions for one or more polygons, and does import and export of polygon files. """ def __init__(self, handle=0): super(GXPLY, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPLY <geosoft.gxapi.GXPLY>` :returns: A null `GXPLY <geosoft.gxapi.GXPLY>` :rtype: GXPLY """ return GXPLY() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def add_polygon(self, vv_x, vv_y): """ Add a polygon to the polygon file. :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`. :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`. :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_polygon(vv_x, vv_y) def add_polygon_ex(self, vv_x, vv_y, exclude): """ Add a polygon to the polygon file. :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`. :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`. :param exclude: bExclude :type vv_x: GXVV :type vv_y: GXVV :type exclude: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_polygon_ex(vv_x, vv_y, exclude) def change_ipj(self, ipj): """ Set the projection. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` to place in the `GXPLY <geosoft.gxapi.GXPLY>` :type ipj: GXIPJ .. 
versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXPLY <geosoft.gxapi.GXPLY>` is re-projected to the new projection. """ self._change_ipj(ipj) def clear(self): """ Clear/remove all polygons from the `GXPLY <geosoft.gxapi.GXPLY>`. .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clear() def copy(self, srce): """ Copies one `GXPLY <geosoft.gxapi.GXPLY>` Object to another :param srce: Source :type srce: GXPLY .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(srce) def is_valid(self): """ Ensure a polygon is valid :rtype: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_valid() return ret_val def combine(self, srce, exclude): """ Combines two `GXPLY <geosoft.gxapi.GXPLY>` Object with another :param srce: Source :param exclude: Make all the polygons copied excluded? - if not they will be copied as they are. :type srce: GXPLY :type exclude: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._combine(srce, exclude) @classmethod def create(cls): """ Creates a Polygon Object. :returns: `GXPLY <geosoft.gxapi.GXPLY>` Handle :rtype: GXPLY .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapPLY._create(GXContext._get_tls_geo()) return GXPLY(ret_val) @classmethod def create_s(cls, bf): """ Create an `GXPLY <geosoft.gxapi.GXPLY>` Object from a `GXBF <geosoft.gxapi.GXBF>` :param bf: `GXBF <geosoft.gxapi.GXBF>` to serialize from :type bf: GXBF :returns: `GXPLY <geosoft.gxapi.GXPLY>` Handle :rtype: GXPLY .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapPLY._create_s(GXContext._get_tls_geo(), bf) return GXPLY(ret_val) def extent(self, min_x, min_y, max_x, max_y): """ Get the extent of the current polygon. :param min_x: Min X :param min_y: Min Y :param max_x: Max X :param max_y: Max Y :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If there are no polygons in the `GXPLY <geosoft.gxapi.GXPLY>` object, returns dummies. """ min_x.value, min_y.value, max_x.value, max_y.value = self._extent(min_x.value, min_y.value, max_x.value, max_y.value) def get_ipj(self, ipj): """ Get the projection. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` in which to place the `GXPLY <geosoft.gxapi.GXPLY>` projection :type ipj: GXIPJ .. versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_ipj(ipj) def get_polygon(self, vv_x, vv_y, poly): """ Get a polygon from the `GXPLY <geosoft.gxapi.GXPLY>` :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`. :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`. :param poly: Polygon number :type vv_x: GXVV :type vv_y: GXVV :type poly: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_polygon(vv_x, vv_y, poly) def get_polygon_ex(self, vv_x, vv_y, poly, exclude): """ Get a polygon from the `GXPLY <geosoft.gxapi.GXPLY>` :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`. :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`. :param poly: Polygon number :param exclude: TRUE if exclusion polygon :type vv_x: GXVV :type vv_y: GXVV :type poly: int :type exclude: int_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ exclude.value = self._get_polygon_ex(vv_x, vv_y, poly, exclude.value) def clip_area(self, min_x, min_y, max_x, max_y): """ Clip a polygon to an area :param min_x: Min X :param min_y: Min Y :param max_x: Max X :param max_y: Max y :type min_x: float :type min_y: float :type max_x: float :type max_y: float :returns: :ref:`PLY_CLIP` :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._clip_area(min_x, min_y, max_x, max_y) return ret_val def clip_line_int(self, min_x, min_y, max_x, max_y, vv, inc, first): """ Clips a line in or out of the polygons for intersections (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`). Intersections are returned as fiducials down the line stored in `GXVV <geosoft.gxapi.GXVV>` starting at the first point of the line. 
Examples: No intersection: `PLY_LINE_CLIP_OUTSIDE <geosoft.gxapi.PLY_LINE_CLIP_OUTSIDE>`, 0 intersections Starts outside, ends inside: `PLY_LINE_CLIP_OUTSIDE <geosoft.gxapi.PLY_LINE_CLIP_OUTSIDE>`, 1 intersection Starts outside, intersects then ends inside or outside: `PLY_LINE_CLIP_OUTSIDE <geosoft.gxapi.PLY_LINE_CLIP_OUTSIDE>`, 2 intersections Starts inside, ends inside : `PLY_LINE_CLIP_INSIDE <geosoft.gxapi.PLY_LINE_CLIP_INSIDE>`, 1 intersection (gives end-of-line) Starts inside, ends outside : `PLY_LINE_CLIP_INSIDE <geosoft.gxapi.PLY_LINE_CLIP_INSIDE>`, 1 intersection :param min_x: Min X of line to clip :param min_y: Min Y of line to clip :param max_x: Max X of line to clip :param max_y: Max y of line to clip :param vv: DOUBLE `GXVV <geosoft.gxapi.GXVV>` holding intersection fids :param inc: Data element increment (precision) :param first: First point value (:ref:`PLY_LINE_CLIP` value) :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type vv: GXVV :type inc: float :type first: int_ref :returns: 0, Terminates on error (you can ignore this value) :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, first.value = self._clip_line_int(min_x, min_y, max_x, max_y, vv, inc, first.value) return ret_val def clip_ply(self, pply_b, pply_c): """ Clip one polygon against another :param pply_b: Polygon B :param pply_c: Resulting clipped region :type pply_b: GXPLY :type pply_c: GXPLY :returns: :ref:`PLY_CLIP` :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Resulting clipped polygon only has inclusive regions of the clipped area. Exclusion polygons are treated as included areas. 
""" ret_val = self._clip_ply(pply_b, pply_c) return ret_val def clip_point(self, x, y): """ Clips a point in or out of the polygon. Point is inside: `PLY_POINT_CLIP_INSIDE <geosoft.gxapi.PLY_POINT_CLIP_INSIDE>` Point is outside: `PLY_POINT_CLIP_OUTSIDE <geosoft.gxapi.PLY_POINT_CLIP_OUTSIDE>` An error occurred: `PLY_POINT_CLIP_ERROR <geosoft.gxapi.PLY_POINT_CLIP_ERROR>` :param x: Point X :param y: Point Y :type x: float :type y: float :returns: :ref:`PLY_POINT_CLIP` :rtype: int .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._clip_point(x, y) return ret_val def get_description(self, desc): """ Get the `GXPLY <geosoft.gxapi.GXPLY>` description string :param desc: Polygon description :type desc: str_ref .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ desc.value = self._get_description(desc.value.encode()) def num_poly(self): """ Get the number of polygons. :returns: Number of polygons in the `GXPLY <geosoft.gxapi.GXPLY>`. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_poly() return ret_val def load_table(self, table): """ Loads Polygons from a Polygon file. :param table: Name of the polygon file File contains coordinates of one or more polygons :type table: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._load_table(table.encode()) def area(self): """ Compute the Area of a polygon :returns: Area of a polygon :rtype: float .. 
versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Excluded polygons have negative area. """ ret_val = self._area() return ret_val def rectangle(self, min_x, min_y, max_x, max_y): """ Creates a polygon from a rectangular area. :param min_x: Min X :param min_y: Min Y :param max_x: Max X :param max_y: Max Y :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._rectangle(min_x, min_y, max_x, max_y) def rotate(self, x, y, rot): """ Rotate a polygon about a point. :param x: Rotation point, X :param y: Rotation point, Y :param rot: Rotation angle, CCW in degrees :type x: float :type y: float :type rot: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._rotate(x, y, rot) def save_table(self, table): """ Save Polygons to a Polygon file. :param table: Name of the polygon file :type table: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save_table(table.encode()) def serial(self, bf): """ Serialize an `GXPLY <geosoft.gxapi.GXPLY>` to a `GXBF <geosoft.gxapi.GXBF>` :param bf: `GXBF <geosoft.gxapi.GXBF>` to serialize to :type bf: GXBF .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._serial(bf) def set_description(self, desc): """ Set the `GXPLY <geosoft.gxapi.GXPLY>` description string :param desc: Polygon description :type desc: str .. 
versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_description(desc.encode()) def set_ipj(self, ipj): """ Set the projection. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` to place in the `GXPLY <geosoft.gxapi.GXPLY>` :type ipj: GXIPJ .. versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This changes the projection information only. """ self._set_ipj(ipj) def thin(self, thin): """ Thin polygons to a desired resolution :param thin: Thining resolution :type thin: float .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Points on the polygon that deviate from a line drawn between neighboring points by more than the thining resolution will be removed. """ self._thin(thin) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/view.py """ Views, which can be 2D or 3D, contain groups of graphical elements that can be displayed to a user in a Geosoft Map viewer or a Geosoft 3D viewer. Geosoft maps can contain any number of 2D or 3D views. Views contain one or more :class:`geosoft.gxpy.group.Group` instances. 2D views can contain 2D groups, while 3D views can contain both 2D and 3D groups. :Classes: :`View`: single 2D plane view :`View_3d`: 3D view in a `geosoft.3dv` file, or a 3D view on a 2D map. :`CrookedPath`: defines the path for a crooked section. Both 2D and 3D views can be placed on a :class:`geosoft.gxpy.map.Map`, though 3D views are stored in a `geosoft_3dv` file which can also be viewed separately from a map. 
:Constants: :READ_ONLY: `geosoft.gxapi.MVIEW_READ` :WRITE_NEW: `geosoft.gxapi.MVIEW_WRITENEW` :WRITE_OLD: `geosoft.gxapi.MVIEW_WRITEOLD` :UNIT_VIEW: 0 :UNIT_MAP: 2 :UNIT_VIEW_UNWARPED: 3 :GROUP_ALL: 0 :GROUP_MARKED: 1 :GROUP_VISIBLE: 2 :GROUP_AGG: 3 :GROUP_CSYMB: 4 :GROUP_VOXD: 5 :GROUP_VECTORVOX: 6 :GROUP_SURFACE: 7 :EXTENT_ALL: `geosoft.gxapi.MVIEW_EXTENT_ALL` :EXTENT_VISIBLE: `geosoft.gxapi.MVIEW_EXTENT_VISIBLE` :EXTENT_CLIPPED: `geosoft.gxapi.MVIEW_EXTENT_CLIP` .. seealso:: :mod:`geosoft.gxpy.map`, :mod:`geosoft.gxpy.group` :mod:`geosoft.geosoft.gxapi.GXMVIEW` .. note:: Regression tests provide usage examples: `View tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_view.py>`_ """ import os import numpy as np from typing import NamedTuple import geosoft import geosoft.gxapi as gxapi from . import gx as gx from . import coordinate_system as gxcs from . import utility as gxu from . import map as gxmap from . import metadata as gxmeta from . import geometry as gxgeo from . import spatialdata as gxspd from . import vv as gxvv __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class ViewException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.view`. .. versionadded:: 9.2 """ pass def _crooked_path_from_ipj(gxipj): if gxipj.get_orientation() != gxapi.IPJ_ORIENT_SECTION_CROOKED: raise ViewException(_t('This coordinate system does not define a crooked path')) dvv = gxvv.GXvv() xvv = gxvv.GXvv() yvv = gxvv.GXvv() log_z = gxapi.int_ref() gxipj.get_crooked_section_view_v_vs(dvv.gxvv, xvv.gxvv, yvv.gxvv, log_z) return dvv, xvv, yvv, log_z.value class CrookedPath(gxgeo.Geometry): """ Description of a crooked (x, y) path that defines a crooked-section view, or a crooked-section grid. .. 
versionadded:: 9.4 """ def __str__(self): return 'CrookedPath "{}", {} points'.format(self.name, len(self)) def __init__(self, xy_path, log_z=False, **kw): super().__init__(**kw) if isinstance(xy_path, gxcs.Coordinate_system): self.coordinate_system = xy_path xy_path = xy_path.gxipj if isinstance(xy_path, gxapi.GXIPJ): d, x, y, self._log_z = _crooked_path_from_ipj(xy_path) self._xy = np.empty((x.length, 2)) self._xy[:, 0] = x.np self._xy[:, 1] = y.np self.coordinate_system = gxcs.Coordinate_system(xy_path) else: if not isinstance(xy_path, gxgeo.PPoint): xy_path = gxgeo.PPoint(xy_path, coordinate_system=self.coordinate_system) self._xy = xy_path.xy self.coordinate_system = xy_path.coordinate_system self._log_z = bool(log_z) # calculate a distance along the path dnp = np.zeros(len(self._xy), dtype=np.float64) dx = (self._xy[1:, 0] - self._xy[:-1, 0]) ** 2 dy = (self._xy[1:, 1] - self._xy[:-1, 1]) ** 2 dxy = np.sqrt((dx + dy)) dnp[1:] = dxy self._distances = dnp.cumsum() def __len__(self): return len(self._xy) @property def xy(self): """Path trace as an array (npoints, 2).""" return self._xy @property def distances(self): """Distances along path points as an array (npoints) starting at 0.""" return self._distances @property def ppoint(self): """Path trace as a `geosoft.gxpy.geometry.PPoint` instance.""" return gxgeo.PPoint(self._xy, coordinate_system=self.coordinate_system) def set_in_geosoft_ipj(self, coordinate_system): """ Set the crooked-path in the `geosoft.gxapi.GXIPJ` instance of the coordinate system. Geosoft stores crooked-path information in the GXIPJ, from which views are able to :param coordinate_system: .. 
versionadded:: 9.4 """ # make vv's to set the path dvv = gxvv.GXvv(self._distances) xvv = gxvv.GXvv(self._xy[:, 0]) yvv = gxvv.GXvv(self._xy[:, 1]) coordinate_system.gxipj.set_crooked_section_view(dvv.gxvv, xvv.gxvv, yvv.gxvv, self._log_z) @property def extent(self): return self.ppoint.extent class PlaneReliefSurfaceInfo(NamedTuple): """ Information about a relief surface assigned to a plane. The following properties are represented: surface_grid_name: grid file name refine: relief refinement between 1 (low) and 4 (high). Default is 3. base: base value in grid, will be at z=0. Default is 0. scale: scale to apply to grid after removing base, default is 1. min: minimum clip in unscaled grid values max: maximum clip in unscaled grid values .. versionadded:: 9.9 """ surface_grid_name: str refine: int base: float scale: float min: float max: float def delete_files(v3d_file): """ Delete a v3d file with associated files. Just calls `geosoft.gxpy.map.delete_files`. The view must be closed. :param v3d_file: View_3d file name .. versionadded:: 9.3.1 """ gxmap.delete_files(v3d_file) def _plane_err(plane, view): raise ViewException(_t('Plane "{}" does not exist in view "{}"'.format(plane, view))) VIEW_NAME_SIZE = 2080 READ_ONLY = gxapi.MVIEW_READ WRITE_NEW = gxapi.MVIEW_WRITENEW WRITE_OLD = gxapi.MVIEW_WRITEOLD UNIT_VIEW = 0 UNIT_MAP = 2 UNIT_VIEW_UNWARPED = 3 GROUP_ALL = 0 GROUP_MARKED = 1 GROUP_VISIBLE = 2 GROUP_AGG = 3 GROUP_CSYMB = 4 GROUP_VOXD = 5 GROUP_VECTORVOX = 6 GROUP_SURFACE = 7 _group_selector = (None, None, None, gxapi.MVIEW_IS_AGG, gxapi.MVIEW_IS_CSYMB, gxapi.MVIEW_IS_VOXD, gxapi.MVIEW_IS_VECTOR3D, None) EXTENT_ALL = gxapi.MVIEW_EXTENT_ALL EXTENT_VISIBLE = gxapi.MVIEW_EXTENT_VISIBLE EXTENT_CLIPPED = gxapi.MVIEW_EXTENT_CLIP class View(gxgeo.Geometry): """ Geosoft view class. :Constructors: :`open`: open an existing view in a map :`new`: create a new view in a map .. 
versionadded:: 9.2 """ def __enter__(self): return self def __exit__(self, xtype, xvalue, xtraceback): self.__del__() def __del__(self): if hasattr(self, '_close'): self._close() def _close(self): if hasattr(self, '_open'): if self._open: self._gxview = None self._pen = None self._map = None # release map self._open = False def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return self.name def __init__(self, map, name="_unnamed_view", mode=WRITE_OLD, coordinate_system=None, map_location=(0, 0), area=(0, 0, 30, 20), scale=100, copy=None, gxmview=None, **kwargs): if not isinstance(map, geosoft.gxpy.map.Map): raise ViewException(_t('First argument must be a map.')) super().__init__(**kwargs) self._gx = gx.gx() self._map = map if gxmview is not None: name_ref = gxapi.str_ref() gxmview.get_name(name_ref) name = name_ref.value self._name = map.classview(name) self._gxview = gxmview else: self._name = map.classview(name) if mode == WRITE_OLD and not map.has_view(self._name): mode = WRITE_NEW self._gxview = gxapi.GXMVIEW.create(self._map.gxmap, self._name, mode) self._mode = mode self._lock = None self._open = True self._cs = None self._clip_mode = False if mode == WRITE_NEW: self.locate(coordinate_system, map_location, area, scale) if copy: with View(map, name=copy, mode=READ_ONLY) as v: v.gxview.mark_all_groups(1) v.gxview.copy_marked_groups(self.gxview) else: ipj = gxapi.GXIPJ.create() self.gxview.get_ipj(ipj) self._cs = gxcs.Coordinate_system(ipj) metres_per = self._cs.metres_per_unit self._uname = self._cs.units_name if metres_per <= 0.: raise ViewException(_t('Invalid units {}({})'.format(self._uname, metres_per))) self._metres_per_unit = 1.0 / metres_per @classmethod def from_gxapi(cls, gxmap, gxmview): """ Instantiate View from gxapi instance. :param gxmap: a gxapi.CGXMAP :param gxmview: a gxapi.CGXMVIEW .. 
            versionadded:: 9.9
        """
        return cls(geosoft.gxpy.map.Map.from_gxapi(gxmap), gxmview=gxmview)

    @classmethod
    def new(cls, map=None, name="_unnamed_view", coordinate_system=None, map_location=(0, 0),
            area=(0, 0, 30, 20), scale=100, copy=None, crooked_path=None):
        """
        Create a new view on a map.

        :parameters:
            :map:               :class:`geosoft.gxpy.map.Map` instance, if not specified a new unique
                                default map is created and deleted when this session finishes.
            :name:              view name, default is "_unnamed_view".
            :coordinate_system: coordinate system as a `geosoft.gxpy.coordinate_system.Coordinate_system`
                                instance, or one of the Coordinate_system constructor types.
            :map_location:      (x, y) view location on the map, in map cm, default (0, 0)
            :area:              (min_x, min_y, max_x, max_y) area in view units,
                                default (0, 0, 30, 20)
            :scale:             Map scale if a coordinate system is defined.  If the coordinate
                                system is not defined this is view units per map metre.
            :copy:              name of a view to copy into the new view.
            :crooked_path:      provide a `CrookedPath` instance to create a section view along
                                a wandering path.  Should the coordinate system already contain
                                a crooked path it will be replaced.

        .. versionadded:: 9.2
        """

        if map is None:
            # no map supplied: create a temporary default map to host the view
            map = gxmap.Map.new()
        view = cls(map, mode=WRITE_NEW, name=name, coordinate_system=coordinate_system,
                   map_location=map_location, area=area, scale=scale, copy=copy)
        if crooked_path:
            if not isinstance(crooked_path, CrookedPath):
                crooked_path = CrookedPath(crooked_path, coordinate_system=view.coordinate_system)
            # the crooked path is persisted inside the view's coordinate-system IPJ
            crooked_path.set_in_geosoft_ipj(view.coordinate_system)
        return view

    @classmethod
    def open(cls, map, view_name, read_only=False):
        """
        Open an existing view on a map.

        :param map:         :class:`geosoft.gxpy.map.Map`
        :param view_name:   name of the view
        :param read_only:   True to open read-only

        ..
versionadded:: 9.2 """ if not map.has_view(view_name): raise ViewException(_t('Map does not have a view named \'{}\''.format(view_name))) if read_only: mode = READ_ONLY else: mode = WRITE_OLD view = cls(map, name=view_name, mode=mode) return view @property def lock(self): """ True if the view is locked by a group. Only one group may hold a lock on a view at the same time. When drawing with groups you should use a `with gxgrp.Draw(...) as g:` which will ensure group locks are properly created and released. """ return self._lock @lock.setter def lock(self, group): if group: if self.lock: raise ViewException(_t('View is locked by group {}.').format(self.lock)) self._lock = group else: self._lock = None @property def is_crooked_path(self): """True if this grid follows a crooked path section.""" return self.coordinate_system.gxipj.get_orientation() == gxapi.IPJ_ORIENT_SECTION_CROOKED def crooked_path(self): """ Return the `CrookedPath` instance for a crooked-path view. .. versionadded::9.4 """ if not self.is_crooked_path: raise ViewException(_t("This is not a crooked-path view.")) return CrookedPath(self.coordinate_system) @property def clip(self): """ Current view clip mode for groups, applies to groups following in this stream. Can be set. .. versionadded:: 9.3.1 """ return self._clip_mode @clip.setter def clip(self, mode): self._clip_mode = bool(mode) self.gxview.group_clip_mode(int(mode)) @property def metadata(self): """ Return the view/map metadata as a dictionary. Can be set, in which case the dictionary items passed will be added to, or replace existing metadata. All views on a map share the metadata with the map. .. 
versionadded:: 9.2 """ return self.map.metadata @metadata.setter def metadata(self, meta): self.map.metadata = meta @property def coordinate_system(self): """ :class:`geosoft.gxpy.coordinate_system.Coordinate_system` instance of the view.""" if self._cs is None: self._cs = gxcs.Coordinate_system() return self._cs @property def gxview(self): """ The :class:`geosoft.gxapi.GXIPJ` instance handle.""" return self._gxview @coordinate_system.setter def coordinate_system(self, cs): if not isinstance(cs, gxcs.Coordinate_system): cs = gxcs.Coordinate_system(cs) self._cs = gxcs.Coordinate_system(cs) metres_per = self._cs.metres_per_unit self._uname = self._cs.units_name if metres_per <= 0.: raise ViewException(_t('Invalid units {}({})'.format(self._uname, metres_per))) self._metres_per_unit = 1.0 / metres_per self.gxview.set_ipj(self._cs.gxipj) @property def map_scale(self): """ Map scale for this view. Can be set, in which case the entire view will move on the map. """ return self.gxview.get_map_scale() @map_scale.setter def map_scale(self, s): if s > 0.0: self.gxview.re_scale(s) def close(self): """ Close a view. Use to close a view when working outside of a `with ... as:` construct. .. versionadded:: 9.2 """ self._close() def add_child_files(self, file_list): """ Add files to the list of child files for this view. :param file_list: file, or a list of files to add .. versionaddded 9.3.1 """ meta = self.metadata node = 'geosoft/dataset/map/views/' + self.name + '/child_files' child_files = gxmeta.get_node_from_meta_dict(node, meta) if child_files is None: child_files = [] if isinstance(file_list, str): child_files.append(file_list) else: for f in file_list: if f not in child_files: child_files.append(f) gxmeta.set_node_in_meta_dict(node, meta, child_files) self.metadata = meta def locate(self, coordinate_system=None, map_location=None, area=None, scale=None): """ Locate and scale the view on the map. 
:parameters: :coordinate_system: coordinate system as a class:`gxpy.coordinate_system.Coordinate_system` instance, or one of the Coordinate_system constructor types. :map_location: New (x, y) view location on the map, in map cm. :area: New (min_x, min_y, max_x, max_y) area in view units :scale: New scale in view units per map metre, either as a single value or (x_scale, y_scale), defaults to the current x scale. .. versionadded:: 9.2 """ if self._mode == READ_ONLY: raise ViewException(_t('Cannot modify a READ_ONLY view.')) # coordinate system if coordinate_system: self.coordinate_system = coordinate_system upm = self.units_per_metre if area is None: area = self.extent_clip # area and scale if scale is None: if self.scale is None: raise ViewException(_t('A scale is required.')) scale = self.scale if hasattr(scale, "__iter__"): x_scale, y_scale = scale else: x_scale = y_scale = scale a_minx, a_miny, a_maxx, a_maxy = area if map_location is None: map_location = (0., 0.) mm_minx = map_location[0] * 10.0 mm_miny = map_location[1] * 10.0 mm_maxx = mm_minx + (a_maxx - a_minx) * 1000.0 / upm / x_scale mm_maxy = mm_miny + (a_maxy - a_miny) * 1000.0 / upm / y_scale self.gxview.fit_window(mm_minx, mm_miny, mm_maxx, mm_maxy, a_minx, a_miny, a_maxx, a_maxy) self.gxview.set_window(a_minx, a_miny, a_maxx, a_maxy, UNIT_VIEW) @property def map(self): """ :class:`geosoft.gxpy.map.Map` instance that contains this view.""" return self._map @property def name(self): """ Name of the view""" return self._name @property def is_3d(self): """True if this is a 3D view""" return bool(self.gxview.is_view_3d()) @property def units_per_metre(self): """view units per view metres (eg. a view in 'ft' will be 3.28084)""" return 1.0 / self.coordinate_system.metres_per_unit @property def units_per_map_cm(self): """view units per map cm. (eg. 
a view in ft, with a scale of 1:12000 returns 393.7 ft/cm)""" return self.gxview.scale_mm() * 10.0 @property def units_name(self): """name of the view distance units""" return self.coordinate_system.units_name @property def guid(self): """ The view GUID. .. versionadded:: 9.3 """ sr = gxapi.str_ref() self.gxview.get_guid(sr) return sr.value def mdf(self, base_view=None): """ Returns the Map Description File specification for this view as a data view. :param base_view: name of the base view on the map from which to calculate margins. If not specified only the left and bottom margin is calculated based on the view clip minimum location and the right and top margins will be 0. :returns: ((x_size, y_size, margin_bottom, margin_right, margin_top, margin_left), (scale, units_per_metre, x_origin, y_origin)) .. versionadded: 9.2 """ view_mnx, view_mny, view_mxx, view_mxy = self.extent_clip map_mnx, map_mny = self.view_to_map_cm(view_mnx, view_mny) map_mxx, map_mxy = self.view_to_map_cm(view_mxx, view_mxy) if base_view: if not isinstance(base_view, View): base_view = View(self.map, base_view) _, _, mapx, mapy = base_view.extent_clip mapx, mapy = base_view.view_to_map_cm(mapx, mapy) else: mapx, mapy = map_mxx, map_mxy m1 = (mapx, mapy, map_mny, mapx - map_mxx, mapy - map_mxy, map_mnx) m2 = (self.scale, self.units_per_metre, view_mnx, view_mny) return m1, m2 def _groups(self, gtype=GROUP_ALL): def gdict(what): self.gxview.list_groups(gxlst, what) return gxu.dict_from_lst(gxlst) gxlst = gxapi.GXLST.create(VIEW_NAME_SIZE) if gtype == GROUP_ALL: return list(gdict(gxapi.MVIEW_GROUP_LIST_ALL)) elif gtype == GROUP_MARKED: return list(gdict(gxapi.MVIEW_GROUP_LIST_MARKED)) elif gtype == GROUP_VISIBLE: return list(gdict(gxapi.MVIEW_GROUP_LIST_VISIBLE)) # filter by type wanted gd = gdict(gxapi.MVIEW_GROUP_LIST_ALL) groups = [] if gtype == GROUP_SURFACE: for g in gd: if g[:5] == 'SURF_': groups.append(g) else: isg = _group_selector[gtype] for g in gd: if self.gxview.is_group(g, isg): 
                    groups.append(g)

        return groups

    @property
    def group_list(self):
        """list of group names in this view"""
        return self._groups()

    @property
    def group_list_marked(self):
        """list of marked group names in this view"""
        return self._groups(GROUP_MARKED)

    @property
    def group_list_visible(self):
        """list of visible group names in this view"""
        return self._groups(GROUP_VISIBLE)

    @property
    def group_list_agg(self):
        """list of aggregate group names in this view"""
        return self._groups(GROUP_AGG)

    @property
    def group_list_csymb(self):
        """list of csymb group names in this view"""
        return self._groups(GROUP_CSYMB)

    @property
    def group_list_voxel(self):
        """list of voxel group names in this view"""
        return self._groups(GROUP_VOXD)

    @property
    def group_list_vectorvoxel(self):
        """list of vectorvoxel group names in this view"""
        return self._groups(GROUP_VECTORVOX)

    @property
    def group_list_surface(self):
        """list of surface group names in this view"""
        return self._groups(GROUP_SURFACE)

    def has_group(self, group):
        """ Returns True if the map contains this group by name."""
        return bool(self.gxview.exist_group(group))

    def _extent(self, what):
        # Query the view extent for the requested extent kind (clip/all/visible),
        # always in UNIT_VIEW units, returned as (x_min, y_min, x_max, y_max).
        xmin = gxapi.float_ref()
        ymin = gxapi.float_ref()
        xmax = gxapi.float_ref()
        ymax = gxapi.float_ref()
        self.gxview.extent(what, UNIT_VIEW, xmin, ymin, xmax, ymax)
        return xmin.value, ymin.value, xmax.value, ymax.value

    @property
    def extent(self):
        """
        View clip extent as a `geosoft.gxpy.geometry.Point2`.

        ..
        versionadded:: 9.3.1
        """
        cs = self.coordinate_system
        ex2d = self.extent_clip
        if self.is_crooked_path:
            # Crooked section: the (x, y) extent comes from the path itself; only the
            # z range is derived from the 2D clip extent of the section plane.
            min_x, min_y, max_x, max_y = self.crooked_path().extent_xy
            min_z = cs.xyz_from_oriented((ex2d[0], ex2d[1], 0.0))[2]
            max_z = cs.xyz_from_oriented((ex2d[0], ex2d[3], 0.0))[2]
        else:
            # Transform all four corners of the 2D clip extent out of the oriented
            # plane into (x, y, z) space and take the bounding range of each ordinate.
            xyz0 = cs.xyz_from_oriented((ex2d[0], ex2d[1], 0.0))
            xyz1 = cs.xyz_from_oriented((ex2d[2], ex2d[1], 0.0))
            xyz2 = cs.xyz_from_oriented((ex2d[2], ex2d[3], 0.0))
            xyz3 = cs.xyz_from_oriented((ex2d[0], ex2d[3], 0.0))
            min_x = min(xyz0[0], xyz1[0], xyz2[0], xyz3[0])
            min_y = min(xyz0[1], xyz1[1], xyz2[1], xyz3[1])
            min_z = min(xyz0[2], xyz1[2], xyz2[2], xyz3[2])
            max_x = max(xyz0[0], xyz1[0], xyz2[0], xyz3[0])
            max_y = max(xyz0[1], xyz1[1], xyz2[1], xyz3[1])
            max_z = max(xyz0[2], xyz1[2], xyz2[2], xyz3[2])
        return gxgeo.Point2(((min_x, min_y, min_z), (max_x, max_y, max_z)), self.coordinate_system)

    @property
    def extent_clip(self):
        """clip extent of the view as (x_min, y_min, x_max, y_max)"""
        return self._extent(gxapi.MVIEW_EXTENT_CLIP)

    @property
    def extent_all(self):
        """extent of all groups in the view as (x_min, y_min, x_max, y_max)"""
        return self._extent(gxapi.MVIEW_EXTENT_ALL)

    @property
    def extent_visible(self):
        """extent of visible groups in the view as (x_min, y_min, x_max, y_max)"""
        return self._extent(gxapi.MVIEW_EXTENT_VISIBLE)

    def extent_map_cm(self, extent=None):
        """
        Return an extent in map cm.

        :param extent: tuple returned from one of the extent properties.  Default is
                       :attr:`extent_clip`.

        ..
versionadded:: 9.2 """ if extent is None: extent = self.extent_clip xmin, ymin = self.view_to_map_cm(extent[0], extent[1]) xmax, ymax = self.view_to_map_cm(extent[2], extent[3]) return xmin, ymin, xmax, ymax @property def scale(self): """map scale for the view""" return 1000.0 * self.gxview.scale_mm() * self.coordinate_system.metres_per_unit @property def aspect(self): """view aspect ratio, usually 1.""" return self.gxview.scale_ymm() / self.gxview.scale_mm() def extent_group(self, group, unit=UNIT_VIEW): """ Extent of a group :param group: group name :param unit: units: :: UNIT_VIEW UNIT_MAP :returns: extent as (x_min, y_min, x_max, y_max) .. versionadded: 9.2 """ xmin = gxapi.float_ref() ymin = gxapi.float_ref() xmax = gxapi.float_ref() ymax = gxapi.float_ref() self.gxview.get_group_extent(group, xmin, ymin, xmax, ymax, unit) if unit == UNIT_MAP: xmin.value *= 0.1 xmax.value *= 0.1 ymin.value *= 0.1 ymax.value *= 0.1 return xmin.value, ymin.value, xmax.value, ymax.value def delete_group(self, group_name): """ Delete a group from a map. Nothing happens if the view does not contain this group. :param group_name: Name of the group to delete. .. versionadded:: 9.2 """ self.gxview.delete_group(group_name) def map_cm_to_view(self, x, y=None): """ Returns the location of this point on the map (in cm) to the view location in view units. :param x: x, or a tuple (x,y), in map cm :param y: y if x is not a tuple .. versionadded:: 9.2 """ if y is None: y = x[1] x = x[0] xr = gxapi.float_ref() xr.value = x * 10.0 yr = gxapi.float_ref() yr.value = y * 10.0 self.gxview.plot_to_view(xr, yr) return xr.value, yr.value def view_to_map_cm(self, x, y=None): """ Returns the location of this point on the map in the view. :param x: x, or a tuple (x,y), in view units :param y: y if x is not a tuple .. 
versionadded:: 9.2 """ if y is None: y = x[1] x = x[0] xr = gxapi.float_ref() xr.value = x yr = gxapi.float_ref() yr.value = y self.gxview.view_to_plot(xr, yr) return xr.value / 10.0, yr.value / 10.0 def get_class_name(self, view_class): """ Get the name associated with a view class. :param view_class: desired class in this view Common view class names are:: 'Plane' the name of the default 2D drawing plane 'Section' a section view Other class names may be defined, though they are not used by Geosoft. :returns: name associated with the class, '' if not defined. .. versionadded:: 9.2 """ sr = gxapi.str_ref() self.gxview.get_class_name(view_class, sr) return sr.value.lower() def set_class_name(self, view_class, name): """ Set the name associated with a class. :param view_class: class name in this view :param name: name of the view associated with this class. Common view class names are:: 'Plane' the name of the default 2D drawing plane 'Section' a section view .. versionadded:: 9.2 """ self.gxview.set_class_name(view_class, name) class View_3d(View): """ Geosoft 3D views, which contain 3D drawing groups. Geosoft 3D views are stored in a file with extension `.geosoft_3dv`. A 3d view is required to draw 3D elements using :class:`geosoft.gxpy.group.Draw_3d`, which must be created from a :class:`geosoft.gxpy.view.View_3d` instance. 3D views also contain 2D drawing planes on which :class:`geosoft.gxpy.group.Draw` groups are placed. A default horizontal plane at elevation 0, named 'plane_0' is created when a new 3d view is created. Planes are horizontal and flat by default, but can be provided a grid that defines the plane surface relief, which is intended for creating things like terrain surfaces on which 2d graphics are rendered. Planes can also be oriented within the 3D space to create sections, or for other more esoteric purposes. 
    :Constructors:

        ============ =============================
        :meth:`open` open an existing geosoft_3dv
        :meth:`new`  create a new geosoft_3dv
        ============ =============================

    .. versionadded:: 9.2
    """

    def __init__(self, file_name, mode, _internal=False, map=None, gxmview=None, **kwargs):

        # Direct construction is not supported; instances must come from the
        # `open`, `new` or `from_gxapi` class constructors, which pass _internal=True.
        if not _internal:
            raise ViewException(_t("Must be called by a class constructor 'open' or 'new' or 'from_gxapi'"))

        if map and gxmview:
            # wrapping an already-open gxapi map/view pair (from_gxapi path)
            super().__init__(map, gxmview=gxmview, **kwargs)
        else:
            # open or create the backing .geosoft_3dv file; a 3D view is always named '3D'
            file_name = geosoft.gxpy.map.map_file_name(file_name, 'geosoft_3dv')
            map = geosoft.gxpy.map.Map(file_name=file_name, mode=mode, _internal=True)
            super().__init__(map, '3D', **kwargs)
        self._extent3d = None

    def _extent_union(self, extent):
        """Expand the extent"""
        # NOTE(review): this helper updates self._extent, while __init__ and add_extent
        # track self._extent3d. Confirm self._extent is defined by the base View class;
        # otherwise the first call here would raise AttributeError, or the union would
        # never be reflected in the `extent` property (which returns self._extent3d).
        if self._extent is None:
            self._extent = gxgeo.Point2(extent, self.coordinate_system)
        else:
            self._extent = self._extent.union(extent)

    @classmethod
    def from_gxapi(cls, gxmap, gxmview):
        """
        Instantiate View_3d from gxapi instance.

        :param gxmap: a gxapi.CGXMAP
        :param gxmview: a gxapi.CGXMVIEW

        .. versionadded:: 9.9
        """
        return cls(file_name=None, mode=WRITE_OLD, _internal=True,
                   map=geosoft.gxpy.map.Map.from_gxapi(gxmap), gxmview=gxmview)

    @classmethod
    def new(cls, file_name=None, area_2d=None, overwrite=False, **kwargs):
        """
        Create a new 3D view.

        :param file_name:   name for the new 3D view file (.geosoft_3dv added).  If not
                            specified a unique temporary file is created.
        :param area_2d:     2D drawing extent for the default 2D drawing plane
        :param overwrite:   True to overwrite an existing 3DV

        ..
versionadded:: 9.2 """ if file_name is None: file_name = gx.gx().temp_file('.geosoft_3dv') else: file_name = geosoft.gxpy.map.map_file_name(file_name, 'geosoft_3dv') if not overwrite: if os.path.isfile(file_name): raise ViewException(_t('Cannot overwrite existing file: {}').format(file_name)) g_3dv = cls(file_name, geosoft.gxpy.map.WRITE_NEW, area=area_2d, _internal=True, **kwargs) map_minx, map_miny, map_maxx, map_maxy = g_3dv.extent_map_cm(g_3dv.extent_clip) view_minx, view_miny, view_maxx, view_maxy = g_3dv.extent_clip # make this a 3D view h3dn = gxapi.GX3DN.create() g_3dv.gxview.set_3dn(h3dn) g_3dv.gxview.fit_map_window_3d(map_minx, map_miny, map_maxx, map_maxy, view_minx, view_miny, view_maxx, view_maxy) return g_3dv @classmethod def open(cls, file_name, **kw): """ Open an existing geosoft_3dv file. :param file_name: name of the geosoft_3dv file .. versionadded:: 9.2 """ file_name = geosoft.gxpy.map.map_file_name(file_name, 'geosoft_3dv') if not os.path.isfile(file_name): raise ViewException(_t('geosoft_3dv file not found: {}').format(file_name)) g_3dv = cls(file_name, geosoft.gxpy.map.WRITE_OLD, _internal=True) # read extents from the metadata try: g_3dv.add_extent(gxspd.extent_from_metadata(g_3dv.metadata)) except KeyError: pass return g_3dv def __exit__(self, xtype, xvalue, xtraceback): self.__del__() def __del__(self): if hasattr(self, 'close'): self.close() def close(self): """close the view, releases resources.""" if hasattr(self, 'map'): if self.map: self.map.close() if hasattr(self, '_close'): self._close() def add_extent(self, extent): """ Expand current extent to include this extent. :param extent: extent as a `geosoft.gxpy.geometry.Geometry` or Point2 constructor TODO: review once issue #75 is resolved. .. versionadded:: 9.3.1 """ self._extent3d = gxgeo.extent_union(self._extent3d, extent) @property def extent(self): """ Extent of 3D objects in this view. :return: `geosoft.gxpy.geometry.Point2` instance TODO: review once issue #75 is resolved. .. 
versionadded:: 9.3.1 """ return self._extent3d @property def file_name(self): """ the `geosoft_3dv` file name""" return self.map.file_name @property def name(self): """the view name""" return self.map.name @property def current_3d_drawing_plane(self): """Current drawing plane name in a 3D view, `None` if not defined. Can be set to a plane number or name.""" if len(self.plane_list): s = gxapi.str_ref() self.gxview.get_def_plane(s) return s.value else: return None @current_3d_drawing_plane.setter def current_3d_drawing_plane(self, plane): if plane: if isinstance(plane, int): plane = self.plane_name(plane) if plane not in self.plane_list: self.new_drawing_plane(plane) self.gxview.set_def_plane(plane) @property def current_3d_drawing_plane_number(self): """The current drawing plane number, can be set.""" return self.plane_number(self.current_3d_drawing_plane) @current_3d_drawing_plane_number.setter def current_3d_drawing_plane_number(self, plane): self.current_3d_drawing_plane = plane @property def plane_list(self): """list of drawing planes in the view""" gxlst = gxapi.GXLST.create(VIEW_NAME_SIZE) self.gxview.list_planes(gxlst) return list(gxu.dict_from_lst(gxlst)) def plane_name(self, plane): """Return the name of a numbered plane""" if isinstance(plane, str): if self.gxview.find_plane(plane) == -1: _plane_err(plane, self.name) return plane gxlst = gxapi.GXLST.create(VIEW_NAME_SIZE) self.gxview.list_planes(gxlst) item = gxlst.find_item(gxapi.LST_ITEM_VALUE, str(plane)) if item == -1: _plane_err(plane, self.name) sr = gxapi.str_ref() gxlst.gt_item(gxapi.LST_ITEM_NAME, item, sr) return sr.value def plane_number(self, plane): """Return the plane number of a plane, or None if plane does not exist.""" if plane: if isinstance(plane, int): self.plane_name(plane) return plane plane_number = self.gxview.find_plane(plane) if plane_number == -1: _plane_err(plane, self.name) else: return plane_number else: return None def delete_plane(self, plane): """ Delete a plane, and all 
        content

        :param plane: plane number or plane name

        .. versionadded:: 9.3.1
        """
        if isinstance(plane, str):
            plane = self.plane_number(plane)
        try:
            self.gxview.delete_plane(plane, True)
        except gxapi.GXError:
            # best-effort delete: an API failure (e.g. the plane cannot be removed)
            # is deliberately ignored rather than propagated
            pass

    def has_plane(self, plane):
        """
        True if the view contains plane

        :param plane:   name of the plane
        :returns:       True if the plane exists in the view

        .. versionadded:: 9.2
        """
        # plane_number raises a ViewException for an unknown plane, which is
        # interpreted here as "not present"
        try:
            self.plane_number(plane)
            return True
        except ViewException:
            return False

    def groups_on_plane_list(self, plane=0):
        """
        List of groups on a plane.

        :param plane:   name of the plane or plane number
        :returns:       list of groups on the plane

        .. versionadded:: 9.2
        """
        gxlst = gxapi.GXLST.create(VIEW_NAME_SIZE)
        if isinstance(plane, str):
            plane = self.plane_number(plane)
        self.gxview.list_plane_groups(plane, gxlst)
        return list(gxu.dict_from_lst(gxlst))

    def new_drawing_plane(self, name, rotation=(0., 0., 0.), offset=(0., 0., 0.), scale=(1., 1., 1.)):
        """
        Create a new drawing plane in a 3d view.

        :param name:        name of the plane; a ViewException is raised if a plane
                            with this name already exists
        :param rotation:    plane rotation as (rx, ry, rz), default (0, 0, 0)
        :param offset:      (x, y, z) offset of the plane, default (0, 0, 0)
        :param scale:       (xs, ys, zs) axis scaling, default (1, 1, 1)

        :raises ViewException: if the plane already exists

        .. versionadded:: 9.2
        """
        if self.has_plane(name):
            raise ViewException(_t('3D drawing plane "{}" exists.'.format(name)))
        self.gxview.create_plane(str(name))
        self.gxview.set_plane_equation(self.plane_number(name),
                                       rotation[0], rotation[1], rotation[2],
                                       offset[0], offset[1], offset[2],
                                       scale[0], scale[1], scale[2])

    def get_plane_relief_surface_info(self, plane):
        """
        Get relief surface parameters for a plane.

        :param plane:   plane number or plane name
        :returns:       relief surface properties
        :rtype:         :class:`geosoft.gxpy.view.PlaneReliefSurfaceInfo`

        ..
versionadded::9.2 """ if isinstance(plane, str): plane = self.plane_number(plane) surface_grid_name = gxapi.str_ref() sample = gxapi.int_ref() base = gxapi.float_ref() scale = gxapi.float_ref() min_ref = gxapi.float_ref() max_ref = gxapi.float_ref() self.gxview.get_plane_surface(plane, surface_grid_name) self.gxview.get_plane_surf_info(plane, sample, base, scale, min_ref, max_ref) refine = 1 + int(sample.value / 16) min_val = None if min_ref.value == gxapi.rDUMMY else min_ref.value max_val = None if max_ref.value == gxapi.rDUMMY else max_ref.value return PlaneReliefSurfaceInfo(surface_grid_name.value, refine, base.value, scale.value, min_val, max_val) def set_plane_relief_surface(self, surface_grid_name, refine=3, base=0, scale=1, min=None, max=None): """ Establish a relief surface for the current plane based on a grid. :param surface_grid_name: grid file name :param refine: relief refinement between 1 (low) and 4 (high). Default is 3. :param base: base value in grid, will be at z=0. Default is 0. :param scale: scale to apply to grid after removing base, default is 1. :param min: minimum clip in unscaled grid values :param max: maximum clip in unscaled grid values .. versionadded:: 9.3 """ if not self.current_3d_drawing_plane: name = os.path.basename(surface_grid_name).split('.')[0] self.current_3d_drawing_plane = name self.gxview.set_plane_surface(self.current_3d_drawing_plane_number, surface_grid_name) if min is None: min = gxapi.rDUMMY if max is None: max = gxapi.rDUMMY refine = int(refine) if refine <= 1: refine = 1 elif refine >= 4: refine = 48 else: refine = (refine - 1) * 16 self.gxview.set_plane_surf_info(self.current_3d_drawing_plane_number, refine, base, scale, min, max) <file_sep>/docs/GXMAP.rst .. _GXMAP: GXMAP class ================================== .. autoclass:: geosoft.gxapi.GXMAP :members: .. _DUPMAP: DUPMAP constants ----------------------------------------------------------------------- Duplicate Modes .. 
autodata:: geosoft.gxapi.DUPMAP_BLANK :annotation: .. autoattribute:: geosoft.gxapi.DUPMAP_BLANK .. autodata:: geosoft.gxapi.DUPMAP_COPY :annotation: .. autoattribute:: geosoft.gxapi.DUPMAP_COPY .. autodata:: geosoft.gxapi.DUPMAP_COPY_PRE62 :annotation: .. autoattribute:: geosoft.gxapi.DUPMAP_COPY_PRE62 .. _MAP_EXPORT_BITS: MAP_EXPORT_BITS constants ----------------------------------------------------------------------- Color Types .. autodata:: geosoft.gxapi.MAP_EXPORT_BITS_32 :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_BITS_32 .. autodata:: geosoft.gxapi.MAP_EXPORT_BITS_24 :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_BITS_24 .. autodata:: geosoft.gxapi.MAP_EXPORT_BITS_GREY8 :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_BITS_GREY8 .. autodata:: geosoft.gxapi.MAP_EXPORT_BITS_8 :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_BITS_8 .. autodata:: geosoft.gxapi.MAP_EXPORT_BITS_GREY4 :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_BITS_GREY4 .. autodata:: geosoft.gxapi.MAP_EXPORT_BITS_4 :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_BITS_4 .. autodata:: geosoft.gxapi.MAP_EXPORT_BITS_GREY1 :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_BITS_GREY1 .. autodata:: geosoft.gxapi.MAP_EXPORT_BITS_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_BITS_DEFAULT .. _MAP_EXPORT_FORMAT: MAP_EXPORT_FORMAT constants ----------------------------------------------------------------------- Export Formats Format Description Type ======= ========================== ==== .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_PLT :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_PLT .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_SHP :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_SHP .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_DXF12 :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_DXF12 .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_DXF13 :annotation: .. 
autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_DXF13 .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_GTIFF :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_GTIFF .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_CGTIFF :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_CGTIFF .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_MTIFF :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_MTIFF .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_ATIFF :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_ATIFF .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_GEO :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_GEO .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_ERM :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_ERM .. autodata:: geosoft.gxapi.MAP_EXPORT_FORMAT_KMZ :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_FORMAT_KMZ .. _MAP_EXPORT_METHOD: MAP_EXPORT_METHOD constants ----------------------------------------------------------------------- Dithering Methods .. autodata:: geosoft.gxapi.MAP_EXPORT_METHOD_STANDARD :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_METHOD_STANDARD .. autodata:: geosoft.gxapi.MAP_EXPORT_METHOD_DIFFUSE :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_METHOD_DIFFUSE .. autodata:: geosoft.gxapi.MAP_EXPORT_METHOD_NONE :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_METHOD_NONE .. _MAP_EXPORT_RASTER_FORMAT: MAP_EXPORT_RASTER_FORMAT constants ----------------------------------------------------------------------- Export Raster Formats . Format Description Type B/W B/W COL B/W COL COL ======= ========================== =========== === === === === === === .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_EMF :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_EMF .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_BMP :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_BMP .. 
autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEGL :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEGL .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEG :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEG .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEGH :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEGH .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_GIF :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_GIF .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_PCX :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_PCX .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_PNG :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_PNG .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_EPS :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_EPS .. autodata:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_TIFF :annotation: .. autoattribute:: geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_TIFF .. _MAP_LIST_MODE: MAP_LIST_MODE constants ----------------------------------------------------------------------- Map List modes .. autodata:: geosoft.gxapi.MAP_LIST_MODE_ALL :annotation: .. autoattribute:: geosoft.gxapi.MAP_LIST_MODE_ALL .. autodata:: geosoft.gxapi.MAP_LIST_MODE_3D :annotation: .. autoattribute:: geosoft.gxapi.MAP_LIST_MODE_3D .. autodata:: geosoft.gxapi.MAP_LIST_MODE_NOT3D :annotation: .. autoattribute:: geosoft.gxapi.MAP_LIST_MODE_NOT3D .. _MAP_OPEN: MAP_OPEN constants ----------------------------------------------------------------------- Open Modes .. autodata:: geosoft.gxapi.MAP_WRITENEW :annotation: .. autoattribute:: geosoft.gxapi.MAP_WRITENEW .. autodata:: geosoft.gxapi.MAP_WRITEOLD :annotation: .. 
autoattribute:: geosoft.gxapi.MAP_WRITEOLD <file_sep>/geosoft/gxpy/tests/test_coordinate_system.py import unittest import numpy as np import os import geosoft.gxapi as gxapi import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.vv as gxvv import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.spatialdata as gxspd import geosoft.gxpy.utility as gxu import geosoft.gxpy.gx as gx import geosoft.gxpy.system as gxsys import geosoft.gxpy.view as gxview from geosoft.gxpy.geometry import PPoint from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, files = gxsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'section_grids.zip'), folder=cls._gx.temp_folder()) cls.section = os.path.join(cls.folder, 'section.grd') def test_any(self): self.start() # name with gxcs.Coordinate_system( 'DHDN / Okarito 2000') as cs: gxfs = cs.gxf self.assertEqual(gxfs[0],'DHDN / Okarito 2000') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225275,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') self.assertEqual(cs.cs_name(),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), '') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), 'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_DATUM),'DHDN') self.assertEqual(cs.cs_name(what=gxcs.NAME_PROJECTION),'Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_ORIENTATION),'0,0,0,0,0,0') self.assertEqual(cs.cs_name(what=gxcs.NAME_UNIT),'m') self.assertEqual(cs.cs_name(what=gxcs.NAME_UNIT_FULL),'metre') # GXF strings with gxcs.Coordinate_system(['','DHDN','Okarito 2000','','']) as cs: gxfs = cs.gxf self.assertEqual(gxfs[0],'DHDN / Okarito 
2000') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225275,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') cs.gxf = ['', 'NAD27', '"Transverse Mercator",0,-87,0.9996,500000,0', 'm,1', ''] self.assertEqual(cs, 'NAD27 / UTM zone 16N') # dictionary, json with gxcs.Coordinate_system('DHDN / Okarito 2000') as cs: dct = cs.coordinate_dict() with gxcs.Coordinate_system(dct) as cs: gxfs = cs.gxf self.assertEqual(gxfs[0],'DHDN / Okarito 2000') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225275,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') csd = cs.coordinate_dict() self.assertEqual(csd['name'],'DHDN / Okarito 2000') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225275,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') js = cs.json with gxcs.Coordinate_system(js) as cs: gxfs = cs.gxf self.assertEqual(gxfs[0],'DHDN / Okarito 2000') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225275,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') cs = gxcs.Coordinate_system() cs.json = '{"units": "m,1", "orientation": "", "datum": "NAD27,6378206.4,0.0822718542230039,0", "local_datum": "\\"NAD27 to WGS 84 (4)\\",-8,160,176,0,0,0,0", "projection": "\\"Transverse Mercator\\",0,-105,0.9996,500000,0", "name": "NAD27 / UTM zone 13N", "type": "Geosoft", "vcs": ""}' self.assertEqual(cs, 'NAD27 / UTM zone 13N') # ESRI wkt esri_wkt = 
'PROJCS["Okarito_2000",GEOGCS["GCS_Deutsches_Hauptdreiecksnetz",DATUM["D_Deutsches_Hauptdreiecksnetz",SPHEROID["Bessel_1841",6377397.155,299.152812800001]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199432955]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",400000],PARAMETER["False_Northing",800000],PARAMETER["Central_Meridian",170.260833333333],PARAMETER["Scale_Factor",1],PARAMETER["Latitude_Of_Origin",-43.11],UNIT["Meter",1]]' with gxcs.Coordinate_system(esri_wkt) as cs: gxfs = cs.gxf self.assertEqual(gxfs[0],'DHDN / Okarito 2000') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225274,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') cs.esri_wkt = 'PROJCS["NAD_1927_UTM_Zone_16N",GEOGCS["GCS_North_American_1927",DATUM["D_North_American_1927",SPHEROID["Clarke_1866",6378206.4,294.9786982]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000.0],PARAMETER["False_Northing",0.0],PARAMETER["Central_Meridian",-87.0],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0.0],UNIT["Meter",1.0],AUTHORITY["EPSG",26716]]' self.assertEqual(cs, 'NAD27 / UTM zone 16N') # name with a separate vcs with gxcs.Coordinate_system('DHDN / Okarito 2000') as cs: cs.vcs = 'geoid' self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), str(cs)) self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), 'geoid') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), 'DHDN / Okarito 2000 [geoid]') # name with embedded vcs with gxcs.Coordinate_system('DHDN / Okarito 2000 [geoid]') as cs: self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), str(cs)) self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), 'geoid') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), 'DHDN / Okarito 2000 [geoid]') ipj = gxapi.GXIPJ.create() ipj.set_gxf('', 'DHDN', 'Okarito 
2000', '', '') with gxcs.Coordinate_system(ipj) as cs: gxfs = cs.gxf self.assertEqual(gxfs[0],'DHDN / Okarito 2000') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225275,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') self.assertEqual(cs.cs_name(),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), '') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), 'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_DATUM),'DHDN') self.assertEqual(cs.cs_name(what=gxcs.NAME_PROJECTION),'Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_ORIENTATION),'0,0,0,0,0,0') self.assertEqual(cs.cs_name(what=gxcs.NAME_UNIT),'m') self.assertEqual(cs.cs_name(what=gxcs.NAME_UNIT_FULL),'metre') with gxcs.Coordinate_system(gxcs.Coordinate_system(ipj)) as cs: self.assertEqual(cs.vcs, '') gxfs = cs.gxf self.assertEqual(gxfs[0],'DHDN / Okarito 2000') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225275,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') self.assertEqual(cs.cs_name(),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), '') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), 'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_DATUM),'DHDN') self.assertEqual(cs.cs_name(what=gxcs.NAME_PROJECTION),'Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_ORIENTATION),'0,0,0,0,0,0') 
self.assertEqual(cs.cs_name(what=gxcs.NAME_UNIT),'m') self.assertEqual(cs.cs_name(what=gxcs.NAME_UNIT_FULL),'metre') def test_name_cs(self): self.start() hcs, orient, vcs = gxcs.hcs_orient_vcs_from_name("DHDN / Okarito 2000 [geoid]") self.assertEqual(hcs, "DHDN / Okarito 2000") self.assertEqual(orient, "") self.assertEqual(vcs, "geoid") self.assertEqual(gxcs.name_from_hcs_orient_vcs(hcs, orient, vcs), "DHDN / Okarito 2000 [geoid]") hcs, orient, vcs = gxcs.hcs_orient_vcs_from_name("DHDN / Okarito 2000 <0,0,0,0,0,0> [geoid] ") self.assertEqual(hcs, "DHDN / Okarito 2000") self.assertEqual(orient, "0,0,0,0,0,0") self.assertEqual(vcs, "geoid") self.assertEqual(gxcs.name_from_hcs_orient_vcs(hcs, orient, vcs), "DHDN / Okarito 2000 <0,0,0,0,0,0> [geoid]") hcs, orient, vcs = gxcs.hcs_orient_vcs_from_name("DHDN / Okarito 2000 <0,0,0,0,0,0>") self.assertEqual(hcs, "DHDN / Okarito 2000") self.assertEqual(orient, "0,0,0,0,0,0") self.assertEqual(vcs, "") self.assertEqual(gxcs.name_from_hcs_orient_vcs(hcs, orient, vcs), "DHDN / Okarito 2000 <0,0,0,0,0,0>") hcs, orient, vcs = gxcs.hcs_orient_vcs_from_name("DHDN / Okarito 2000") self.assertEqual(hcs, "DHDN / Okarito 2000") self.assertEqual(orient, "") self.assertEqual(vcs, "") self.assertEqual(gxcs.name_from_hcs_orient_vcs(hcs, orient, vcs), "DHDN / Okarito 2000") with gxcs.Coordinate_system( 'DHDN / Okarito 2000 [geodetic]') as cs: gxfs = cs.gxf self.assertEqual(gxfs[0],'DHDN / Okarito 2000 [geodetic]') self.assertEqual(gxfs[1],'DHDN,6377397.155,0.0816968312225275,0') self.assertEqual(gxfs[2],'"Transverse Mercator",-43.11,170.260833333333,1,400000,800000') self.assertEqual(gxfs[3],'m,1') self.assertEqual(gxfs[4],'"DHDN to WGS 84 (1)",582,105,414,1.04,0.35,-3.08,8.3') self.assertEqual(cs.cs_name(),'DHDN / Okarito 2000 [geodetic]') self.assertEqual(cs.cs_name(what=gxcs.NAME),'DHDN / Okarito 2000 [geodetic]') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS),'DHDN / Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), 
'geodetic') self.assertEqual(cs.cs_name(what=gxcs.NAME_DATUM),'DHDN') self.assertEqual(cs.cs_name(what=gxcs.NAME_PROJECTION),'Okarito 2000') self.assertEqual(cs.cs_name(what=gxcs.NAME_ORIENTATION),'0,0,0,0,0,0') self.assertEqual(cs.cs_name(what=gxcs.NAME_UNIT),'m') self.assertEqual(cs.cs_name(what=gxcs.NAME_UNIT_FULL),'metre') with gxcs.Coordinate_system('DHDN [geoid]') as cs: self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), cs.name) self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS), cs.hcs) self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), cs.vcs) self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS), 'DHDN') self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), 'geoid') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), 'DHDN [geoid]') with gxcs.Coordinate_system('DHDN [geodetic]') as cs: self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), cs.name) self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS), cs.hcs) self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), cs.vcs) self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS), 'DHDN') self.assertEqual(cs.cs_name(what=gxcs.NAME_VCS), 'geodetic') self.assertEqual(cs.cs_name(what=gxcs.NAME_HCS_VCS), 'DHDN [geodetic]') def test_vcs(self): self.start() self.assertEqual(gxcs.Coordinate_system("nad83 / UTM zone 15N [NAVD92]").name, 'NAD83 / UTM zone 15N [NAVD92]') self.assertEqual(gxcs.Coordinate_system("nad83 [NAVD92]").name, 'NAD83 [NAVD92]') self.assertFalse(gxcs.Coordinate_system("nad83 [NAVD92]").same_as(gxcs.Coordinate_system("NAD83 [geodetic]"))) self.assertFalse(gxcs.Coordinate_system("nad83 [geoid]").same_vcs(gxcs.Coordinate_system("NAD27 [NAVD92]"))) def test_pj(self): self.start() with gxcs.Coordinate_system('DHDN / Okarito 2000') as cs: with gxcs.Coordinate_system('DHDN') as csll: with gxcs.Coordinate_translate(cs, csll) as pj: lon, lat = pj.convert((500000, 6500000)) self.assertAlmostEqual(lon, 171) self.assertAlmostEqual(lat, 8) lon, lat = pj.convert((500000., 6500000.)) self.assertAlmostEqual(lon, 171.168823147) 
self.assertAlmostEqual(lat, 8.36948254242) ll = pj.convert(np.array([500000., 6500000.], dtype=np.float32)) self.assertAlmostEqual(ll[0], 171.16882, 5) self.assertAlmostEqual(ll[1], 8.36948, 5) lon, lat, z = pj.convert((500000., 6500000., 50.)) self.assertAlmostEqual(lon, 171.168823147) self.assertAlmostEqual(lat, 8.36948254242) self.assertAlmostEqual(z, 50) ll = pj.convert([[500000., 6500000.], [505000., 6510000.]]) self.assertAlmostEqual(ll[0][0], 171.168823147) self.assertAlmostEqual(ll[0][1], 8.36948254242) self.assertAlmostEqual(ll[1][0], 171.214439577) self.assertAlmostEqual(ll[1][1], 8.45978927383) ll = pj.convert(np.array([[500000., 6500000.], [505000., 6510000.]])) self.assertTrue(type(ll) is np.ndarray) self.assertAlmostEqual(ll[0][0], 171.168823147) self.assertAlmostEqual(ll[0][1], 8.36948254242) self.assertAlmostEqual(ll[1][0], 171.214439577) self.assertAlmostEqual(ll[1][1], 8.45978927383) vvx = gxvv.GXvv([500000., 505000.]) vvy = gxvv.GXvv([6500000., 6510000.]) pj.convert_vv(vvx, vvy) self.assertAlmostEqual(vvx[0][0], 171.168823147) self.assertAlmostEqual(vvy[0][0], 8.36948254242) self.assertAlmostEqual(vvx[1][0], 171.214439577) self.assertAlmostEqual(vvy[1][0], 8.45978927383) def test_local_dict(self): self.start() self.assertRaises(gxcs.CSException, gxcs.Coordinate_system, {'type': 'local'}) csdict = {'type': 'local', 'lon_lat': (-96,43)} csd = gxcs.Coordinate_system(csdict) self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,0,0)') with gxcs.Coordinate_translate(csd, gxcs.Coordinate_system('WGS 84')) as pj: lon, lat, z = pj.convert((0, 0, 0)) self.assertAlmostEqual(lat, 43) self.assertAlmostEqual(lon, -96) self.assertAlmostEqual(z, 0) csdict['azimuth'] = 25 csd = gxcs.Coordinate_system(csdict) self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,0,0) <0,0,0,0,0,25>') self.assertEqual(csd.gxf[2], '"Oblique Stereographic",43,-96,0.9996,0,0') with gxcs.Coordinate_translate(gxcs.Coordinate_system('WGS 84'), csd) as pj: x, y, z = pj.convert((-96., 43., 0)) 
self.assertAlmostEqual(x, 0) self.assertAlmostEqual(y, 0) self.assertAlmostEqual(z, 0) x, y, z = pj.convert((-95., 43., 0.)) self.assertAlmostEqual(x, 73665.899715) self.assertAlmostEqual(y, 34886.2319719) self.assertAlmostEqual(z, 0) csdict['origin'] = (1800, 500) csd = gxcs.Coordinate_system(csdict) self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,1800,500) <0,0,0,0,0,25>') self.assertEqual(csd.gxf[2], '"Oblique Stereographic",43,-96,0.9996,1842.66314753632,-307.558977614934') csdict['elevation'] = 800.5 csdict['vcs'] = 'geoid' csd = gxcs.Coordinate_system(csdict) self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,1800,500) <0,0,800.5,0,0,25>') self.assertEqual(csd.gxf[2], '"Oblique Stereographic",43,-96,0.9996,1842.66314753632,-307.558977614934') with gxcs.Coordinate_translate(gxcs.Coordinate_system('WGS 84'), csd) as pj: x, y = pj.convert((-96., 43.)) self.assertAlmostEqual(x, 1800) self.assertAlmostEqual(y, 500) with gxcs.Coordinate_translate(csd, gxcs.Coordinate_system('WGS 84')) as pj: lon, lat, z = pj.convert((1800., 500., 0.)) self.assertAlmostEqual(lat, 43) self.assertAlmostEqual(lon, -96) self.assertAlmostEqual(z, 800.5) def test_local(self): self.start() csd = gxcs.Coordinate_system.local(lon_lat=(-96, 43)) self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,0,0)') with gxcs.Coordinate_translate(csd, gxcs.Coordinate_system('WGS 84')) as pj: lon, lat, z = pj.convert((0, 0, 0)) self.assertAlmostEqual(lat, 43) self.assertAlmostEqual(lon, -96) self.assertAlmostEqual(z, 0) csd = gxcs.Coordinate_system.local(lon_lat=(-96, 43), azimuth=25) self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,0,0) <0,0,0,0,0,25>') self.assertEqual(csd.gxf[2], '"Oblique Stereographic",43,-96,0.9996,0,0') with gxcs.Coordinate_translate(gxcs.Coordinate_system('WGS 84'), csd) as pj: x, y, z = pj.convert((-96., 43., 0)) self.assertAlmostEqual(x, 0) self.assertAlmostEqual(y, 0) self.assertAlmostEqual(z, 0) x, y, z = pj.convert((-95., 43., 0.)) self.assertAlmostEqual(x, 73665.899715) 
self.assertAlmostEqual(y, 34886.2319719) self.assertAlmostEqual(z, 0) csd = gxcs.Coordinate_system.local(lon_lat=(-96, 43), azimuth=25, origin=(1800, 500)) self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,1800,500) <0,0,0,0,0,25>') self.assertEqual(csd.gxf[2], '"Oblique Stereographic",43,-96,0.9996,1842.66314753632,-307.558977614934') csd = gxcs.Coordinate_system.local(lon_lat=(-96, 43), azimuth=25, origin=(1800, 500), elevation=800.5, vcs='geoid') self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,1800,500) <0,0,800.5,0,0,25>') self.assertEqual(csd.gxf[2], '"Oblique Stereographic",43,-96,0.9996,1842.66314753632,-307.558977614934') with gxcs.Coordinate_translate(gxcs.Coordinate_system('WGS 84'), csd) as pj: x, y = pj.convert((-96., 43.)) self.assertAlmostEqual(x, 1800) self.assertAlmostEqual(y, 500) with gxcs.Coordinate_translate(csd, gxcs.Coordinate_system('WGS 84')) as pj: lon, lat, z = pj.convert((1800., 500., 0.)) self.assertAlmostEqual(lat, 43) self.assertAlmostEqual(lon, -96) self.assertAlmostEqual(z, 800.5) datum = 'NAD27' local_datum = '[NAD27] (10m) USA - CONUS - onshore' csd = gxcs.Coordinate_system.local(lon_lat=(-96, 43), azimuth=25, origin=(1800, 500), datum='NAD83', local_datum=local_datum, elevation=800.5, vcs='geoid') self.assertEqual(csd.name, 'NAD83 / *Local(43,-96,1800,500) <0,0,800.5,0,0,25>') self.assertEqual(csd.gxf[4], '"NAD83 to WGS 84 (1)",0,0,0,0,0,0,0') def test_oriented(self): self.start() with gxcs.Coordinate_system({'type': 'local', 'lon_lat': (-96., 43.), 'azimuth':25}) as cs: self.assertTrue(cs.is_oriented) xyzo = (10., 0., 0.) 
xyz = cs.xyz_from_oriented(xyzo) self.assertEqual(xyz, (9.063077870366499, -4.2261826174069945, 0.0)) xyz = (9.063077870366499, -4.2261826174069945, 0.0) xyz = cs.oriented_from_xyz(xyz) self.assertAlmostEqual(xyz[0], xyzo[0]) self.assertAlmostEqual(xyz[1], xyzo[1]) self.assertAlmostEqual(xyz[2], xyzo[2]) xyzo = ((10., 0., 0.), (0., 10.,5.)) xyz = cs.xyz_from_oriented(xyzo) self.assertEqual(tuple(xyz[0]), (9.063077870366499, -4.2261826174069945, 0.0)) self.assertEqual(tuple(xyz[1]), (4.2261826174069945, 9.0630778703664987, 5.0)) xyz = ((9.063077870366499, -4.2261826174069945, 0.0), (4.2261826174069945, 9.0630778703664987, 5.0)) xyz = cs.oriented_from_xyz(xyz) self.assertAlmostEqual(xyz[0][0], xyzo[0][0]) self.assertAlmostEqual(xyz[0][1], xyzo[0][1]) self.assertAlmostEqual(xyz[0][2], xyzo[0][2]) xyzo = ((10., 0.), (0., 10.), (0., 5.)) xyz = cs.xyz_from_oriented(xyzo, column_ordered=True) self.assertEqual(tuple(xyz[0]), (9.063077870366499, 4.2261826174069945)) self.assertEqual(tuple(xyz[1]), (-4.2261826174069945, 9.063077870366499)) self.assertEqual(tuple(xyz[2]), (0.0, 5.0)) self.assertTrue(cs == gxcs.Coordinate_system({'type': 'local', 'lon_lat': (-96., 43.), 'azimuth': 25})) self.assertFalse(cs == gxcs.Coordinate_system({'type': 'local', 'lon_lat': (-96., 43.), 'azimuth': 20})) def test_parameters(self): self.start() self.assertTrue(gxcs.parameter_exists(gxcs.PARM_DATUM, 'NAD83')) self.assertTrue(gxcs.parameter_exists(gxcs.PARM_PROJECTION, 'UTM zone 15N')) self.assertTrue(gxcs.parameter_exists(gxcs.PARM_UNITS, 'ftUS')) self.assertTrue(gxcs.parameter_exists(gxcs.PARM_LOCAL_DATUM, gxcs.name_list(gxcs.LIST_LOCALDATUMNAME)[5])) self.assertFalse(gxcs.parameter_exists(gxcs.PARM_UNITS, 'hoofs')) params = gxcs.parameters(gxcs.PARM_DATUM, 'WGS 84') self.assertTrue('ELLIPSOID' in params) params = gxcs.parameters(gxcs.PARM_PROJECTION, 'UTM zone 15N') self.assertEqual(float(params['P5']), 0.9996) def test_unit_only(self): self.start() with gxcs.Coordinate_system('cm') as cs: 
self.assertEqual(str(cs), '*unknown') self.assertEqual(cs.gxf[3], 'cm,0.01') self.assertEqual(cs.unit_of_measure, 'cm') self.assertEqual(cs.units_name, cs.unit_of_measure) def test_array(self): self.start() # define coordinate systems and a transformer cs_utm = gxcs.Coordinate_system('NAD83 / UTM zone 15N') cs_nad27 = gxcs.Coordinate_system('NAD27') cs_transform = gxcs.Coordinate_translate(cs_utm, cs_nad27) # example transform a single (x, y) coordinate lon_lat = cs_transform.convert((345000., 64250000.)) self.assertEqual(tuple(lon_lat), (88.777242210445195, -38.498998514257273)) # example transform a single (x, y, elevation) coordinate ct = cs_transform.convert((345000., 64250000., 50)) self.assertAlmostEqual(ct[0], 88.77724221146941) self.assertAlmostEqual(ct[1], -38.49899848105302) self.assertAlmostEqual(ct[2], 50.0) # example translate a list of (x, y, z) tuples locations = [(345000, 64250000, 50), (345500, 64250000, 60), (346000, 64250000, 70)] nad27_locations = cs_transform.convert(locations) self.assertEqual(len(nad27_locations), 3) ct = nad27_locations[2] self.assertAlmostEqual(ct[0], 89) self.assertAlmostEqual(ct[1], -38) self.assertAlmostEqual(ct[2], 70) # example transform a numpy array in-place data = np.array([[345000, 64250000, 50, 55000], [345500, 64250000, 60, 55150], [346000, 64250000, 70, 56000]], dtype=float) nad27_locations = cs_transform.convert(data, in_place=True) self.assertEqual(len(nad27_locations), 3) ct = nad27_locations[2] self.assertAlmostEqual(ct[0], 8.87657800e+01) self.assertAlmostEqual(ct[1], -3.84991719e+01) self.assertAlmostEqual(ct[2], 7.00000000e+01) self.assertAlmostEqual(ct[3], 5.60000000e+04) def test_known(self): self.start() self.assertFalse(gxcs.is_known(None)) self.assertTrue(gxcs.is_known('WGS 84')) self.assertTrue(gxcs.is_known(gxcs.Coordinate_system('WGS 84'))) self.assertFalse(gxcs.is_known('crazy')) def test_from_xml(self): self.start() temp_grid = gx.gx().temp_file('grd') cs = gxcs.Coordinate_system('NAD27 / UTM 
zone 18N') gxgrd.Grid.new(temp_grid, properties={'dtype': np.int16, 'nx': 100, 'ny': 50, 'x0': 4, 'y0': 8, 'dx': 0.1, 'dy': 0.2, 'rot': 5, 'coordinate_system': cs}).close() cs = gxspd.coordinate_system_from_metadata_file(temp_grid) self.assertEqual(str(cs), 'NAD27 / UTM zone 18N') gxf = cs.gxf.copy() gxf[0] = 'crazy' gxf[1] = 'weird,6378206.4,0.0822718542230039,0' cs = gxcs.Coordinate_system(gxf) with gxgrd.Grid.new(temp_grid, overwrite=True, properties={'dtype': np.int16, 'nx': 100, 'ny': 50, 'x0': 4, 'y0': 8, 'dx': 0.1, 'dy': 0.2, 'rot': 5, 'coordinate_system': cs}) as grd: pass cs = gxspd.coordinate_system_from_metadata_file(temp_grid) self.assertEqual(str(cs), 'weird / UTM zone 18N') with gxcs.Coordinate_system('WGS 84') as cs: xml = cs.xml xml_dict = gxu.dict_from_xml(xml) with gxcs.Coordinate_system(xml_dict) as wgs: self.assertEqual(str(wgs), 'WGS 84') with gxcs.Coordinate_system() as cs: cs.xml = xml self.assertEqual(str(cs), 'WGS 84') with gxcs.Coordinate_system(xml) as cs: self.assertEqual(str(cs), 'WGS 84') with gxcs.Coordinate_system.local(lon_lat=(-96, 43), azimuth=25) as csd: self.assertEqual(csd.name, 'WGS 84 / *Local(43,-96,0,0) <0,0,0,0,0,25>') self.assertEqual(csd.gxf[2], '"Oblique Stereographic",43,-96,0.9996,0,0') xml = csd.xml with gxcs.Coordinate_system() as csxml: csxml.xml = xml self.assertEqual(csxml.name, 'WGS 84 / *Local(43,-96,0,0) <0,0,0,0,0,25>') self.assertEqual(csxml.gxf[2], '"Oblique Stereographic",43,-96,0.9996,0,0') with gxcs.Coordinate_system(xml) as csxml: self.assertEqual(csxml.name, 'WGS 84 / *Local(43,-96,0,0) <0,0,0,0,0,25>') self.assertEqual(csxml.gxf[2], '"Oblique Stereographic",43,-96,0.9996,0,0') def test_section_ipj(self): self.start() cs = gxgrd.Grid.open(self.section).coordinate_system self.assertFalse(cs.is_known) self.assertTrue(cs.is_oriented) new_cs = gxcs.Coordinate_system(cs) self.assertTrue(new_cs.is_oriented) self.assertFalse(new_cs.is_known) 
############################################################################################### if __name__ == '__main__': unittest.main() <file_sep>/docs/GXEXP.rst .. _GXEXP: GXEXP class ================================== .. autoclass:: geosoft.gxapi.GXEXP :members: <file_sep>/geosoft/gxapi/GXLAYOUT.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXLAYOUT(gxapi_cy.WrapLAYOUT): """ GXLAYOUT class. Layout class for generic relative layout calculation The relative layout algorithm allows a logical organization of layout rectangles. You can set constraints with English-like semantics. For example: "Set the left side of rectangle 1 equal to the right side of rectangle 2 plus 10 pixels." "Set the bottom of rectangle 1 to 25 percent of the height of rectangle 2." "Move node 1 such that its bottom is equal to the top of rectangle 2 minus 10 pixels." The last constraint set would enjoy priority over any others as it would be the last one that would influence the rectangle calculations. See the notes for iSetConstraint for more details. """ def __init__(self, handle=0): super(GXLAYOUT, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXLAYOUT <geosoft.gxapi.GXLAYOUT>` :returns: A null `GXLAYOUT <geosoft.gxapi.GXLAYOUT>` :rtype: GXLAYOUT """ return GXLAYOUT() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def calculate_rects(self, min_x, min_y, max_x, max_y): """ Calculate new positions based on initial conditions and constraints :param min_x: Parent Rectangle Min X after calculation :param min_y: Parent Rectangle Min Y after calculation :param max_x: Parent Rectangle Max X after calculation :param max_y: Parent Rectangle Max Y after calculation :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Use iGetRectangle to obtain the results for the other rectangles. Depending on the constraints set the parent rectangle may also change after the calculation (returned here for convenience). """ min_x.value, min_y.value, max_x.value, max_y.value = self._calculate_rects(min_x.value, min_y.value, max_x.value, max_y.value) def clear_all(self): """ Remove all children and constraints from layout .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._clear_all() def clear_constraints(self): """ Remove all constraints from layout .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._clear_constraints() @classmethod def create(cls, num, name): """ Creates a layout calculation object :param num: Initial number of objects (may be 0) :param name: Optional name of parent layout (may be empty) :type num: int :type name: str :returns: `GXLAYOUT <geosoft.gxapi.GXLAYOUT>` object. :rtype: GXLAYOUT .. 
versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapLAYOUT._create(GXContext._get_tls_geo(), num, name.encode()) return GXLAYOUT(ret_val) def get_rectangle(self, rect, min_x, min_y, max_x, max_y): """ Gets the current bounds for a rectangle or the parent layout :param rect: Rectangle to get info for (-1 for parent) :param min_x: Rectangle Min X :param min_y: Rectangle Min Y :param max_x: Rectangle Max X :param max_y: Rectangle Max Y :type rect: int :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ min_x.value, min_y.value, max_x.value, max_y.value = self._get_rectangle(rect, min_x.value, min_y.value, max_x.value, max_y.value) def get_rect_name(self, rect, name): """ Gets an optional name the current info for a rectangle or the parent layout :param rect: Rectangle to get info for (-1 for parent) :param name: Buffer for name of the rectangle :type rect: int :type name: str_ref .. 
versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ name.value = self._get_rect_name(rect, name.value.encode()) def add_constraint(self, rect_from, constr_from, rect_to, constr_to, o_mod, m_mod): """ Add a constraint between any two rectangles or to one with absolute positioning :param rect_from: From rectangle (Or -1 for parent) :param constr_from: :ref:`LAYOUT_CONSTR` From constraint flag :param rect_to: To rectangle (Or -1 for parent Or -2 for absolute positioning) :param constr_to: :ref:`LAYOUT_CONSTR` To constraint flag :param o_mod: Offset modifier :param m_mod: Multiplicative modifier :type rect_from: int :type constr_from: int :type rect_to: int :type constr_to: int :type o_mod: float :type m_mod: float :returns: 0 - OK 1 - Error :rtype: int .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Constraints can be applied between 2 rectangles in the layout, or to 1 rectangle with absolute positioning. Use the constraints to control left, right, bottom, top, width, height, or centering configurations. 
Examples: (ordered as rectangle from, constraint from, rectangle to, constraint to, offset modifier, multiplicative modifier) A, `LAYOUT_CONSTR_LEFT <geosoft.gxapi.LAYOUT_CONSTR_LEFT>`, B, `LAYOUT_CONSTR_LEFT <geosoft.gxapi.LAYOUT_CONSTR_LEFT>`, 0, 0, 1.0 Set left position of A equal to left pos of B A, `LAYOUT_CONSTR_LEFT <geosoft.gxapi.LAYOUT_CONSTR_LEFT>`, B, `LAYOUT_CONSTR_RIGHT <geosoft.gxapi.LAYOUT_CONSTR_RIGHT>`, 0, 0, 1.0 Set left pos of A equal to right of B The offset modifier is used for additive manipulation of constraints A, `LAYOUT_CONSTR_LEFT <geosoft.gxapi.LAYOUT_CONSTR_LEFT>`, B, `LAYOUT_CONSTR_LEFT <geosoft.gxapi.LAYOUT_CONSTR_LEFT>`, 10, 0, 1.0 Set left pos of A equal to left of B, plus 10 A, `LAYOUT_CONSTR_BOTTOM <geosoft.gxapi.LAYOUT_CONSTR_BOTTOM>`, B, `LAYOUT_CONSTR_TOP <geosoft.gxapi.LAYOUT_CONSTR_TOP>`, -20, 0, 1.0 Set bottom of A equal to top of B, minus 20 Multiplicative manipulation of constraints A, `LAYOUT_CONSTR_WIDTH <geosoft.gxapi.LAYOUT_CONSTR_WIDTH>`, B, `LAYOUT_CONSTR_WIDTH <geosoft.gxapi.LAYOUT_CONSTR_WIDTH>`, 0, 0.5 Set the width of A equal to 0.5 times the width of B A, `LAYOUT_CONSTR_HEIGHT <geosoft.gxapi.LAYOUT_CONSTR_HEIGHT>`, B, `LAYOUT_CONSTR_WIDTH <geosoft.gxapi.LAYOUT_CONSTR_WIDTH>`, 0, 1.2 Set the height of A equal to 1.2 times the width of B You can use BOTH the multiplicative and offset modifiers in conjunction (multiplicative gets precedence) A, `LAYOUT_CONSTR_WIDTH <geosoft.gxapi.LAYOUT_CONSTR_WIDTH>`, B, `LAYOUT_CONSTR_WIDTH <geosoft.gxapi.LAYOUT_CONSTR_WIDTH>`, 10, 0.5 A(width) = (0.5 * B(width)) + 10 A, `LAYOUT_CONSTR_LEFT <geosoft.gxapi.LAYOUT_CONSTR_LEFT>`, B, `LAYOUT_CONSTR_WIDTH <geosoft.gxapi.LAYOUT_CONSTR_WIDTH>`, -20, 0.1 A(left) = (0.1 * B(width)) + (-20) If second node is -2, use absolute positioning A,`LAYOUT_CONSTR_LEFT <geosoft.gxapi.LAYOUT_CONSTR_LEFT>`,-2,<ignored>,25,<ignored>,<ignored> Position left of A at position 25 A,`LAYOUT_CONSTR_WIDTH 
<geosoft.gxapi.LAYOUT_CONSTR_WIDTH>`,-2,<ignored>,30,<ignored>,<ignored> Set width of A to 30 Use the MOVE constraints to move an entire window without resizing A, `LAYOUT_CONSTR_MOVEL <geosoft.gxapi.LAYOUT_CONSTR_MOVEL>`, B, `LAYOUT_CONSTR_LEFT <geosoft.gxapi.LAYOUT_CONSTR_LEFT>`, 0, 0, 1.0 Move node A, align left with left side of B A, `LAYOUT_CONSTR_MOVEL <geosoft.gxapi.LAYOUT_CONSTR_MOVEL>`, B, `LAYOUT_CONSTR_RIGHT <geosoft.gxapi.LAYOUT_CONSTR_RIGHT>`, 0, 0, 1.0 Move node A, align left with right side of B A, `LAYOUT_CONSTR_MOVET <geosoft.gxapi.LAYOUT_CONSTR_MOVET>`, B, `LAYOUT_CONSTR_WIDTH <geosoft.gxapi.LAYOUT_CONSTR_WIDTH>`, 0, 0, 1.0 Move node A, align bottom to position equal to width of B A, `LAYOUT_CONSTR_MOVER <geosoft.gxapi.LAYOUT_CONSTR_MOVER>`, B, `LAYOUT_CONSTR_RIGHT <geosoft.gxapi.LAYOUT_CONSTR_RIGHT>`, 10, 1.1 Move node A, align right to 1.1*right of B, plus 10 A, `LAYOUT_CONSTR_MOVEL <geosoft.gxapi.LAYOUT_CONSTR_MOVEL>`, NULL, 10, 0, 1.0 Move node A, align left at position 10 """ ret_val = self._add_constraint(rect_from, constr_from, rect_to, constr_to, o_mod, m_mod) return ret_val def add_rectangle(self, min_x, min_y, max_x, max_y): """ Adds a rectangle as one of the layout's children (Higer. :param min_x: Rectangle Min X (All 0's for undefined allowed) :param min_y: Rectangle Min Y :param max_x: Rectangle Max X :param max_y: Rectangle Max Y :type min_x: float :type min_y: float :type max_x: float :type max_y: float :returns: Rectangle number, -1 on error :rtype: int .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._add_rectangle(min_x, min_y, max_x, max_y) return ret_val def num_rectangles(self): """ Returns the number of children in the list. :returns: Number of rectangles not counting the parent :rtype: int .. 
versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._num_rectangles() return ret_val def set_rectangle(self, rect, min_x, min_y, max_x, max_y): """ Sets the current bounds for a rectangle previously added to the layout :param rect: Rectangle to set info for (-1 for parent) :param min_x: Rectangle Min X :param min_y: Rectangle Min Y :param max_x: Rectangle Max X :param max_y: Rectangle Max Y :type rect: int :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_rectangle(rect, min_x, min_y, max_x, max_y) def set_rectangle_name(self, rect, p3): """ Sets an optional name the current info for a rectangle or the parent layout :param rect: Rectangle to set info for (-1 for parent) :param p3: Name :type rect: int :type p3: str .. 
versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_rectangle_name(rect, p3.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/3D Views/tmi_3d_stack.py import geosoft.gxpy.gx as gx import geosoft.gxpy.view as gxview import geosoft.gxpy.group as gxgroup import geosoft.gxpy.agg as gxagg import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.viewer as gxviewer gxc = gx.GXpy() tmi_file = 'Wittichica Creek Residual Total Field.grd' dem_file = 'Wittichica DEM.grd' # create a 3D view with gxview.View_3d.new("TMI drapped on DEM", area_2d=gxgrd.Grid.open(tmi_file).extent_2d(), coordinate_system=gxgrd.Grid.open(tmi_file).coordinate_system, scale=5000, overwrite=True) as v: v3d_name = v.file_name # use the DEM as the relief surface v.set_plane_relief_surface(dem_file) gxgroup.Aggregate_group.new(v, gxagg.Aggregate_image.new(dem_file, color_map='elevation.tbl')) # relief plane for the TMI, offset to elevation 2000 v.new_drawing_plane('TMI relief') v.set_plane_relief_surface(tmi_file, base=-4000) gxgroup.Aggregate_group.new(v, gxagg.Aggregate_image.new(tmi_file)) gxgroup.contour(v, 'TMI_contour', tmi_file) # add DEM contours on a plane floating beneath the DEM v.new_drawing_plane('Scratch plane', offset=(0, 0, -2000)) gxgroup.contour(v, 'DEM contour', tmi_file) # display the map in a Geosoft viewer gxviewer.view_document(v3d_name, wait_for_close=False) <file_sep>/geosoft/gxapi/GXMAP.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXLPT import GXLPT from .GXMETA import GXMETA from .GXREG import GXREG ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMAP(gxapi_cy.WrapMAP): """ GXMAP class. MAPs are containers for `GXMVIEW <geosoft.gxapi.GXMVIEW>` objects. A view is a 3-D translation and a clip window on a map. Graphic entities can be drawn in an `GXMVIEW <geosoft.gxapi.GXMVIEW>`. It is recommended that the `GXMAP <geosoft.gxapi.GXMAP>` class be instantiated by first creating an `GXEMAP <geosoft.gxapi.GXEMAP>` object and calling the `GXEMAP.lock <geosoft.gxapi.GXEMAP.lock>` function. (See the explanation on the distinction between the `GXMAP <geosoft.gxapi.GXMAP>` and `GXEMAP <geosoft.gxapi.GXEMAP>` classes). """ def __init__(self, handle=0): super(GXMAP, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMAP <geosoft.gxapi.GXMAP>` :returns: A null `GXMAP <geosoft.gxapi.GXMAP>` :rtype: GXMAP """ return GXMAP() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Export def export_all_in_view(self, name, view, pix_size, dpi, bits, dither, format, options): """ Export the entire map in view units to an external format. View and Group names are removed and plane spatial coordinates will be in the units of the map. 
:param name: File Name To Export :param view: View to export coordinates in :param pix_size: Resolution in view units of one pixel (or dummy, will be used if DPI is dummy) :param dpi: Resolution in DPI (will override view resolution if not dummy, map page size will be used to determine pixel size of output) :param bits: :ref:`MAP_EXPORT_BITS` :param dither: :ref:`MAP_EXPORT_METHOD` :param format: :ref:`MAP_EXPORT_FORMAT` :param options: Extended Options String (format specific) :type name: str :type view: str :type pix_size: float :type dpi: float :type bits: int :type dither: int :type format: str :type options: str .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_all_in_view(name.encode(), view.encode(), pix_size, dpi, bits, dither, format.encode(), options.encode()) def export_all_raster(self, name, view, size_x, size_y, dpi, bits, dither, format, options): """ Export the entire map to map to a non-geo raster format. :param name: File Name To Export :param view: View to export coordinates in :param size_x: Number of Pixels in X (X or Y should be specified the other should be 0 and computed by export, or both can be 0 and DPI defined) :param size_y: Number of Pixels in Y (X or Y should be specified the other should be 0 and computed by export, or both can be 0 and DPI defined) :param dpi: Resolution in DPI (will override X and Y if not dummy, map page size will be used to determine pixel size of output) :param bits: :ref:`MAP_EXPORT_BITS` :param dither: :ref:`MAP_EXPORT_METHOD` :param format: :ref:`MAP_EXPORT_RASTER_FORMAT` :param options: Extended Options String (format specific) :type name: str :type view: str :type size_x: int :type size_y: int :type dpi: float :type bits: int :type dither: int :type format: str :type options: str .. 
versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_all_raster(name.encode(), view.encode(), size_x, size_y, dpi, bits, dither, format.encode(), options.encode()) def export_area_in_view(self, name, view, pix_size, dpi, bits, dither, min_x, min_y, max_x, max_y, format, options): """ Export an area of a map in view units to an external format :param name: File Name To Export :param view: View to export coordinates in :param pix_size: Resolution in view units of one pixel (or dummy, will be used if DPI is dummy) :param dpi: Resolution in DPI (will override view resolution if not dummy, map page size will be used to determine pixel size of output) :param bits: :ref:`MAP_EXPORT_BITS` :param dither: :ref:`MAP_EXPORT_METHOD` :param min_x: Area To Export Min X location in view units :param min_y: Area To Export Min Y location in view units :param max_x: Area To Export Max X location in view units :param max_y: Area To Export Max Y location in view units :param format: :ref:`MAP_EXPORT_FORMAT` :param options: Extended Options String (format specific) :type name: str :type view: str :type pix_size: float :type dpi: float :type bits: int :type dither: int :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type format: str :type options: str .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_area_in_view(name.encode(), view.encode(), pix_size, dpi, bits, dither, min_x, min_y, max_x, max_y, format.encode(), options.encode()) def export_area_raster(self, name, view, min_x, min_y, max_x, max_y, size_x, size_y, dpi, bits, dither, format, options): """ Export an area of a map to a non-geo raster format. 
:param name: File Name To Export :param view: View to export coordinates in :param min_x: Area To Export Min X location in view units :param min_y: Area To Export Min Y location in view units :param max_x: Area To Export Max X location in view units :param max_y: Area To Export Max Y location in view units :param size_x: Number of Pixels in X (X or Y should be specified the other should be 0 and computed by export, or both can be 0 and DPI defined) :param size_y: Number of Pixels in Y (X or Y should be specified the other should be 0 and computed by export, or both can be 0 and DPI defined) :param dpi: Resolution in DPI (will override X and Y if not dummy, map page size will be used to determine pixel size of output) :param bits: :ref:`MAP_EXPORT_BITS` :param dither: :ref:`MAP_EXPORT_METHOD` :param format: :ref:`MAP_EXPORT_RASTER_FORMAT` :param options: Extended Options String (format specific) :type name: str :type view: str :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type size_x: int :type size_y: int :type dpi: float :type bits: int :type dither: int :type format: str :type options: str .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_area_raster(name.encode(), view.encode(), min_x, min_y, max_x, max_y, size_x, size_y, dpi, bits, dither, format.encode(), options.encode()) def rename_view(self, current_name, new_name): """ Renames a view in this map. :param current_name: Name of view to rename) :param new_name: New name of view :type current_name: str :type new_name: str .. versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._rename_view(current_name.encode(), new_name.encode()) def render_bitmap(self, view, min_x, min_y, max_x, max_y, file, max_res): """ Render a map to a bitmap. 
:param view: View we exporting units in :param min_x: MinX :param min_y: MinY :param max_x: MaxX :param max_y: MaxY :param file: File to generate (BMP or PNG, otherwise extension forced to BMP) :param max_res: Maximum resolution in either direction, -1 for none (will change the pixel density of image if exceeded) :type view: str :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type file: str :type max_res: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._render_bitmap(view.encode(), min_x, min_y, max_x, max_y, file.encode(), max_res) def render_view_bitmap(self, view, group, min_x, min_y, max_x, max_y, file, max_res): """ Render a map view to a bitmap. :param view: `GXMVIEW <geosoft.gxapi.GXMVIEW>` object :param group: group (-1 for all) :param min_x: MinX :param min_y: MinY :param max_x: MaxX :param max_y: MaxY :param file: File to generate (BMP or PNG, otherwise extension forced to BMP) :param max_res: Maximum resolution in either direction, -1 for none (will change the pixel density of image if exceeded) :type view: GXMVIEW :type group: int :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type file: str :type max_res: int .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._render_view_bitmap(view, group, min_x, min_y, max_x, max_y, file.encode(), max_res) # 3D View def create_linked_3d_view(self, mview, view_name, min_x, min_y, max_x, max_y): """ Create a 3D View in this map that is linked to a `GXMVIEW <geosoft.gxapi.GXMVIEW>` in a 3D View file. 
:param mview: `GX3DV <geosoft.gxapi.GX3DV>`'s 3D `GXMVIEW <geosoft.gxapi.GXMVIEW>` :param view_name: New view name :param min_x: X minimum in mm :param min_y: Y minimun in mm :param max_x: X maximum in mm :param max_y: Y maximum in mm :type mview: GXMVIEW :type view_name: str :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._create_linked_3d_view(mview, view_name.encode(), min_x, min_y, max_x, max_y) # Miscellaneous def agg_list(self, lst, optn): """ Get a list of all aggregates in this map. :param lst: List to hold the views (allow up to 96 characters) :param optn: 0 - view/agg only 1 - view/agg/layer :type lst: GXLST :type optn: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** List items are returned as view/agg/layer. The layer name is optional .. seealso:: `GXLST <geosoft.gxapi.GXLST>` class. """ self._agg_list(lst, optn) def agg_list_ex(self, lst, optn, mode): """ Get a list of aggregates in this map based on a mode :param lst: List to hold the views (allow up to 96 characters) :param optn: 0 - view/agg only 1 - view/agg/layer :param mode: :ref:`MAP_LIST_MODE` :type lst: GXLST :type optn: int :type mode: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** List items are returned as view/agg/layer. The layer name is optional .. seealso:: `GXLST <geosoft.gxapi.GXLST>` class. """ self._agg_list_ex(lst, optn, mode) def clean(self): """ Clean up empty groups in all views in map. .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clean() def delete_empty_groups(self): """ Remove empty groups in the map, do not delete empty views. .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_empty_groups() def commit(self): """ Commit any changes to a map. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._commit() def copy_map_to_view(self, dest_map, dest_view): """ Copy entire map into one view in output map. :param dest_map: Destination `GXMAP <geosoft.gxapi.GXMAP>` name :param dest_view: Name of View :type dest_map: str :type dest_view: str .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy_map_to_view(dest_map.encode(), dest_view.encode()) def crc_map(self, crc, file): """ Generate an XML CRC of a `GXMAP <geosoft.gxapi.GXMAP>` :param crc: CRC returned :param file: Name of xml to generate (.zip added) :type crc: int_ref :type file: str .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ crc.value = self._crc_map(crc.value, file.encode()) @classmethod def create(cls, name, mode): """ Create a `GXMAP <geosoft.gxapi.GXMAP>`. :param name: `GXMAP <geosoft.gxapi.GXMAP>` file name :param mode: :ref:`MAP_OPEN` :type name: str :type mode: int :returns: `GXMAP <geosoft.gxapi.GXMAP>` Object :rtype: GXMAP .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMAP._create(GXContext._get_tls_geo(), name.encode(), mode) return GXMAP(ret_val) @classmethod def current(cls): """ This method returns the Current map opened. :returns: `GXMAP <geosoft.gxapi.GXMAP>` Object :rtype: GXMAP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If there is no current map, and running interactively, the user is prompted to open a map. """ ret_val = gxapi_cy.WrapMAP._current(GXContext._get_tls_geo()) return GXMAP(ret_val) def delete_view(self, name): """ Deletes a view in this map. :param name: View Name to delete :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the view does not exist, nothing happens. """ self._delete_view(name.encode()) def discard(self): """ Discard all changes made to the map. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._discard() def dup_map(self, ma_pd, content): """ Duplicate copy of current map. :param ma_pd: Destination `GXMAP <geosoft.gxapi.GXMAP>` object :param content: :ref:`DUPMAP` :type ma_pd: GXMAP :type content: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Before version 6.2 text in maps were displayed with a character set defining how characters above ASCII 127 would be displayed. 
6.2 introduced Unicode in the core montaj engine that eliminated the need for such a setting and greatly increased the number of symbols that can be used. The only caveat of the new system is that text may appear corrupted (especially with GFN fonts) in versions prior to 6.2 that render maps created in version 6.2 and later. The constant `DUPMAP_COPY_PRE62 <geosoft.gxapi.DUPMAP_COPY_PRE62>` provides a way to create maps that can be distributed to versions prior to 6.2. """ self._dup_map(ma_pd, content) def get_lpt(self): """ Get the `GXLPT <geosoft.gxapi.GXLPT>` Object of a `GXMAP <geosoft.gxapi.GXMAP>`. :returns: `GXLPT <geosoft.gxapi.GXLPT>` Object :rtype: GXLPT .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_lpt() return GXLPT(ret_val) def get_map_size(self, xmin, ymin, xmax, ymax): """ Get the size of the Map. :param xmin: X minimum in mm :param ymin: Y minimun in mm :param xmax: X maximum in mm :param ymax: Y maximum in mm :type xmin: float_ref :type ymin: float_ref :type xmax: float_ref :type ymax: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ xmin.value, ymin.value, xmax.value, ymax.value = self._get_map_size(xmin.value, ymin.value, xmax.value, ymax.value) def get_meta(self): """ Get the map's `GXMETA <geosoft.gxapi.GXMETA>` :returns: `GXMETA <geosoft.gxapi.GXMETA>` Object :rtype: GXMETA .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the map has no `GXMETA <geosoft.gxapi.GXMETA>`, an empty `GXMETA <geosoft.gxapi.GXMETA>` will be created. 
""" ret_val = self._get_meta() return GXMETA(ret_val) def get_reg(self): """ Get the map's `GXREG <geosoft.gxapi.GXREG>` :returns: `GXREG <geosoft.gxapi.GXREG>` Object :rtype: GXREG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the map has no `GXREG <geosoft.gxapi.GXREG>`, an empty `GXREG <geosoft.gxapi.GXREG>` will be created. """ ret_val = self._get_reg() return GXREG(ret_val) def group_list(self, lst): """ Get a list of all views/groups in this map. :param lst: List to hold the view/groups. Names may be up to 2080 characters in length. :type lst: GXLST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns all groups in the form "ViewName\\GroupName" To get a `GXLST <geosoft.gxapi.GXLST>` of groups in a specific map view, use the `GXMVIEW.list_groups <geosoft.gxapi.GXMVIEW.list_groups>` function. .. seealso:: `GXLST <geosoft.gxapi.GXLST>` class. `GXMVIEW.list_groups <geosoft.gxapi.GXMVIEW.list_groups>` """ self._group_list(lst) def group_list_ex(self, lst, mode): """ Get a list of views/groups in this map for this mode :param lst: List to hold the views. View names may be up to 2080 characters in length. :param mode: :ref:`MAP_LIST_MODE` :type lst: GXLST :type mode: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `GXLST <geosoft.gxapi.GXLST>` class. """ self._group_list_ex(lst, mode) def duplicate_view(self, view, n_view, copy): """ Duplicate an entire view :param view: Name of view to duplicate :param n_view: Name of new view created (pass in "" and the new name is returned) :param copy: Copy all groups :type view: str :type n_view: str_ref :type copy: bool .. 
versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ n_view.value = self._duplicate_view(view.encode(), n_view.value.encode(), copy) def exist_view(self, name): """ Checks to see if a view exists. :param name: View name :type name: str :returns: 0 view does not exist. 1 view exists. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._exist_view(name.encode()) return ret_val def get_class_name(self, cl, name): """ Get a class name. :param cl: Class :param name: Name :type cl: str :type name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Map class names are intended to be used to record the names of certain view classes in the map, such as the "Data", "Base" and "Section" views. There can only be one name for each class, but it can be changed. This lets the "Data" class name change, for example, so plotting can select which class to plot to. If a name is not set, the class name is set and returned. """ name.value = self._get_class_name(cl.encode(), name.value.encode()) def get_file_name(self, name): """ Get the name of the map. :param name: Returned map file name :type name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._get_file_name(name.value.encode()) def get_map_name(self, name): """ Get the Map Name of the Map. :param name: Returned map name :type name: str_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._get_map_name(name.value.encode()) def packed_files(self): """ The number of packed files in the current map. :returns: The number of packed files in map. :rtype: int .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._packed_files() return ret_val def un_pack_files_ex(self, force, errors): """ UnPack all files from map to workspace. :param force: (0 - Produce errors, 1 - Force overwrites) :param errors: List of files that are problematic returned :type force: int :type errors: str_ref .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The option to force will simply overwrite the files. When the non-force option is in effect the method will stop if any files are going to be overwritting. These file names will end up in the Errors string. """ errors.value = self._un_pack_files_ex(force, errors.value.encode()) def un_pack_files_to_folder(self, force, dir, errors): """ UnPack all files from map to workspace. :param force: (0 - Produce errors, 1 - Force overwrites) :param dir: Directory to place unpacked files in. :param errors: List of files that are problematic returned :type force: int :type dir: str :type errors: str_ref .. versionadded:: 7.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ errors.value = self._un_pack_files_to_folder(force, dir.encode(), errors.value.encode()) def pack_files(self): """ Pack all files in the map so that it can be mailed. .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._pack_files() def render(self, name): """ Render a map to file/device. :param name: Plot file/device :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._render(name.encode()) def resize_all(self): """ Resize a map to the extents of all views. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the same as `resize_all_ex <geosoft.gxapi.GXMAP.resize_all_ex>` with `MVIEW_EXTENT_CLIP <geosoft.gxapi.MVIEW_EXTENT_CLIP>`. """ self._resize_all() def resize_all_ex(self, ext): """ `resize_all <geosoft.gxapi.GXMAP.resize_all>` with selection of view extent type selection. :param ext: :ref:`MVIEW_EXTENT` :type ext: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** `MVIEW_EXTENT_VISIBLE <geosoft.gxapi.MVIEW_EXTENT_VISIBLE>` gives a more "reasonable" map size, and won't clip off labels outside a graph window. """ self._resize_all_ex(ext) def get_map_scale(self): """ Get the current map scale :returns: The current map scale :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If there is a "Data" view, the scale is derived from this view. If their is no data view, the scale is derived from the first view that is not scaled in mm. otherwise, the scale is 1000 (mm). All views must be closed, or open read-only. 
""" ret_val = self._get_map_scale() return ret_val def save_as_mxd(self, mxd): """ Save as ArcGIS `GXMXD <geosoft.gxapi.GXMXD>` :param mxd: Geosoft map file name :type mxd: str .. versionadded:: 7.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._save_as_mxd(mxd.encode()) def set_class_name(self, cl, name): """ Set a class name. :param cl: Class :param name: Name :type cl: str :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Map class names are intended to be used to record the names of certain view classes in the map, such as the "Data", "Base" and "Section" views. There can only be one name for each class, but it can be changed. This lets the "Data" class name change, for example, so plotting can select which class to plot to. If a name is not set, the class name is set and returned. """ self._set_class_name(cl.encode(), name.encode()) def set_current(self): """ Sets the current map to this map. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_current() def set_map_name(self, name): """ Set the Map Name of the Map. :param name: Map Name :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_map_name(name.encode()) def set_map_scale(self, scale): """ Set the current map scale :param scale: New map scale (must be > 0). :type scale: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All views in the map will be resized for the new map scale. """ self._set_map_scale(scale) def set_map_size(self, xmin, ymin, xmax, ymax): """ Set the size of the Map. :param xmin: X minimum in mm :param ymin: Y minimun in mm :param xmax: X maximum in mm :param ymax: Y maximum in mm :type xmin: float :type ymin: float :type xmax: float :type ymax: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The map size is area on the `GXMAP <geosoft.gxapi.GXMAP>` that contains graphics to be plotted. The area can be bigger or smaller that the current views. In the absense of any other information only the area defined by the map size is plotted. .. seealso:: SetSizeViews_MAP """ self._set_map_size(xmin, ymin, xmax, ymax) def set_meta(self, meta): """ Write a `GXMETA <geosoft.gxapi.GXMETA>` to a map. :param meta: `GXMETA <geosoft.gxapi.GXMETA>` to write to map :type meta: GXMETA .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta(meta) def set_reg(self, reg): """ Write a `GXREG <geosoft.gxapi.GXREG>` to a map. :param reg: `GXREG <geosoft.gxapi.GXREG>` to write to map :type reg: GXREG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_reg(reg) @classmethod def sync(cls, map): """ Syncronize the Metadata :param map: Geosoft map file name :type map: str .. 
versionadded:: 7.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMAP._sync(GXContext._get_tls_geo(), map.encode()) def un_pack_files(self): """ UnPack all files from map to workspace. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._un_pack_files() def view_list(self, lst): """ Get a list of all views in this map. :param lst: List to hold the views. View names may be up to 2080 characters in length. :type lst: GXLST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `GXLST <geosoft.gxapi.GXLST>` class. """ self._view_list(lst) def view_list_ex(self, lst, mode): """ Get a list of views of certain types in this map :param lst: List to hold the views. View names may be up to 2080 characters in length. :param mode: :ref:`MAP_LIST_MODE` :type lst: GXLST :type mode: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._view_list_ex(lst, mode) def get_data_proj(self): """ Get the projection type of the Data view of a map. :returns: Project type as an integer :rtype: int .. versionadded:: 8.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_data_proj() return ret_val def dataset_file_path_list(self, lst): """ Get a list of all dataset file paths in this map. :param lst: List to hold the paths. Paths may be up to 2080 characters in length. :type lst: GXLST .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. 
seealso:: `GXLST <geosoft.gxapi.GXLST>` class. """ self._dataset_file_path_list(lst) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXSURFACEITEM.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSURFACEITEM(gxapi_cy.WrapSURFACEITEM): """ GXSURFACEITEM class. The `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` allows you to create, read and alter Geosurface files (``*.geosoft_surface``). A Geosurface file can contain one or more surface items (see `GXSURFACE <geosoft.gxapi.GXSURFACE>` class). A surface item can contains one or more triangular polyhedral meshes. """ def __init__(self, handle=0): super(GXSURFACEITEM, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` :returns: A null `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` :rtype: GXSURFACEITEM """ return GXSURFACEITEM() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, type, name): """ Create a `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` :param type: Type :param name: Name :type type: str :type name: str :returns: `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` Object :rtype: GXSURFACEITEM .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `set_properties <geosoft.gxapi.GXSURFACEITEM.set_properties>` and `set_default_render_properties <geosoft.gxapi.GXSURFACEITEM.set_default_render_properties>` """ ret_val = gxapi_cy.WrapSURFACEITEM._create(GXContext._get_tls_geo(), type.encode(), name.encode()) return GXSURFACEITEM(ret_val) def get_guid(self, guid): """ Gets the GUID of the surface item. :param guid: GUID :type guid: str_ref .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The value returned by this call will not be valid for newly created items until after a call to `GXSURFACE.add_surface_item <geosoft.gxapi.GXSURFACE.add_surface_item>`. """ guid.value = self._get_guid(guid.value.encode()) def set_properties(self, type, name, source_guid, source_name, source_measure, secondary_source_guid, secondary_source_name, secondary_source_measure): """ Sets the properties of the surface item. :param type: Type :param name: Name :param source_guid: SourceGuid :param source_name: SourceName :param source_measure: SourceMeasure :param secondary_source_guid: SecondarySourceGuid :param secondary_source_name: SecondarySourceName :param secondary_source_measure: SecondarySourceMeasure :type type: str :type name: str :type source_guid: str :type source_name: str :type source_measure: float :type secondary_source_guid: str :type secondary_source_name: str :type secondary_source_measure: float .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. 
seealso:: `GXSYS.generate_guid <geosoft.gxapi.GXSYS.generate_guid>` """ self._set_properties(type.encode(), name.encode(), source_guid.encode(), source_name.encode(), source_measure, secondary_source_guid.encode(), secondary_source_name.encode(), secondary_source_measure) def set_properties_ex(self, type, name, source_guid, source_name, source_measure, secondary_source_guid, secondary_source_name, secondary_source_option, secondary_source_measure, secondary_source_measure2): """ Sets the properties of the surface item (includes new properties introduced in 8.5). :param type: Type :param name: Name :param source_guid: SourceGuid :param source_name: SourceName :param source_measure: SourceMeasure :param secondary_source_guid: SecondarySourceGuid :param secondary_source_name: SecondarySourceName :param secondary_source_option: SecondarySourceOption :param secondary_source_measure: SecondarySourceMeasure :param secondary_source_measure2: SecondarySourceMeasure2 :type type: str :type name: str :type source_guid: str :type source_name: str :type source_measure: float :type secondary_source_guid: str :type secondary_source_name: str :type secondary_source_option: int :type secondary_source_measure: float :type secondary_source_measure2: float .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `GXSYS.generate_guid <geosoft.gxapi.GXSYS.generate_guid>` """ self._set_properties_ex(type.encode(), name.encode(), source_guid.encode(), source_name.encode(), source_measure, secondary_source_guid.encode(), secondary_source_name.encode(), secondary_source_option, secondary_source_measure, secondary_source_measure2) def get_properties(self, type, name, source_guid, source_name, source_measure, secondary_source_guid, secondary_source_name, secondary_source_measure): """ Gets the properties of the surface item. 
:param type: Type :param name: Name :param source_guid: SourceGuid :param source_name: SourceName :param source_measure: SourceMeasure :param secondary_source_guid: SecondarySourceGuid :param secondary_source_name: SecondarySourceName :param secondary_source_measure: SecondarySourceMeasure :type type: str_ref :type name: str_ref :type source_guid: str_ref :type source_name: str_ref :type source_measure: float_ref :type secondary_source_guid: str_ref :type secondary_source_name: str_ref :type secondary_source_measure: float_ref .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ type.value, name.value, source_guid.value, source_name.value, source_measure.value, secondary_source_guid.value, secondary_source_name.value, secondary_source_measure.value = self._get_properties(type.value.encode(), name.value.encode(), source_guid.value.encode(), source_name.value.encode(), source_measure.value, secondary_source_guid.value.encode(), secondary_source_name.value.encode(), secondary_source_measure.value) def get_properties_ex(self, type, name, source_guid, source_name, source_measure, secondary_source_guid, secondary_source_name, secondary_source_option, secondary_source_measure, secondary_source_measure2): """ Gets the properties of the surface item (includes new properties introduced in 8.5). 
:param type: Type :param name: Name :param source_guid: SourceGuid :param source_name: SourceName :param source_measure: SourceMeasure :param secondary_source_guid: SecondarySourceGuid :param secondary_source_name: SecondarySourceName :param secondary_source_option: SecondarySourceOption :param secondary_source_measure: SecondarySourceMeasure :param secondary_source_measure2: SecondarySourceMeasure2 :type type: str_ref :type name: str_ref :type source_guid: str_ref :type source_name: str_ref :type source_measure: float_ref :type secondary_source_guid: str_ref :type secondary_source_name: str_ref :type secondary_source_option: int_ref :type secondary_source_measure: float_ref :type secondary_source_measure2: float_ref .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ type.value, name.value, source_guid.value, source_name.value, source_measure.value, secondary_source_guid.value, secondary_source_name.value, secondary_source_option.value, secondary_source_measure.value, secondary_source_measure2.value = self._get_properties_ex(type.value.encode(), name.value.encode(), source_guid.value.encode(), source_name.value.encode(), source_measure.value, secondary_source_guid.value.encode(), secondary_source_name.value.encode(), secondary_source_option.value, secondary_source_measure.value, secondary_source_measure2.value) def set_default_render_properties(self, color, transparency, render_mode): """ Sets default render properties of the surface item. :param color: Color :param transparency: Transparency :param render_mode: :ref:`SURFACERENDER_MODE` :type color: int :type transparency: float :type render_mode: int .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. 
seealso:: `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` """ self._set_default_render_properties(color, transparency, render_mode) def get_default_render_properties(self, color, transparency, render_mode): """ Gets default render properties of the surface item. :param color: Color :param transparency: Transparency :param render_mode: :ref:`SURFACERENDER_MODE` :type color: int_ref :type transparency: float_ref :type render_mode: int_ref .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` """ color.value, transparency.value, render_mode.value = self._get_default_render_properties(color.value, transparency.value, render_mode.value) def num_components(self): """ Get the number of components in the surface item. :returns: Number of components in the surface item. :rtype: int .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_components() return ret_val def add_mesh(self, vert_v_vx, vert_v_vy, vert_v_vz, tri_vv_pt1, tri_vv_pt2, tri_vv_pt3): """ Adds a triangular polyhedral mesh component to the surface item. :param vert_v_vx: Vertices X location :param vert_v_vy: Vertices Y location :param vert_v_vz: Vertices Z location :param tri_vv_pt1: Triangles 1st Vertex :param tri_vv_pt2: Triangles 2nd Vertex :param tri_vv_pt3: Triangles 3rd Vertex :type vert_v_vx: GXVV :type vert_v_vy: GXVV :type vert_v_vz: GXVV :type tri_vv_pt1: GXVV :type tri_vv_pt2: GXVV :type tri_vv_pt3: GXVV :returns: The index of the component added. :rtype: int .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._add_mesh(vert_v_vx, vert_v_vy, vert_v_vz, tri_vv_pt1, tri_vv_pt2, tri_vv_pt3) return ret_val def get_mesh(self, index, vert_v_vx, vert_v_vy, vert_v_vz, tri_vv_pt1, tri_vv_pt2, tri_vv_pt3): """ Gets a triangular polyhedral mesh of a component in the surface item. :param index: Index of the component :param vert_v_vx: Vertices X :param vert_v_vy: Vertices Y :param vert_v_vz: Vertices Z :param tri_vv_pt1: Triangles 1st Vertex :param tri_vv_pt2: Triangles 2nd Vertex :param tri_vv_pt3: Triangles 3rd Vertex :type index: int :type vert_v_vx: GXVV :type vert_v_vy: GXVV :type vert_v_vz: GXVV :type tri_vv_pt1: GXVV :type tri_vv_pt2: GXVV :type tri_vv_pt3: GXVV .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_mesh(index, vert_v_vx, vert_v_vy, vert_v_vz, tri_vv_pt1, tri_vv_pt2, tri_vv_pt3) def get_extents(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the spatial range of the surface item. :param min_x: Minimum valid data in X. :param min_y: Minimum valid data in Y. :param min_z: Minimum valid data in Z. :param max_x: Maximum valid data in X. :param max_y: Maximum valid data in Y. :param max_z: Maximum valid data in Z. :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref ..
versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_extents(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) def get_mesh_info(self, index, closed, n_inner_comps, area, volume, volume_confidence_interval): """ Gets information about a triangular polyhedral mesh component in the surface item. :param index: Index of the component :param closed: indicating if mesh is closed :param n_inner_comps: Number of inner components :param area: Area :param volume: Volume :param volume_confidence_interval: Volume confidence interval :type index: int :type closed: bool_ref :type n_inner_comps: int_ref :type area: float_ref :type volume: float_ref :type volume_confidence_interval: float_ref .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ closed.value, n_inner_comps.value, area.value, volume.value, volume_confidence_interval.value = self._get_mesh_info(index, closed.value, n_inner_comps.value, area.value, volume.value, volume_confidence_interval.value) def get_info(self, closed, area, volume, volume_confidence_interval): """ Gets information about the surface item. :param closed: indicating if all meshes in item are closed :param area: Area :param volume: Volume :param volume_confidence_interval: Volume confidence interval :type closed: bool_ref :type area: float_ref :type volume: float_ref :type volume_confidence_interval: float_ref ..
versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ closed.value, area.value, volume.value, volume_confidence_interval.value = self._get_info(closed.value, area.value, volume.value, volume_confidence_interval.value) def get_geometry_info(self, vertices, triangles): """ Get the total number of vertices and triangles of all mesh components in item. :param vertices: Total number of vertices :param triangles: Total number of triangles :type vertices: int_ref :type triangles: int_ref .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ vertices.value, triangles.value = self._get_geometry_info(vertices.value, triangles.value) def compute_extended_info(self, components, vertices, edges, triangles, inconsistent, invalid, intersectiona): """ Compute more information (including validation) about of all mesh components in the surface item. :param components: Number of inner components (recomputed) :param vertices: Total number of valid vertices :param edges: Total number of valid edges :param triangles: Total number of valid triangles :param inconsistent: Number of inconsistent triangles :param invalid: Number of invalid triangles :param intersectiona: Number of self intersections :type components: int_ref :type vertices: int_ref :type edges: int_ref :type triangles: int_ref :type inconsistent: int_ref :type invalid: int_ref :type intersectiona: int_ref .. 
versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ components.value, vertices.value, edges.value, triangles.value, inconsistent.value, invalid.value, intersectiona.value = self._compute_extended_info(components.value, vertices.value, edges.value, triangles.value, inconsistent.value, invalid.value, intersectiona.value) def compute_poly_line_intersections(self, vv_x, vv_y, vv_z, vv_x_out, vv_y_out, vv_z_out): """ Compute intersections of a 3D PolyLine with a `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` Object :param vv_x: X coordinates. :param vv_y: Y coordinates. :param vv_z: Z coordinates. :param vv_x_out: X coordinates. :param vv_y_out: Y coordinates. :param vv_z_out: Z coordinates. :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_x_out: GXVV :type vv_y_out: GXVV :type vv_z_out: GXVV .. versionadded:: 9.7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._compute_poly_line_intersections(vv_x, vv_y, vv_z, vv_x_out, vv_y_out, vv_z_out) def intersects_bounding_box(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Checks intersections of a bounding box with a `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` Object :param min_x: Min X :param min_y: Min Y :param min_z: Min Z :param max_x: Max X :param max_y: Max Y :param max_z: Max Z :type min_x: float :type min_y: float :type min_z: float :type max_x: float :type max_y: float :type max_z: float :rtype: bool .. 
versionadded:: 9.7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._intersects_bounding_box(min_x, min_y, min_z, max_x, max_y, max_z) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXBF.rst .. _GXBF: GXBF class ================================== .. autoclass:: geosoft.gxapi.GXBF :members: .. _BF_BYTEORDER: BF_BYTEORDER constants ----------------------------------------------------------------------- Byte order for read/write .. autodata:: geosoft.gxapi.BF_BYTEORDER_LSB :annotation: .. autoattribute:: geosoft.gxapi.BF_BYTEORDER_LSB .. autodata:: geosoft.gxapi.BF_BYTEORDER_MSB :annotation: .. autoattribute:: geosoft.gxapi.BF_BYTEORDER_MSB .. _BF_CLOSE: BF_CLOSE constants ----------------------------------------------------------------------- Close Flags .. autodata:: geosoft.gxapi.BF_KEEP :annotation: .. autoattribute:: geosoft.gxapi.BF_KEEP .. autodata:: geosoft.gxapi.BF_DELETE :annotation: .. autoattribute:: geosoft.gxapi.BF_DELETE .. _BF_ENCODE: BF_ENCODE constants ----------------------------------------------------------------------- The way a string is encoded .. autodata:: geosoft.gxapi.BF_ENCODE_ANSI :annotation: .. autoattribute:: geosoft.gxapi.BF_ENCODE_ANSI .. autodata:: geosoft.gxapi.BF_ENCODE_UTF8 :annotation: .. autoattribute:: geosoft.gxapi.BF_ENCODE_UTF8 .. _BF_OPEN_MODE: BF_OPEN_MODE constants ----------------------------------------------------------------------- Open Status .. autodata:: geosoft.gxapi.BF_READ :annotation: .. autoattribute:: geosoft.gxapi.BF_READ .. autodata:: geosoft.gxapi.BF_READWRITE_NEW :annotation: .. autoattribute:: geosoft.gxapi.BF_READWRITE_NEW .. 
autodata:: geosoft.gxapi.BF_READWRITE_OLD :annotation: .. autoattribute:: geosoft.gxapi.BF_READWRITE_OLD .. autodata:: geosoft.gxapi.BF_READWRITE_APP :annotation: .. autoattribute:: geosoft.gxapi.BF_READWRITE_APP .. _BF_SEEK: BF_SEEK constants ----------------------------------------------------------------------- Seek Location .. autodata:: geosoft.gxapi.BF_SEEK_START :annotation: .. autoattribute:: geosoft.gxapi.BF_SEEK_START .. autodata:: geosoft.gxapi.BF_SEEK_CURRENT :annotation: .. autoattribute:: geosoft.gxapi.BF_SEEK_CURRENT .. autodata:: geosoft.gxapi.BF_SEEK_EOF :annotation: .. autoattribute:: geosoft.gxapi.BF_SEEK_EOF <file_sep>/geosoft/gxapi/GXLTB.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXLTB(gxapi_cy.WrapLTB): """ GXLTB class. An `GXLTB <geosoft.gxapi.GXLTB>` object is typically created from a CSV (comma-separated values) file, and is a table of information that may be accessed by row or column. The `GXLTB <geosoft.gxapi.GXLTB>` class is recommended for use with small tables produced from short lists (of the order of 1000's of records) such as the different geographic projections and their defining parameters. Large tables, such as those required for table-lookup functions, should be accessed using the `GXTB <geosoft.gxapi.GXTB>` class. **Note:** An `GXLTB <geosoft.gxapi.GXLTB>` ASCII table file has the following structure: / comments key_name,col_1,col_2,col_3,etc... /field names key_1,token,token,token,etc... /data lines key_2,token,token,token,etc... etc... The first column must be the key column (all entries unique).
The header line is optional and can be used to find entries. Comment and empty lines are ignored. """ def __init__(self, handle=0): super(GXLTB, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXLTB <geosoft.gxapi.GXLTB>` :returns: A null `GXLTB <geosoft.gxapi.GXLTB>` :rtype: GXLTB """ return GXLTB() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def add_record(self, key, rec): """ Add a new record. :param key: Key name :param rec: Returned record number :type key: str :type rec: int_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the record exists, the existing record is cleared and the record number is returned. """ rec.value = self._add_record(key.encode(), rec.value) def contract(self, lt_bc): """ Contract the contents of two same-key and same-fields tables. :param lt_bc: Contract `GXLTB <geosoft.gxapi.GXLTB>` :type lt_bc: GXLTB :returns: x - Handle to `GXLTB <geosoft.gxapi.GXLTB>` object NULL - Error of some kind :rtype: GXLTB .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The "Key" of the child must be the same as the "Key" of the Master. The fields of two `GXLTB <geosoft.gxapi.GXLTB>` must be the same. Contracting takes place as follows: 1. The Master `GXLTB <geosoft.gxapi.GXLTB>` is copied to the New `GXLTB <geosoft.gxapi.GXLTB>`. 2. All records in the contract LIB are deleted from the New `GXLTB <geosoft.gxapi.GXLTB>` (if there are any) 3. The New `GXLTB <geosoft.gxapi.GXLTB>` is returned. 
""" ret_val = self._contract(lt_bc) return GXLTB(ret_val) @classmethod def create(cls, file, type, delim, key): """ Creates a `GXLTB <geosoft.gxapi.GXLTB>` object from a file. :param file: File name, .csv assumed, searched locally then in GEOSOFT. :param type: :ref:`LTB_TYPE` :param delim: :ref:`LTB_DELIM` :param key: Key to find if only one record required, "" to read entire table. :type file: str :type type: int :type delim: int :type key: str :returns: x - Handle to `GXLTB <geosoft.gxapi.GXLTB>` object NULL - Error of some kind :rtype: GXLTB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the file has no header, field names are assumed to be "0", "1", etc. """ ret_val = gxapi_cy.WrapLTB._create(GXContext._get_tls_geo(), file.encode(), type, delim, key.encode()) return GXLTB(ret_val) @classmethod def create_crypt(cls, file, type, delim, case_sensitive, key, crypt): """ Creates a `GXLTB <geosoft.gxapi.GXLTB>` object from an encrypted file. :param file: File name, .csv assumed, searched locally then in GEOSOFT. :param type: :ref:`LTB_TYPE` :param delim: :ref:`LTB_DELIM` :param case_sensitive: :ref:`LTB_CASE` :param key: Key to find if only one record required, "" to read entire table. :param crypt: Decryption Key :ref:`SYS_CRYPT_KEY` :type file: str :type type: int :type delim: int :type case_sensitive: int :type key: str :type crypt: str :returns: x - Handle to `GXLTB <geosoft.gxapi.GXLTB>` object NULL - Error of some kind :rtype: GXLTB .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the file has no header, field names are assumed to be "0", "1", etc. 
""" ret_val = gxapi_cy.WrapLTB._create_crypt(GXContext._get_tls_geo(), file.encode(), type, delim, case_sensitive, key.encode(), crypt.encode()) return GXLTB(ret_val) @classmethod def create_ex(cls, file, type, delim, case_sensitive, key): """ Creates a `GXLTB <geosoft.gxapi.GXLTB>` object from a file. :param file: File name, .csv assumed, searched locally then in GEOSOFT. :param type: :ref:`LTB_TYPE` :param delim: :ref:`LTB_DELIM` :param case_sensitive: :ref:`LTB_CASE` :param key: Key to find if only one record required, "" to read entire table. :type file: str :type type: int :type delim: int :type case_sensitive: int :type key: str :returns: x - Handle to `GXLTB <geosoft.gxapi.GXLTB>` object NULL - Error of some kind :rtype: GXLTB .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the file has no header, field names are assumed to be "0", "1", etc. """ ret_val = gxapi_cy.WrapLTB._create_ex(GXContext._get_tls_geo(), file.encode(), type, delim, case_sensitive, key.encode()) return GXLTB(ret_val) def delete_record(self, rec): """ Delete a record. :param rec: Record number to delete :type rec: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Record numbers after the deleted record will be reduced by 1. """ self._delete_record(rec) def get_con_lst(self, fld, match, match_type, lst): """ Populate a `GXLST <geosoft.gxapi.GXLST>` with `GXLTB <geosoft.gxapi.GXLTB>` names from matching fields. :param fld: Field :param match: String to match to field, must be lower-case :param match_type: :ref:`LTB_CONLST` :param lst: List to populate :type fld: int :type match: str :type match_type: int :type lst: GXLST .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXLST <geosoft.gxapi.GXLST>` object will be in the order of the file. The `GXLST <geosoft.gxapi.GXLST>` names will be the `GXLTB <geosoft.gxapi.GXLTB>` key fields and the `GXLST <geosoft.gxapi.GXLST>` values will be the `GXLTB <geosoft.gxapi.GXLTB>` record numbers. """ self._get_con_lst(fld, match.encode(), match_type, lst) def get_lst(self, fld, lst): """ Populate an `GXLST <geosoft.gxapi.GXLST>` with `GXLTB <geosoft.gxapi.GXLTB>` names :param fld: Field to get, 0 for key field :param lst: List to populate :type fld: int :type lst: GXLST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXLST <geosoft.gxapi.GXLST>` object will be in the order of the file. The `GXLST <geosoft.gxapi.GXLST>` names will be the `GXLTB <geosoft.gxapi.GXLTB>` fields and the `GXLST <geosoft.gxapi.GXLST>` values will be the `GXLTB <geosoft.gxapi.GXLTB>` record numbers. """ self._get_lst(fld, lst) def get_lst2(self, fld_n, fld_v, lst): """ Populate an `GXLST <geosoft.gxapi.GXLST>` with `GXLTB <geosoft.gxapi.GXLTB>` names and values :param fld_n: Field for names, 0 for key field :param fld_v: Field for values, 0 for key field :param lst: List to populate :type fld_n: int :type fld_v: int :type lst: GXLST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXLST <geosoft.gxapi.GXLST>` object will be in the order of the file. The `GXLST <geosoft.gxapi.GXLST>` names will come from the `GXLTB <geosoft.gxapi.GXLTB>` name field and the `GXLST <geosoft.gxapi.GXLST>` values will come from value field specified. """ self._get_lst2(fld_n, fld_v, lst) def fields(self): """ Get number of fields. 
:returns: Number of fields in the `GXLTB <geosoft.gxapi.GXLTB>`. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._fields() return ret_val def find_field(self, field): """ Return the field number for the specified field. :param field: Field name :type field: str :returns: -1 if field does not exist. field number if field does exist. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._find_field(field.encode()) return ret_val def find_key(self, key): """ Return the key index of a record. :param key: Key name :type key: str :returns: -1 if key does not exist. record number if key does exist. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._find_key(key.encode()) return ret_val def get_field(self, field_num, field): """ Get a field name by index. :param field_num: Field number :param field: Returned field name :type field_num: int :type field: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the record or field are out of range, an empty string is returned. """ field.value = self._get_field(field_num, field.value.encode()) def get_int(self, record, field): """ Get a int entry from the `GXLTB <geosoft.gxapi.GXLTB>` :param record: Record number :param field: Field number :type record: int :type field: int :returns: If the record or field are out of range, an empty string or dummy value is returned. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_int(record, field) return ret_val def get_string(self, record, field, token): """ Get an entry from the `GXLTB <geosoft.gxapi.GXLTB>` :param record: Record number :param field: Field number :param token: Returned field token :type record: int :type field: int :type token: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the record or field are out of range, an empty string or dummy value is returned. """ token.value = self._get_string(record, field, token.value.encode()) def get_english_string(self, record, field, token): """ Get the English entry from the `GXLTB <geosoft.gxapi.GXLTB>` :param record: Record number :param field: Field number :param token: Returned field token :type record: int :type field: int :type token: str_ref .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the record or field are out of range, an empty string or dummy value is returned. """ token.value = self._get_english_string(record, field, token.value.encode()) def records(self): """ Get number of records in `GXLTB <geosoft.gxapi.GXLTB>`. :returns: Number of records in the `GXLTB <geosoft.gxapi.GXLTB>`. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._records() return ret_val def search(self, rec, fld, field): """ Search for a record containing field value :param rec: Search start record :param fld: Field number :param field: Search string (case sensitive) :type rec: int :type fld: int :type field: str :returns: -1 if search failed. 
record number if search succeeds :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._search(rec, fld, field.encode()) return ret_val def merge(self, lt_bc): """ Merge the contents of two same-key tables. :param lt_bc: Child `GXLTB <geosoft.gxapi.GXLTB>` :type lt_bc: GXLTB :returns: x - Handle to `GXLTB <geosoft.gxapi.GXLTB>` object NULL - Error of some kind :rtype: GXLTB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Merging takes place as follows: 1. The "Key" of the child must be the same as the "Key" of the Master. 2. The fields of the Master `GXLTB <geosoft.gxapi.GXLTB>` are collected in-order. 3. Any new fields of the Child `GXLTB <geosoft.gxapi.GXLTB>` are added to the end of the list. 4. A new `GXLTB <geosoft.gxapi.GXLTB>` is created to contain the new field list (in-order). 5. The Child table contents are added to the New `GXLTB <geosoft.gxapi.GXLTB>`. 6. The Master table contents are added/replace the New `GXLTB <geosoft.gxapi.GXLTB>`. 7. The New `GXLTB <geosoft.gxapi.GXLTB>` is returned. If the fields of the Master and Child are the same, steps 4, 5, 6 are replaced by: 4. The Master `GXLTB <geosoft.gxapi.GXLTB>` is copied to the New `GXLTB <geosoft.gxapi.GXLTB>`. 5. Any New records found in the child are added to the New `GXLTB <geosoft.gxapi.GXLTB>` """ ret_val = self._merge(lt_bc) return GXLTB(ret_val) def get_double(self, record, field): """ Get a real entry from the `GXLTB <geosoft.gxapi.GXLTB>` :param record: Record number :param field: Field number :type record: int :type field: int :returns: If the record or field are out of range, an empty string or dummy value is returned. :rtype: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_double(record, field) return ret_val def save(self, file): """ Save `GXLTB <geosoft.gxapi.GXLTB>` changes to existing or new file :param file: File name, .csv assumed. If "", save to original file. :type file: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save(file.encode()) def save_crypt(self, file, crypt): """ Save `GXLTB <geosoft.gxapi.GXLTB>` to a new file using encryption :param file: File name, .csv assumed. If "", save to original file. :param crypt: Encryption key :ref:`SYS_CRYPT_KEY` :type file: str :type crypt: str .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save_crypt(file.encode(), crypt.encode()) def set_int(self, record, field, data): """ Set a long entry :param record: Record number :param field: Field number :param data: Entry :type record: int :type field: int :type data: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_int(record, field, data) def set_double(self, record, field, data): """ Set a double entry :param record: Record number :param field: Field number :param data: Entry :type record: int :type field: int :type data: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_double(record, field, data) def set_string(self, record, field, token): """ Set an entry :param record: Record number :param field: Field number :param token: Entry :type record: int :type field: int :type token: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_string(record, field, token.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/templates/gxapi_class.rst .. _{{ class_name }}: {{ class_name }} class ================================== .. autoclass:: geosoft.gxapi.{{ class_name }} :members: {% for definition in definitions.definition %} .. _{{ definition.name }}: {{ definition.name }} constant{% if not definition.single_constant %}s{% endif %} ----------------------------------------------------------------------- {% if not definition.single_constant and definition.description %} :: {{ definition.get_sphinx_docstring()|indent(3, true) }} {% endif %} {% for defined_value in definition.defined_value %} .. autoattribute:: geosoft.gxapi.{{ defined_value.name }} {% if definition.single_constant and definition.description %} :: {{ definition.get_sphinx_docstring()|indent(3, true) }} {% elif defined_value.description %} :: {{ defined_value.get_sphinx_docstring()|indent(3, true) }} {% endif %} {% endfor %} {% endfor %} <file_sep>/geosoft/gxpy/agg.py """ Geosoft aggregate images :Classes: ======================== ==================================================== :class:`Aggregate_image` image constructed from one or more grid/image layers ======================== ==================================================== Geosoft aggregates are "aggregations" of one or more grid layers that together create a georeferenced image that can be placed in a map view or rendered on a plane in a 3D view. .. seealso:: :class:`geosoft.gxapi.GXAGG` .. 
note:: Regression tests provide usage examples: `aggregate tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_agg.py>`_ """ import os import geosoft import geosoft.gxapi as gxapi from . import gx from . import vv as gxvv from . import grid as gxgrd from . import map as gxmap from . import view as gxview from . import group as gxgroup from . import geometry as gxgm from . import coordinate_system as gxcs __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class AggregateException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.agg`. .. versionadded:: 9.2 """ pass ZONE_DEFAULT = 0 ZONE_LINEAR = 1 ZONE_NORMAL = 2 ZONE_EQUALAREA = 3 ZONE_SHADE = 4 ZONE_LOGLINEAR = 5 ZONE_LAST = 6 class Aggregate_image(gxgm.Geometry): """ The AGG class supports the creation of aggregate images from one or more grid data sets. Aggregates can be placed into a 2D or 3D view for display. :Constructors: :`open`: open an existing aggregate :`new`: create a new aggregate .. versionadded:: 9.2 """ def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return self._create_name() def __enter__(self): return self def __exit__(self, typ, value, traceback): self.__del__() def __del__(self): if hasattr(self, '_gxagg'): self._gxagg = None def __init__(self): self._gxagg = None self._base_properties = None self._extent = None super().__init__() @classmethod def new(cls, grid_file=None, **kwargs): """ Create a new aggregate from a grid. :param grid_file: grid file name :meth:`add_layer` is called to add the grid as a layer. .. versionadded:: 9.2 """ agg = cls() agg._gxagg = gxapi.GXAGG.create() if grid_file is not None: agg.add_layer(grid_file, **kwargs) return agg @classmethod def open(cls, gxagg): """ Create an :class:`Aggregate_image` from a :class:`geosoft.gxapi.GXAGG` instance. :param gxagg: :class:`geosoft.gxapi.GXAGG` instance .. 
versionadded:: 9.2 """ agg = cls() if not isinstance(gxagg, gxapi.GXAGG): raise AggregateException(_t('A gxapi.GXAGG isstance is required.')) agg._gxagg = gxagg return agg def close(self): """Close an Aggregate, releases resources.""" self._gxagg = None def _set_properties(self): if self.layer_count > 0: with gxgrd.Grid.open(self.layer_file_names[0]) as g: self._base_properties = g.properties() self._extent = gxgm.Point2(g.extent) self._extent_2d = g.extent_cell_2d() @property def extent(self): return self._extent @property def extent_2d(self): return self._extent_2d @property def gxagg(self): """ The :class:`geosoft.gxapi.GXAGG` instance handle.""" return self._gxagg @property def layer_count(self): """Number of layers in the aggregate.""" return self.gxagg.num_layers() @property def brightness(self): """ Aggregate brightness between -1 (black) and +1 (white).""" return self.gxagg.get_brightness() @brightness.setter def brightness(self, adjustment): a = max(-1.0, min(adjustment, 1.0)) self.gxagg.change_brightness(a) @property def name(self): """Name of the Aggregate_image.""" return self._create_name() @property def layer_file_names(self): """ Return list of layer files in the aggregate. .. versionadded:: 9.2 """ vv = gxvv.GXvv(dtype='U1024') self.gxagg.list_img(vv.gxvv) return list(vv.np) @property def spatial_properties(self): """ Returns the spatial properties of the base layer in the aggregate. :return: (nx, ny, x0, y0, dx, dy, rot) .. versionadded:: 9.3.1 """ x0 = self._base_properties['x0'] y0 = self._base_properties['y0'] dx = self._base_properties['dx'] dy = self._base_properties['dy'] nx = self._base_properties['nx'] ny = self._base_properties['ny'] rot = self._base_properties['rot'] return nx, ny, x0, y0, dx, dy, rot @property def coordinate_system(self): """ Returns the aggregate coordinate_system, which is the same as the first layer. :return: pixel size in units of the coordinate system .. 
versionadded:: 9.3.1 """ return self._base_properties['coordinate_system'] def _layer_index(self, layer): if isinstance(layer, str): layer = layer.lower() for l in range(self.layer_count): if layer == self.layer_file_names[l].lower(): return l raise AggregateException(_t('Layer \'{}\' not found.'.format(layer))) if layer >= self.layer_count: raise AggregateException( _t('Layer \'{}\' ot of range for aggregate with {} layers.'.format(layer, self.layer_count))) return layer def _create_name(self): s = '' layernames = self.layer_file_names if not layernames: return s names = [] for fn in layernames: names.append(os.path.basename(fn).split('.')[0]) for fn in names: # ignore shaded layers if parent is here. if fn[-2:] == '_s': if fn[:-2] in names: continue i = str.rfind(fn[:-2], '_') if i > 1 and fn[:i] in names: continue s = s + fn + ', ' return s[:-2] def add_layer(self, grid_file, color_map=None, zone=None, shade=False, minimum=None, maximum=None, contour=None): """ Add an image layer to an aggregate :param grid_file: The name of a grid file (image or data) to add. :param color_map: :class:`gxpy.group.Color_map` instance, or the name of a file, which may be `.tbl`, `.zon`, `.itr`, or `.agg`. :param zone: Colour distribution method: ============== ====================================================================== ZONE_DEFAULT as set by user global default settings ZONE_LINEAR linearly distributed ZONE_NORMAL normal (Gaussian) distribution ZONE_EQUALAREA each color will occupy an equal area on the image ZONE_LOGLINEAR logarithmic linear distribution ZONE_LAST last used coloring for this grid file ZONE_SHADE Displays the shaded image version of the grid. The shaded image is a grid file will with '_s' appended to the file name. If it does not exist, a shaded image with illumination inclination and declination both set to 45 degrees is automatically created. 
============== ====================================================================== :param shade: True, to add a shading layer :param minimum: Minimum data value. All grid values less than or equal to the minimum will be assigned the first color in the table. The default is calculated from the data. :param maximum: Maximum data value. All grid values greater than or equal to the maximum will be assigned the last color in the table. The default is calculated from the data. :param contour: Break colors on this interval, colors will be thinned if necessary. .. versionadded:: 9.2 """ if color_map is None: if zone == ZONE_SHADE: color_map = 'lgray.tbl' if (color_map is None) or (isinstance(color_map, str)): color_map = geosoft.gxpy.group.Color_map(color_map) color_map_file = color_map.save_file() try: if grid_file is not None: if zone is None: zone = ZONE_DEFAULT if minimum is None: minimum = gxapi.rDUMMY if maximum is None: maximum = gxapi.rDUMMY if contour is None: contour = gxapi.rDUMMY self.gxagg.layer_img_ex(grid_file, zone, color_map_file, minimum, maximum, contour) if shade and (zone != ZONE_SHADE): self.gxagg.layer_img(grid_file, ZONE_SHADE, 'lgray.tbl', gxapi.rDUMMY) finally: if os.path.exists(color_map_file): os.remove(color_map_file) with gxgrd.Grid.open(grid_file) as g: color_map.units = g.unit_of_measure if self._base_properties is None: self._set_properties() def layer_color_map(self, layer=0): """ Return the :class:`geosoft.gxpy.group.Color_map` of a layer. :param layer: layer number or layer name :returns: :class:`geosoft.gxpy.group.Color_map` .. 
versionadded:: 9.2 """ layer = self._layer_index(layer) itr = gxapi.GXITR.create() self.gxagg.get_layer_itr(layer, itr) cmap = geosoft.gxpy.group.Color_map(itr) cmap.title = os.path.basename(self.layer_file_names[layer]).split('.')[0] with gxgrd.Grid.open(self.layer_file_names[layer]) as g: cmap.unit_of_measure = g.unit_of_measure return cmap def layer_unit_of_measure(self, layer=0): """ Return the unit of measurement for the specified layer :param layer: layer number or layer name .. versionadded:: 9.3 """ layer = self._layer_index(layer) with gxgrd.Grid.open(self.layer_file_names[layer]) as g: uom = g.unit_of_measure return uom def figure_map(self, file_name=None, overwrite=False, title=None, legend_label=None, features=('SCALE', 'LEGEND', 'NEATLINE'), **kwargs): """ Create a figure map file from an aggregate. :param file_name: the name of the map, if None a default map is created. :param overwrite: True to overwrite existing image file :param title: Title added to the image :param legend_label: If plotting a legend make this the legned title. The default is the title in the first aggregate layer colour map. :param features: list of features to place on the map, default is ('SCALE', 'LEGEND', 'NEATLINE') =========== ========================================= 'SCALE' show a scale bar 'LEGEND' show an aggregate colour legend 'NEATLINE' draw a neat-line around the image 'ANNOT_XY' annotate map coordinates 'ANNOT_LL' annotate map Latitude, Longitude 'CONTOUR' contour the first layer in the aggregate =========== ========================================= :param kwargs: passed to `geosoft.gxpy.map.Map.new` .. 
versionadded:: 9.3 """ ref_grid_name = self.layer_file_names[0] ref_grid = gxgrd.Grid.open(ref_grid_name) # uppercase features, use a dict so we pop things we use and report error if isinstance(features, str): features = (features,) feature_list = {} if features is not None: for f in features: feature_list[f.upper()] = None if 'ALL' in feature_list: feature_list = {'ALL': None, 'LEGEND': None, 'CONTOUR': None} features = list(feature_list.keys()) # setup margins if not ('margins' in kwargs): bottom_margin = 1.0 if title: bottom_margin += len(title.split('\n')) * 1.0 if 'ALL' in feature_list or 'SCALE' in feature_list: bottom_margin += 1.2 right_margin = 1 if 'ALL' in feature_list or 'LEGEND' in feature_list: right_margin += 3.5 kwargs['margins'] = (1, right_margin, bottom_margin, 1) kwargs['coordinate_system'] = ref_grid.coordinate_system gmap = gxmap.Map.figure(ref_grid.extent_xy, file_name=file_name, overwrite=overwrite, features=features, title=title, **kwargs) with gxview.View.open(gmap, "data") as v: ref_grid = None gxgroup.Aggregate_group.new(v, self) if 'CONTOUR' in features: gxgroup.contour(v, 'contour', ref_grid_name) if 'LEGEND' in features: if self.layer_count > 1: cmap2 = self.layer_color_map(1) else: cmap2=None gxgroup.legend_color_bar(v, 'legend', title=legend_label, location=(1, 0), cmap=self.layer_color_map(0), cmap2=cmap2) return gmap def image_file(self, image_file=None, image_type=gxmap.RASTER_FORMAT_PNG, pix_width=None, display_area=None, pix_32_bit=False): """ Save the aggregate as a georeferenced image file. :param image_file: image file name. The extension should be consistent with the image_type. If not specified a temporary PNG file is created. :param image_type: image type, one ot the RASTER_FORMAT constants in `geosoft.gxpy.map`. :param pix_width: desired image width in pixels, default is the width of the aggregate base layer :param display_area: `geosoft.gxpy.geometry.Point2` instance, which defines the desired display area. 
The display area coordinate system can be different from the grid. :param pix_32_bit: make 32-bit image (with 8-bit alpha background) :return: image file name. .. versionadded:: 9.3.1 """ if self.layer_count == 0: raise AggregateException(_t('Aggregate has no layers')) if display_area is None: data_area = self.extent_2d coordinate_system = self.coordinate_system else: data_area = display_area.extent_xy if not gxcs.is_known(display_area.coordinate_system): coordinate_system = self.coordinate_system else: coordinate_system = display_area.coordinate_system if image_file is None: image_file = gx.gx().temp_file('.png') image_type = gxmap.RASTER_FORMAT_PNG nx, _, _, _, dx, *_ = self.spatial_properties if pix_width is None or pix_width <= 0: pix_width = nx with gxmap.Map.new(data_area=data_area, coordinate_system=coordinate_system, margins=(0, 0, 0, 0), inside_margin=0) as gmap: gmap.remove_on_close() with gxview.View.open(gmap, "data") as v: gxgroup.Aggregate_group.new(v, self) gmap.image_file(image_file, type=image_type, pix_width=pix_width, pix_32_bit=pix_32_bit) return image_file <file_sep>/geosoft/gxapi/GXDH.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXDB import GXDB from .GXMAP import GXMAP from .GXREG import GXREG ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDH(gxapi_cy.WrapDH): """ GXDH class. This class is used for importing and interacting with Drill Hole data files. For detailed information on Drill Hole data, see the documentation for Wholeplot. **Note:** The `GXDH <geosoft.gxapi.GXDH>` class has some defines not used by any functions. 
`DH_DEFINE_PLAN <geosoft.gxapi.DH_DEFINE_PLAN>` :ref:`DH_DEFINE_SECT` """ def __init__(self, handle=0): super(GXDH, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDH <geosoft.gxapi.GXDH>` :returns: A null `GXDH <geosoft.gxapi.GXDH>` :rtype: GXDH """ return GXDH() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # ArcGIS Target Functions @classmethod def is_esri(cls): """ Running inside ArcGIS? :returns: 0 - if No 1 - if Yes :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDH._is_esri(GXContext._get_tls_geo()) return ret_val # Data processing/conversion methods def creat_chan_lst(self, lst): """ Fills a `GXLST <geosoft.gxapi.GXLST>` with available string and numeric channel code values. :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill with channel code values. :type lst: GXLST .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Channel codes are in the format "[Assay] Au", where the name in the square brackets is descriptive part of the project database containing the given channel name. The above code might refer to the "Au" channel in the "Tutorial_Assay.gdb" database. """ self._creat_chan_lst(lst) def depth_data_lst(self, lst): """ Fills a `GXLST <geosoft.gxapi.GXLST>` with available channel code values from Depth databases. :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill with channel code values. :type lst: GXLST .. 
versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Channel codes are in the format "[Assay] Au", where the name in the square brackets is descriptive part of the project database containing the given channel name. The above code might refer to the "Au" channel in the "Tutorial_Assay.gdb" database. """ self._depth_data_lst(lst) def from_to_data_lst(self, assay, lst): """ Fills a `GXLST <geosoft.gxapi.GXLST>` with available string and numeric channel code values from From-To databases. :param assay: Assay dataset ("" for all) :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill with channel code values. :type assay: str :type lst: GXLST .. versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Channel codes are in the format "[Assay] Au", where the name in the square brackets is descriptive part of the project database containing the given channel name. The above code might refer to the "Au" channel in the "Tutorial_Assay.gdb" database. """ self._from_to_data_lst(assay.encode(), lst) def get_geology_contacts(self, lst, chan_code, geology, surface, gap, vv_x, vv_y, vv_z): """ Return XYZ locations of top or bottom geological surfaces :param lst: `GXLST <geosoft.gxapi.GXLST>` of holes to check :param chan_code: Channel code :param geology: Geology item :param surface: :ref:`DH_SURFACE` Surface selection (top or bottom) :param gap: Max gap to skip when compositing (`GS_R8DM <geosoft.gxapi.GS_R8DM>` for none) :param vv_x: X locations of the contact :param vv_y: Y locations of the contact :param vv_z: Z locations of the contact :type lst: GXLST :type chan_code: str :type geology: str :type surface: int :type gap: float :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. 
versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** For the input `GXLST <geosoft.gxapi.GXLST>` of holes, returns XYZ location of top or bottom contact with the input geology. Those selected holes which do NOT have contacts, return `rDUMMY <geosoft.gxapi.rDUMMY>` for the corresponding locations. """ self._get_geology_contacts(lst, chan_code.encode(), geology.encode(), surface, gap, vv_x, vv_y, vv_z) def get_oriented_core_dip_dir(self, lst, alpha, beta, top_ref, dip, dip_dir): """ Converted alpha/beta values in oriented cores to dip/dip direction. :param lst: List of holes to process (e.g. from `hole_lst <geosoft.gxapi.GXDH.hole_lst>`) :param alpha: Channel code for input alpha data :param beta: Channel code for input beta data :param top_ref: 1: Top of core reference 0: Bottom of core reference :param dip: Channel name for output dip data :param dip_dir: Channel name for output dip direction :type lst: GXLST :type alpha: str :type beta: str :type top_ref: int :type dip: str :type dip_dir: str .. versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The input data are the oriented core alpha and beta values, using either top or bottom reference. The values for each hole in the `GXLST <geosoft.gxapi.GXLST>` are converted to "absolute" dip and dip-direction values, using the resurveyed hole orientations at each depth. The alpha and beta data must be from the same database, and the output dip and dip/dir channels are written to the same database. """ self._get_oriented_core_dip_dir(lst, alpha.encode(), beta.encode(), top_ref, dip.encode(), dip_dir.encode()) def get_unique_channel_items(self, chan_code, selected_holes, vv): """ Return a `GXVV <geosoft.gxapi.GXVV>` with unique items in a channel. 
:param chan_code: Channel code :param selected_holes: Selected holes (1), All holes (0) :param vv: `GXVV <geosoft.gxapi.GXVV>` filled with items (converted to this `GXVV <geosoft.gxapi.GXVV>` type) :type chan_code: str :type selected_holes: int :type vv: GXVV .. versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Finds and sorts all the unique non-dummy items for the selected channel. """ self._get_unique_channel_items(chan_code.encode(), selected_holes, vv) def get_unique_channel_items_from_collar(self, chan_name, selected_holes, vv): """ Return a `GXVV <geosoft.gxapi.GXVV>` with unique items in a channel. :param chan_name: Channel :param selected_holes: Selected holes (1), All holes (0) :param vv: `GXVV <geosoft.gxapi.GXVV>` filled with items (converted to this `GXVV <geosoft.gxapi.GXVV>` type) :type chan_name: str :type selected_holes: int :type vv: GXVV .. versionadded:: 7.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Finds and sorts all the unique non-dummy items for the selected channel. """ self._get_unique_channel_items_from_collar(chan_name.encode(), selected_holes, vv) def chan_type(self, chan_code): """ Return the data type for a channel code. :param chan_code: Channel code :type chan_code: str :returns: Channel data type :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Finds and sorts all the unique non-dummy items for the selected channel. """ ret_val = self._chan_type(chan_code.encode()) return ret_val def find_hole_intersection(self, hole, img, x, y, z): """ Return XYZ locations of the intersection of a hole with a DEM grid. 
:param hole: Hole index :param img: DEM Grid :param x: Returned X location :param y: Returned Y location :param z: Returned Z location :type hole: int :type img: GXIMG :type x: float_ref :type y: float_ref :type z: float_ref :returns: 1 if intersection found 0 if no intersection found :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Input the hole index and an `GXIMG <geosoft.gxapi.GXIMG>` object. Returns XYZ location of the hole intersection with the DEM. Interpolation inside the DEM uses the native `GXIMG <geosoft.gxapi.GXIMG>` interp method. If no intersection is found the returned XYZ locations are `rDUMMY <geosoft.gxapi.rDUMMY>`. """ ret_val, x.value, y.value, z.value = self._find_hole_intersection(hole, img, x.value, y.value, z.value) return ret_val def get_chan_code_info(self, chan_code, assay_db_index, chan): """ Return the assay database index and channel name from a channel code string. :param chan_code: Input channel code "[Assay] channel" :param assay_db_index: Returned assay database index :param chan: Channel name :type chan_code: str :type assay_db_index: int_ref :type chan: str_ref .. versionadded:: 7.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The input channel code is in the form "[Assay] channel" """ assay_db_index.value, chan.value = self._get_chan_code_info(chan_code.encode(), assay_db_index.value, chan.value.encode()) def grid_intersection(self, xi, yi, zi, dip, az, grid, xo, yo, zo): """ Algorithm to determine the intersection of a straight hole with a surface (DEM) grid. 
:param xi: Input location on hole X :param yi: Input location on hole Y :param zi: Input location on hole Z :param dip: Dip (positive up) in degrees :param az: Azimuth in degrees :param grid: DEM grid :param xo: Returned intersection point X :param yo: Returned intersection point Y :param zo: Returned intersection point Z :type xi: float :type yi: float :type zi: float :type dip: float :type az: float :type grid: str :type xo: float_ref :type yo: float_ref :type zo: float_ref :returns: 1 if an intersection is found, 0 if not. :rtype: int .. versionadded:: 7.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Given a point on the hole and the straight hole dip and azimuth, ocate (an) intersection point with the input DEM grid. """ ret_val, xo.value, yo.value, zo.value = self._grid_intersection(xi, yi, zi, dip, az, grid.encode(), xo.value, yo.value, zo.value) return ret_val def litho_grid_3d(self, chan_code, tpat, vox, cell_size, gap, non_contact_radius, gridding_type, reg, retain_grid_files): """ Create a lithology voxel grid with lith codes mapped to single values. :param chan_code: Lithology channel code :param tpat: Codes, colors etc. :param vox: Name of `GXVOX <geosoft.gxapi.GXVOX>` Persistent Storage file :param cell_size: Cell Size (`GS_R8DM <geosoft.gxapi.GS_R8DM>` for automatic calculation) :param gap: Max gap to skip when compositing (`GS_R8DM <geosoft.gxapi.GS_R8DM>` for none) :param non_contact_radius: Non-contact radius. :param gridding_type: Gridding type (0: Rangrid, 1: TinGrid) :param reg: Rangrid control `GXREG <geosoft.gxapi.GXREG>` (see `GXRGRD <geosoft.gxapi.GXRGRD>` class for parameters) :param retain_grid_files: Retain top/bottom grids? :type chan_code: str :type tpat: GXTPAT :type vox: str :type cell_size: float :type gap: float :type non_contact_radius: float :type gridding_type: int :type reg: GXREG :type retain_grid_files: int .. 
versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Values in the input channel are assigned the index of the corresponding item found in the input `GXTPAT <geosoft.gxapi.GXTPAT>`. The compositing gap refers to the size of gaps in the data (either a blank lithology or missing from-to interval) which will be ignored when compositing lithologies into contiguous from-to intervals. The non-contact radius is used to dummy out the level grids around holes where the gridded lithology is not found. If not specified (dummy) then half the distance to the nearest contacting hole is used. """ self._litho_grid_3d(chan_code.encode(), tpat, vox.encode(), cell_size, gap, non_contact_radius, gridding_type, reg, retain_grid_files) def numeric_chan_lst(self, lst): """ Fills a `GXLST <geosoft.gxapi.GXLST>` with available numeric channel code values. :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill with channel code values. :type lst: GXLST .. versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Channel codes are in the format "[Assay] Au", where the name in the square brackets is descriptive part of the project database containing the given channel name. The above code might refer to the "Au" channel in the "Tutorial_Assay.gdb" database. """ self._numeric_chan_lst(lst) def numeric_from_to_data_lst(self, assay, lst): """ Fills a `GXLST <geosoft.gxapi.GXLST>` with available numeric channel code values from From-To databases.. :param assay: Assay dataset ("" for all) :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill with channel code values. :type assay: str :type lst: GXLST .. 
versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Channel codes are in the format "[Assay] Au", where the name in the square brackets is descriptive part of the project database containing the given channel name. The above code might refer to the "Au" channel in the "Tutorial_Assay.gdb" database. """ self._numeric_from_to_data_lst(assay.encode(), lst) def punch_grid_holes(self, img, vv_x, vv_y, vv_z, blank_dist): """ Dummy out locations in a grid around non-contact holes. :param img: DEM grid :param vv_x: X locations of the contacts :param vv_y: Y locations of the contacts :param vv_z: Z locations of the contacts :param blank_dist: Blanking distance :type img: GXIMG :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type blank_dist: float .. versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Grid is dummied out to the blanking distance around holes where the input Z value is dummy. If a contacting hole is closer then twice the blanking distance, the blanking distance is reduced accordingly. Distances are measured horizontally (e.g. Z is ignored). If the blanking distance is zero or dummy, the distance is automatically set to half the distance to the closest hole intersection. """ self._punch_grid_holes(img, vv_x, vv_y, vv_z, blank_dist) def string_chan_lst(self, lst): """ Fills a `GXLST <geosoft.gxapi.GXLST>` with available string channel code values. :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill with channel code values. :type lst: GXLST .. 
versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Channel codes are in the format "[Assay] Au", where the name in the square brackets is descriptive part of the project database containing the given channel name. The above code might refer to the "Au" channel in the "Tutorial_Assay.gdb" database. """ self._string_chan_lst(lst) def string_from_to_data_lst(self, assay, lst): """ Fills a `GXLST <geosoft.gxapi.GXLST>` with available string-type channel code values from From-To databases. :param assay: Assay dataset ("" for all) :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill with channel code values. :type assay: str :type lst: GXLST .. versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Channel codes are in the format "[Geology] Lithology", where the name in the square brackets is descriptive part of the project database containing the given channel name. The above code might refer to the "Lithology" channel in the "Tutorial_Geology.gdb" database. """ self._string_from_to_data_lst(assay.encode(), lst) # Miscellaneous def h_assay_db(self, assay): """ Database for an assay data set. :param assay: Assay dataset number :type assay: int :returns: x - `GXDB <geosoft.gxapi.GXDB>` `DB_NULL <geosoft.gxapi.DB_NULL>` if no assay data (no error registered) :rtype: GXDB .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. """ ret_val = self._h_assay_db(assay) return GXDB(ret_val) def h_assay_symb(self, assay, hole): """ Line/Group symbol for a specific assay data set hole. 
:param assay: Assay dataset number :param hole: Hole index number :type assay: int :type hole: int :returns: x - DB_SYMB `NULLSYMB <geosoft.gxapi.NULLSYMB>` if no survey data for this hole (no error registered) :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. """ ret_val = self._h_assay_symb(assay, hole) return ret_val def h_collar_db(self): """ Database for the collar table. :returns: x - `GXDB <geosoft.gxapi.GXDB>` `DB_NULL <geosoft.gxapi.DB_NULL>` if no collar table (no error registered) :rtype: GXDB .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. """ ret_val = self._h_collar_db() return GXDB(ret_val) def h_collar_symb(self): """ Line/Group symbol for the collar table. :returns: x - DB_SYMB `NULLSYMB <geosoft.gxapi.NULLSYMB>` if no collar table (no error registered) :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. """ ret_val = self._h_collar_symb() return ret_val def h_dip_az_survey_db(self): """ Database for the Dip-Azimuth survey data :returns: x - `GXDB <geosoft.gxapi.GXDB>` `DB_NULL <geosoft.gxapi.DB_NULL>` if no dip-azimuth survey data (no error registered) :rtype: GXDB .. 
versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. """ ret_val = self._h_dip_az_survey_db() return GXDB(ret_val) def h_dip_az_survey_symb(self, hole): """ Line/Group symbol for a specific hole Dip-Azimuth survey. :param hole: Hole index number :type hole: int :returns: x - DB_SYMB `NULLSYMB <geosoft.gxapi.NULLSYMB>` if no Dip-Azimuth survey data for this hole (no error registered) :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. """ ret_val = self._h_dip_az_survey_symb(hole) return ret_val def h_en_survey_db(self): """ Database for the East-North survey data :returns: x - `GXDB <geosoft.gxapi.GXDB>` `DB_NULL <geosoft.gxapi.DB_NULL>` if no East-North survey data (no error registered) :rtype: GXDB .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. """ ret_val = self._h_en_survey_db() return GXDB(ret_val) def h_en_survey_symb(self, hole): """ Line/Group symbol for a specific hole East-North survey. :param hole: Hole index number :type hole: int :returns: x - DB_SYMB `NULLSYMB <geosoft.gxapi.NULLSYMB>` if no EN survey data for this hole (no error registered) :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. 
""" ret_val = self._h_en_survey_symb(hole) return ret_val def add_survey_table(self, hole): """ Add a survey table for a new hole. :param hole: Hole index :type hole: int .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The information is created from the collar table info. If the survey info already exists, does nothing. """ self._add_survey_table(hole) def assay_hole_lst(self, assay_db, lst): """ Populate an `GXLST <geosoft.gxapi.GXLST>` with holes in an assay database :param assay_db: Index of the assay database :param lst: `GXLST <geosoft.gxapi.GXLST>` handle :type assay_db: int :type lst: GXLST .. versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._assay_hole_lst(assay_db, lst) def assay_lst(self, lst): """ Return the `GXLST <geosoft.gxapi.GXLST>` of from-to and point assay datasets :param lst: `GXLST <geosoft.gxapi.GXLST>` to be populated :type lst: GXLST .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Assay dataset name is given as `LST_ITEM_NAME <geosoft.gxapi.LST_ITEM_NAME>` Assay dataset number is given as `LST_ITEM_VALUE <geosoft.gxapi.LST_ITEM_VALUE>` Returns an empty `GXLST <geosoft.gxapi.GXLST>` if no datasets. """ self._assay_lst(lst) @classmethod def auto_select_holes(cls, flag): """ Use automatic hole selection based on slice. :param flag: Turn on (TRUE) or off (FALSE) :type flag: int .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapDH._auto_select_holes(GXContext._get_tls_geo(), flag) def clean(self): """ Delete extraneous holes from project databases. .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Removes from Project databases any lines not connected to a line found in the collar table list. If all the database lines would be removed, the database is simply deleted. """ self._clean() def composite_db(self, mast_db, comp_db, hol_sel, int_sel, fix_int, lith_ch, int_file, wt_ch, rej1_val, rej2_val, rej3_val, rej3_op, rej3_ch): """ Make a composite database :param mast_db: Input assay `GXDB <geosoft.gxapi.GXDB>` object :param comp_db: Output composite `GXDB <geosoft.gxapi.GXDB>` object :param hol_sel: :ref:`DH_COMPSTDB_HOLSEL` :param int_sel: :ref:`DH_COMPSTDB_INTSEL` :param fix_int: Fixed interval length :param lith_ch: Name of lithology cannel :param int_file: Name of interval file :param wt_ch: Name of Weight channel :param rej1_val: dRej1Val for intervals short than, (`GS_R8DM <geosoft.gxapi.GS_R8DM>` for no action) :param rej2_val: dRej2Val for intervals gap greater than, (`GS_R8DM <geosoft.gxapi.GS_R8DM>` for no action) :param rej3_val: dRej3Val for Rej3Ch with Rej3Op, (`GS_R8DM <geosoft.gxapi.GS_R8DM>` for no action) :param rej3_op: dRej3Op: 0: >, 1: >=, 2: <, 3: <= :param rej3_ch: Name of Rej3Ch channel :type mast_db: GXDB :type comp_db: GXDB :type hol_sel: int :type int_sel: int :type fix_int: float :type lith_ch: str :type int_file: str :type wt_ch: str :type rej1_val: float :type rej2_val: float :type rej3_val: float :type rej3_op: int :type rej3_ch: str .. 
versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._composite_db(mast_db, comp_db, hol_sel, int_sel, fix_int, lith_ch.encode(), int_file.encode(), wt_ch.encode(), rej1_val, rej2_val, rej3_val, rej3_op, rej3_ch.encode()) def compute_hole_xyz(self, hole): """ Computes XYZ for survey and assay data for a single hole. :param hole: Hole index :type hole: int .. versionadded:: 7.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._compute_hole_xyz(hole) def compute_sel_extent(self, e_min, e_max, n_min, n_max, z_min, z_max): """ Computes the extents for selected holes. :param e_min: East Min :param e_max: East Max :param n_min: North Min :param n_max: North Max :param z_min: Elev Min :param z_max: Elev Max :type e_min: float_ref :type e_max: float_ref :type n_min: float_ref :type n_max: float_ref :type z_min: float_ref :type z_max: float_ref .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ e_min.value, e_max.value, n_min.value, n_max.value, z_min.value, z_max.value = self._compute_sel_extent(e_min.value, e_max.value, n_min.value, n_max.value, z_min.value, z_max.value) def compute_xyz(self): """ Computes XYZ for survey and assay data. .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._compute_xyz() @classmethod def convert_old_line_names(cls, db, lst): """ Convert old "DD001.Assay" type lines to "DD001" :param db: `GXDH <geosoft.gxapi.GXDH>` object :param lst: Names to convert (call `GXDB.symb_lst <geosoft.gxapi.GXDB.symb_lst>`). :type db: GXDB :type lst: GXLST .. 
versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The input `GXLST <geosoft.gxapi.GXLST>` must be filled using a function like `GXDB.symb_lst <geosoft.gxapi.GXDB.symb_lst>`, which puts the name and symbol into the `GXLST <geosoft.gxapi.GXLST>` items. Any names with a period are truncated at the period, and the line name in the database is changed to the new name (just the hole name). The `GXLST <geosoft.gxapi.GXLST>` is modified to have the new names. A value is put into the `GXDB <geosoft.gxapi.GXDB>` `GXREG <geosoft.gxapi.GXREG>` "DH_CONVERTED_NAMES" parameter so this process is done only once on a database. DO NOT use on old-style single-database Wholeplot projects. """ gxapi_cy.WrapDH._convert_old_line_names(GXContext._get_tls_geo(), db, lst) @classmethod def create(cls, db): """ Create `GXDH <geosoft.gxapi.GXDH>`. :param db: Name of current database :type db: str :returns: `GXDH <geosoft.gxapi.GXDH>` Object :rtype: GXDH .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDH._create(GXContext._get_tls_geo(), db.encode()) return GXDH(ret_val) def create_default_job(self, ini, type): """ Create a default job from scratch. :param ini: File name of the INI file to create (forces correct suffix) :param type: :ref:`DH_PLOT` :type ini: str :type type: int .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._create_default_job(ini.encode(), type) @classmethod def create_external(cls, db): """ Create a `GXDH <geosoft.gxapi.GXDH>` from an external process (no montaj running). 
:param db: Name of example project database :type db: str :returns: `GXDH <geosoft.gxapi.GXDH>` Object :rtype: GXDH .. versionadded:: 5.1.6 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The regular `create <geosoft.gxapi.GXDH.create>` assumes a workspace is open and creates the project from the databases which are currently loaded. This function instead creates the project from all projects in the input databases's directory. """ ret_val = gxapi_cy.WrapDH._create_external(GXContext._get_tls_geo(), db.encode()) return GXDH(ret_val) @classmethod def current(cls): """ Creates a drill project from current environment. :returns: `GXDH <geosoft.gxapi.GXDH>` Object :rtype: GXDH .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If no `GXDH <geosoft.gxapi.GXDH>` database is open the Open `GXDH <geosoft.gxapi.GXDH>` Project `GXGUI <geosoft.gxapi.GXGUI>` will be displayed which may be cancelled by the user in which case the GX will terminate with cancel. """ ret_val = gxapi_cy.WrapDH._current(GXContext._get_tls_geo()) return GXDH(ret_val) @classmethod def datamine_to_csv(cls, file, proj): """ Convert a Datamine drillhole file to CSV files ready for import. :param file: Datamine database file to import (``*.dm``) :param proj: Drillhole project name :type file: str :type proj: str .. versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Creates three CSV files and the accompanying template files ready for batch ASCII import into a drill project. 
Project_Collar.csv, .i3 Project_Survey.csv, .i3 Project_Assay.csv, .i3 """ gxapi_cy.WrapDH._datamine_to_csv(GXContext._get_tls_geo(), file.encode(), proj.encode()) def delete_holes(self, lst): """ Delete a list of holes from the project. :param lst: `GXLST <geosoft.gxapi.GXLST>` of holes to delete :type lst: GXLST .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Removes all lines in the input `GXLST <geosoft.gxapi.GXLST>` from `GXDH <geosoft.gxapi.GXDH>` project databases. If all the database lines would be removed, the database is simply deleted. """ self._delete_holes(lst) def export_file(self, file, type): """ Exports a Drill Hole database to an external file. :param file: File name :param type: :ref:`DH_EXP` :type file: str :type type: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._export_file(file.encode(), type) def export_geodatabase_lst(self, lst, file, pre, feat_class, overwrite): """ Exports whole or part of a Drill Hole database to an ArcGIS Geodatabase as feature class(es). :param lst: Hole Names in the Name and Value parts of the `GXLST <geosoft.gxapi.GXLST>` :param file: File name (.pdb folder for File Geodatabase or .sde connector for SDE) :param pre: String to prefix dataset names with :param feat_class: Feature class name to export (pass empty for all or name of table, will contain the name of the output dataset for if a rename occurs) :param overwrite: Overwrite existing feature classes? ``False`` will create copies. :type lst: GXLST :type file: str :type pre: str :type feat_class: str_ref :type overwrite: int .. 
versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** A table with metadata about the created feature classes will be written to the Geodatabase. This table will have the same name with the postfix "_Metadata" attached """ feat_class.value = self._export_geodatabase_lst(lst, file.encode(), pre.encode(), feat_class.value.encode(), overwrite) def export_las(self, assay_db, hole, interval, file): """ Exports a Drill Hole database to a LAS v2 file. :param assay_db: Assay database index :param hole: Hole index :param interval: Interval for output :param file: File name :type assay_db: int :type hole: int :type interval: float :type file: str .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._export_las(assay_db, hole, interval, file.encode()) def export_lst(self, lst, file, type): """ Exports a `GXLST <geosoft.gxapi.GXLST>` of holes in a Drill Hole database to an external file. :param lst: Hole Names in the Name and Value parts of the `GXLST <geosoft.gxapi.GXLST>` :param file: File name :param type: :ref:`DH_EXP` :type lst: GXLST :type file: str :type type: int .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Use functions like `GXDB.selected_line_lst <geosoft.gxapi.GXDB.selected_line_lst>` to construct the `GXLST <geosoft.gxapi.GXLST>` """ self._export_lst(lst, file.encode(), type) def flush_select(self): """ Flush all selections to database selection engine. .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._flush_select() def get_databases_vv(self, gvv): """ Get the names of the project databases in a `GXVV <geosoft.gxapi.GXVV>`. :param gvv: `GXVV <geosoft.gxapi.GXVV>` of type -`STR_FILE <geosoft.gxapi.STR_FILE>` :type gvv: GXVV .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_databases_vv(gvv) def get_databases_sorted_vv(self, gvv): """ Get the names of the project databases in a `GXVV <geosoft.gxapi.GXVV>`, same as `get_databases_vv <geosoft.gxapi.GXDH.get_databases_vv>` but the list is sorted alphabetically. :param gvv: `GXVV <geosoft.gxapi.GXVV>` of type -`STR_FILE <geosoft.gxapi.STR_FILE>` :type gvv: GXVV .. versionadded:: 8.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_databases_sorted_vv(gvv) def get_data_type(self, db, type): """ Get the type of data in a Wholeplot database. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param type: :ref:`DH_DATA` :type db: GXDB :type type: int_ref .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Returns `DH_DATA_UNKNOWN <geosoft.gxapi.DH_DATA_UNKNOWN>` if it can't determine the type. """ type.value = self._get_data_type(db, type.value) def get_default_section(self, az, x1, x2, l, w): """ Computes default section azimuths, extents for selected holes. 
:param az: Azimuth of section (returned) :param x1: Corner X (Easting) of section (returned) :param x2: Corner Y (Northing) of section (returned) :param l: Section length (returned) :param w: Section width (returned) :type az: float_ref :type x1: float_ref :type x2: float_ref :type l: float_ref :type w: float_ref .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ az.value, x1.value, x2.value, l.value, w.value = self._get_default_section(az.value, x1.value, x2.value, l.value, w.value) def get_hole_group(self, hole, assay): """ Get the Group symbol for this hole/table combination. :param hole: Hole index :param assay: Table Name :type hole: int :type assay: str :returns: Hole Symbol :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._get_hole_group(hole, assay.encode()) return ret_val def get_hole_survey(self, hole, vv_x, vv_y, vv_z, vv_d): """ Get the Survey information of a Hole. :param hole: Hole index :param vv_x: X :param vv_y: Y :param vv_z: Z :param vv_d: Depth :type hole: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_hole_survey(hole, vv_x, vv_y, vv_z, vv_d) def get_hole_survey_ex(self, hole, vv_x, vv_y, vv_z, vv_d, thin): """ Get the Survey information of a Hole. :param hole: Hole index :param vv_x: X :param vv_y: Y :param vv_z: Z :param vv_d: Depth :param thin: Thin nearly co-linear segments? :type hole: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV :type thin: bool .. 
versionadded:: 9.5 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_hole_survey_ex(hole, vv_x, vv_y, vv_z, vv_d, thin) def get_hole_survey_from_to(self, db, line, vv_x, vv_y, vv_z, vv_d, vv_l, thin): """ Get the Survey information of a Hole using From/To database. :param db: From/To Database :param line: Line handle for hole :param vv_x: X :param vv_y: Y :param vv_z: Z :param vv_d: Depth :param vv_l: From To Segment lengths :param thin: Thin nearly co-linear segments? :type db: GXDB :type line: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV :type vv_l: GXVV :type thin: bool .. versionadded:: 9.5 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_hole_survey_from_to(db, line, vv_x, vv_y, vv_z, vv_d, vv_l, thin) @classmethod def desurvey(cls, desurvey_method, order, dip_convention, inc, thin, locations, vv_depth, vv_dip, vv_az, x, y, z, top, bottom, vv_x, vv_y, vv_z, vv_d): """ Calculate survey locations and depth from a hole survey. :param desurvey_method: :ref:`DH_DESURVEY` :param order: Polynomial order :param dip_convention: :ref:`DIP_CONVENTION` :param inc: Depth increment :param thin: Thin nearly co-linear segments? 
:param locations: Calculate at predefined depths (inc ignored) :param vv_depth: Survey Depth :param vv_dip: Survey Dip :param vv_az: Survey Azimuth :param x: Hole X :param y: Hole Y :param z: Hole Z :param top: Hole Top :param bottom: Hole Bottom :param vv_x: Out X :param vv_y: Out Y :param vv_z: Out Z :param vv_d: Out Depth :type desurvey_method: int :type order: int :type dip_convention: int :type inc: float :type thin: bool :type locations: bool :type vv_depth: GXVV :type vv_dip: GXVV :type vv_az: GXVV :type x: float :type y: float :type z: float :type top: float :type bottom: float :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV .. versionadded:: 9.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapDH._desurvey(GXContext._get_tls_geo(), desurvey_method, order, dip_convention, inc, thin, locations, vv_depth, vv_dip, vv_az, x, y, z, top, bottom, vv_x, vv_y, vv_z, vv_d) @classmethod def convert_oriented_core_dip_dir_for_hole_survey(cls, vv_depth, vv_alpha, vv_beta, vv_survey_x, vv_survey_y, vv_survey_z, vv_survey_depth, top_ref): """ Converted alpha/beta values in oriented cores to dip/dip direction. :param vv_depth: Depth VV :param vv_alpha: Alpha VV :param vv_beta: Beta VV :param vv_survey_x: Desurveyed X :param vv_survey_y: Desurveyed Y :param vv_survey_z: Desurveyed Z :param vv_survey_depth: Desurveyed Depth :param top_ref: 1: Top of core reference 0: Bottom of core reference :type vv_depth: GXVV :type vv_alpha: GXVV :type vv_beta: GXVV :type vv_survey_x: GXVV :type vv_survey_y: GXVV :type vv_survey_z: GXVV :type vv_survey_depth: GXVV :type top_ref: int .. 
versionadded:: 9.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The input data are the oriented core alpha and beta values, using either top or bottom reference. The values alpha and beta are converted in place to "absolute" dip and dip-direction values, using the desurveyed hole orientations at each depth. """ gxapi_cy.WrapDH._convert_oriented_core_dip_dir_for_hole_survey(GXContext._get_tls_geo(), vv_depth, vv_alpha, vv_beta, vv_survey_x, vv_survey_y, vv_survey_z, vv_survey_depth, top_ref) @classmethod def desurvey_from_to(cls, desurvey_method, order, dip_convention, inc, thin, vv_from, vv_to, vv_depth, vv_dip, vv_az, x, y, z, top, bottom, vv_x, vv_y, vv_z, vv_d, vv_l): """ Calculate survey locations and depth from a hole survey using from/to values :param desurvey_method: :ref:`DH_DESURVEY` :param order: Polynomial order :param dip_convention: :ref:`DIP_CONVENTION` :param inc: Inc :param thin: Thin nearly co-linear segments? :param vv_from: From Values :param vv_to: To Values :param vv_depth: Survey Depth :param vv_dip: Survey Dip :param vv_az: Survey Azimuth :param x: Hole X :param y: Hole Y :param z: Hole Z :param top: Hole Top :param bottom: Hole Bottom :param vv_x: Out X :param vv_y: Out Y :param vv_z: Out Z :param vv_d: Out Depth :param vv_l: From/To segment lengths in output VVs matching each from/to pair :type desurvey_method: int :type order: int :type dip_convention: int :type inc: float :type thin: bool :type vv_from: GXVV :type vv_to: GXVV :type vv_depth: GXVV :type vv_dip: GXVV :type vv_az: GXVV :type x: float :type y: float :type z: float :type top: float :type bottom: float :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV :type vv_l: GXVV .. 
versionadded:: 9.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapDH._desurvey_from_to(GXContext._get_tls_geo(), desurvey_method, order, dip_convention, inc, thin, vv_from, vv_to, vv_depth, vv_dip, vv_az, x, y, z, top, bottom, vv_x, vv_y, vv_z, vv_d, vv_l) def get_ipj(self, ipj): """ Get the project `GXIPJ <geosoft.gxapi.GXIPJ>`. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` Handle :type ipj: GXIPJ .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The projection for the project is the projection stored in the DH_EAST channel in the collar table. """ self._get_ipj(ipj) def get_map_names_vv(self, vv): """ Get plotted map names. :param vv: Returned map names (string type `GXVV <geosoft.gxapi.GXVV>`) :type vv: GXVV .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This will return the currently plotted map name(s) in a `GXVV <geosoft.gxapi.GXVV>`. This should only be called after a call to `wholeplot <geosoft.gxapi.GXDH.wholeplot>`. The `GXVV <geosoft.gxapi.GXVV>` size is set to the number of maps created. """ self._get_map_names_vv(vv) def get_map(self, index): """ Get a plotting map :param index: Map Index :type index: int :returns: `GXMAP <geosoft.gxapi.GXMAP>` Object :rtype: GXMAP .. versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._get_map(index) return GXMAP(ret_val) def get_num_maps(self): """ Get the number plotting maps :returns: Number of plotting maps :rtype: int .. 
versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._get_num_maps() return ret_val def get_reg(self): """ Get the `GXREG <geosoft.gxapi.GXREG>` Object used in this project. :returns: `GXREG <geosoft.gxapi.GXREG>` Object :rtype: GXREG .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._get_reg() return GXREG(ret_val) def get_selected_holes_vv(self, vv): """ Populate a `GXVV <geosoft.gxapi.GXVV>` with the indices of all selected holes :param vv: Returned hole indices (must be type INT) :type vv: GXVV .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._get_selected_holes_vv(vv) @classmethod def get_table_default_chan_lst(cls, lst, type): """ Return list of default channels by collar/assay/survey table type. :param lst: `GXLST <geosoft.gxapi.GXLST>` handle :param type: :ref:`DH_DATA` :type lst: GXLST :type type: int .. versionadded:: 7.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Fills a `GXLST <geosoft.gxapi.GXLST>` with the default channel names created according to type (Collar, Survey, Assay). Value is in the `LST_ITEM_NAME <geosoft.gxapi.LST_ITEM_NAME>` part. """ gxapi_cy.WrapDH._get_table_default_chan_lst(GXContext._get_tls_geo(), lst, type) def hole_lst(self, lst): """ Populate an `GXLST <geosoft.gxapi.GXLST>` with the list of the selected holes :param lst: `GXLST <geosoft.gxapi.GXLST>` handle :type lst: GXLST .. 
versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._hole_lst(lst)




    def hole_lst2(self, lst):
        """
        Populate an `GXLST <geosoft.gxapi.GXLST>` with the list of all the holes

        :param lst:  `GXLST <geosoft.gxapi.GXLST>` handle
        :type  lst:  GXLST

        .. versionadded:: 5.1.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._hole_lst2(lst)




    def add_hole(self, hole):
        """
        Add a hole and return its index.

        :param hole:  Name of hole
        :type  hole:  str

        :returns:     x - Hole index
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = self._add_hole(hole.encode())
        return ret_val




    def clean_will_delete_db(self):
        """
        See if "cleaning" will delete project databases.

        :returns: 1 if calling `clean <geosoft.gxapi.GXDH.clean>` will
                  remove all "lines" from
                  one of the `GXDH <geosoft.gxapi.GXDH>` project databases.
        :rtype:   int

        .. versionadded:: 5.1.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = self._clean_will_delete_db()
        return ret_val




    def compositing_tool_gui(self, map, x, y1, y2):
        """
        Annotate a strip log map using the compositing tool.

        :param map:  Current strip log map
        :param x:    X location on map of selected strip
        :param y1:   Y End of hole interval in view coords
        :param y2:   Y Other end of hole interval in view coords
        :type  map:  GXMAP
        :type  x:    float
        :type  y1:   float
        :type  y2:   float

        :returns:    :ref:`DH_COMP_CHOICE`
        :rtype:      int

        ..
versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If any of the input X or Y values are dummies the tool uses default values. """ ret_val = self._compositing_tool_gui(map, x, y1, y2) return ret_val @classmethod def create_collar_table(cls, project, chan, db): """ Create a collar table `GXDB <geosoft.gxapi.GXDB>` with channels set up. :param project: Project name :param chan: Number of channels :param db: Collar table name (returned) :type project: str :type chan: int :type db: str_ref .. versionadded:: 5.1.6 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The database name will be of the form "d:\\directory\\Project_Collar.gdb" """ db.value = gxapi_cy.WrapDH._create_collar_table(GXContext._get_tls_geo(), project.encode(), chan, db.value.encode()) @classmethod def create_collar_table_dir(cls, project, dir, chan, db): """ Create a collar table in the specified directory. :param project: Project name :param dir: Directory to create project in :param chan: Number of channels :param db: Collar table name (returned) :type project: str :type dir: str :type chan: int :type db: str_ref .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The database name will be of the form "d:\\directory\\Project_Collar.gdb" """ db.value = gxapi_cy.WrapDH._create_collar_table_dir(GXContext._get_tls_geo(), project.encode(), dir.encode(), chan, db.value.encode()) def delete_will_delete_db(self, lst): """ See if deleting holes will delete project databases. 
:param lst:  `GXLST <geosoft.gxapi.GXLST>` of holes to delete
        :type  lst:  GXLST

        :returns:    1 if deleting the `GXLST <geosoft.gxapi.GXLST>` of holes
                     will remove all "lines" from
                     one of the `GXDH <geosoft.gxapi.GXDH>` project databases.
        :rtype:      int

        .. versionadded:: 5.1.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = self._delete_will_delete_db(lst)
        return ret_val




    def find_hole(self, hole):
        """
        Find a hole and return its index.

        :param hole:  Name of hole
        :type  hole:  str

        :returns:     x  - Hole index
                      -1 - Not found
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        ret_val = self._find_hole(hole.encode())
        return ret_val




    def get_collar_table_db(self, db):
        """
        Get the name of the database containing the collar table.

        :param db:  Returned file name
        :type  db:  str_ref

        .. versionadded:: 5.1.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        db.value = self._get_collar_table_db(db.value.encode())




    def get_info(self, hole, name, data):
        """
        Get Collar Information.

        :param hole:  Hole index
        :param name:  Name of information
        :param data:  Buffer to place information
        :type  hole:  int
        :type  name:  str
        :type  data:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** If the DH_ELEV channel is requested it will also
        search for the DH_RL channel, which is the new name for
        the collar elevation.
        """
        data.value = self._get_info(hole, name.encode(), data.value.encode())




    def get_project_name(self, project):
        """
        Get the Wholeplot project name.

        :param project:  Returned string
        :type  project:  str_ref

        ..
versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ project.value = self._get_project_name(project.value.encode()) @classmethod def get_section_id(cls, azimuth, east, north, id): """ Create a section ID based on its location :param azimuth: Section Azimuth :param east: Section Easting :param north: Section Northing :param id: Section ID :type azimuth: float :type east: float :type north: float :type id: str_ref .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ id.value = gxapi_cy.WrapDH._get_section_id(GXContext._get_tls_geo(), azimuth, east, north, id.value.encode()) @classmethod def get_template_blob(cls, db, templ, imp_type): """ Retrieve the import template from the database. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param templ: Name of template file to extract to. :param imp_type: The stored import template type :ref:`DH_DATA` :type db: GXDB :type templ: str :type imp_type: int_ref :returns: 0: No template stored in the database 1: Template retrieved and written to a file. :rtype: int .. versionadded:: 6.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The template can be retrieved in order to refresh the database with a call to the DHIMPORT.GX. The import types correspond to the DHIMPORT.IMPTYPE variable: 0: ASCII, 1: Database/XLS, 2: ODBC If no template blob exists, templ """ ret_val, imp_type.value = gxapi_cy.WrapDH._get_template_blob(GXContext._get_tls_geo(), db, templ.encode(), imp_type.value) return ret_val @classmethod def get_template_blob_no_source_resolve(cls, db, templ, imp_type): """ Retrieve the import template from the database. 
:param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param templ: Name of template file to extract to. :param imp_type: The stored import template type :ref:`DH_DATA` :type db: GXDB :type templ: str :type imp_type: int_ref :returns: 0: No template stored in the database 1: Template retrieved and written to a file. :rtype: int .. versionadded:: 9.7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The template can be retrieved in order to refresh the database with a call to the DHIMPORT.GX. The import types correspond to the DHIMPORT.IMPTYPE variable: 0: ASCII, 1: Database/XLS, 2: ODBC If no template blob exists, templ """ ret_val, imp_type.value = gxapi_cy.WrapDH._get_template_blob_no_source_resolve(GXContext._get_tls_geo(), db, templ.encode(), imp_type.value) return ret_val @classmethod def get_template_info(cls, templ, data_type, file, table): """ Retrieve the file, `GXDH <geosoft.gxapi.GXDH>` Table name and type from an import template. :param templ: Template name :param data_type: :ref:`DH_DATA` :param file: File name (blank for ODBC, or undefined). :param table: Table name (blank for `DH_DATA_UNKNOWN <geosoft.gxapi.DH_DATA_UNKNOWN>`, or undefined). :type templ: str :type data_type: int_ref :type file: str_ref :type table: str_ref .. versionadded:: 6.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** As of version 6.0, the import templates (``*.i3, *.i4``) produced by the Wholeplot import wizards contain the following lines: FILE assay.txt (except for ODBC) DRILLTYPE 3 DRILLTABLE Assay The FILE is normally the input file name, except for ODBC, where it is not defined. The DRILLTYPE is one of DH_DATA_XXX, and the DRILLTABLE is the name of the Wholeplot database table; e.g. Project_Assay.gdb in the above case. 
The DRILLTABLE is only included in the template for `DH_DATA_FROMTO <geosoft.gxapi.DH_DATA_FROMTO>` and `DH_DATA_POINT <geosoft.gxapi.DH_DATA_POINT>`, but this function will return the appropriate table names (e.g. Collar, Survey, ENSurvey) for the other types. If the DRILLTYPE is NOT found in the template, a value of `DH_DATA_UNKNOWN <geosoft.gxapi.DH_DATA_UNKNOWN>` is returned for the data type; likely an indication that this is not a new-style template produced by Wholeplot. """ data_type.value, file.value, table.value = gxapi_cy.WrapDH._get_template_info(GXContext._get_tls_geo(), templ.encode(), data_type.value, file.value.encode(), table.value.encode()) @classmethod def get_template_info_ex(cls, templ, data_type, file, table, lst): """ Retrieve the file, `GXDH <geosoft.gxapi.GXDH>` Table name, type and channel list from an import template. :param templ: Template name :param data_type: :ref:`DH_DATA` :param file: File name (blank for ODBC, or undefined). :param table: Table name (blank for `DH_DATA_UNKNOWN <geosoft.gxapi.DH_DATA_UNKNOWN>`, or undefined). :param lst: Channel list (returned) :type templ: str :type data_type: int_ref :type file: str_ref :type table: str_ref :type lst: GXLST .. versionadded:: 7.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** As of version 6.0, the import templates (``*.i3, *.i4``) produced by the Wholeplot import wizards contain the following lines: FILE assay.txt (except for ODBC) DRILLTYPE 3 DRILLTABLE Assay The FILE is normally the input file name, except for ODBC, where it is not defined. The DRILLTYPE is one of DH_DATA_XXX, and the DRILLTABLE is the name of the Wholeplot database table; e.g. Project_Assay.gdb in the above case. 
The DRILLTABLE is only included in the template for `DH_DATA_FROMTO <geosoft.gxapi.DH_DATA_FROMTO>` and `DH_DATA_POINT <geosoft.gxapi.DH_DATA_POINT>`, but this function will return the appropriate table names (e.g. Collar, Survey, ENSurvey) for the other types. If the DRILLTYPE is NOT found in the template, a value of `DH_DATA_UNKNOWN <geosoft.gxapi.DH_DATA_UNKNOWN>` is returned for the data type; likely an indication that this is not a new-style template produced by Wholeplot. This version also returns a list of the channels in the template checks can be made to see if the import will exceed the database channel limit. """ data_type.value, file.value, table.value = gxapi_cy.WrapDH._get_template_info_ex(GXContext._get_tls_geo(), templ.encode(), data_type.value, file.value.encode(), table.value.encode(), lst) def get_units(self, units, conv_factor): """ Get the positional units and conversion factor to m. :param units: Units (i.e. "m") :param conv_factor: Conversion (units/m) :type units: str_ref :type conv_factor: float_ref .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ units.value, conv_factor.value = self._get_units(units.value.encode(), conv_factor.value) @classmethod def have_current(cls): """ Returns ``True`` if a drill project is loaded :rtype: bool .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDH._have_current(GXContext._get_tls_geo()) return ret_val @classmethod def have_current2(cls, db): """ Returns ``True`` if a drill project is loaded, and the collar database if it is loaded. :param db: Collar table name (returned) :type db: str_ref :rtype: bool .. 
versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, db.value = gxapi_cy.WrapDH._have_current2(GXContext._get_tls_geo(), db.value.encode()) return ret_val def holes(self): """ Return number of holes. :returns: x - Number of holes :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._holes() return ret_val @classmethod def hole_select_from_list_gui(cls, lst, sel_lst): """ Select/Deselect holes using the two-panel selection tool. :param lst: All holes :param sel_lst: Selected holes :type lst: GXLST :type sel_lst: GXLST :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDH._hole_select_from_list_gui(GXContext._get_tls_geo(), lst, sel_lst) return ret_val def hole_selection_tool_gui(self): """ Select/Deselect holes using plan map tool. :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._hole_selection_tool_gui() return ret_val def modify3d_gui(self, ini, page): """ Modify parameters for a 3D plot. :param ini: Job Name (``*.in3``) :param page: Page to open `GXGUI <geosoft.gxapi.GXGUI>` on :type ini: str :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. 
versionadded:: 5.1.6 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, page.value = self._modify3d_gui(ini.encode(), page.value) return ret_val def edit_classification_table_file_gui(self, chan, class_file, fill_patterns, colors_only): """ Edit a symbol color/pattern CSV file :param chan: Channel :param class_file: CSV filename (in/out can be blank) :param fill_patterns: 0 - Collar Symbols -1 - Rock Patterns :param colors_only: 0 - Symbols/patterns (2D) -1 - Colors only (3D) :type chan: str :type class_file: str_ref :type fill_patterns: int :type colors_only: int :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 9.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, class_file.value = self._edit_classification_table_file_gui(chan.encode(), class_file.value.encode(), fill_patterns, colors_only) return ret_val def modify_crooked_section_holes_gui(self, ini, page): """ Modify parameters to replot holes and hole data to an existing crooked section map. :param ini: Job Name (``*.ins``) :param page: Tab page ID. :type ini: str :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Will plot to an empty crooked section. """ ret_val, page.value = self._modify_crooked_section_holes_gui(ini.encode(), page.value) return ret_val def modify_fence_gui(self, ini, page): """ Modify parameters for a section plot. :param ini: Job Name (``*.ins``) :param page: :ref:`DH_SECT_PAGE` :type ini: str :type page: int_ref :returns: 0 - Ok 1 - Interactively define a fence. -1 - User Cancelled :rtype: int .. 
versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The fence section function. """ ret_val, page.value = self._modify_fence_gui(ini.encode(), page.value) return ret_val def modify_hole_traces_3d_gui(self, ini, page): """ Modify parameters for a hole traces plot to an existing 3D view. :param ini: Job Name :param page: Page to open `GXGUI <geosoft.gxapi.GXGUI>` on :type ini: str :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, page.value = self._modify_hole_traces_3d_gui(ini.encode(), page.value) return ret_val def modify_hole_traces_gui(self, ini, page): """ Modify parameters for a hole traces plot to a current map. :param ini: Job Name :param page: Page to open `GXGUI <geosoft.gxapi.GXGUI>` on :type ini: str :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, page.value = self._modify_hole_traces_gui(ini.encode(), page.value) return ret_val def modify_hole_traces_gui2(self, ini, plot_type, page): """ Modify parameters for a hole traces plot to a current plan or section view. :param ini: Job Name :param plot_type: :ref:`DH_PLOT` One of `DH_PLOT_PLAN <geosoft.gxapi.DH_PLOT_PLAN>` or `DH_PLOT_SECTION <geosoft.gxapi.DH_PLOT_SECTION>` :param page: Page to open `GXGUI <geosoft.gxapi.GXGUI>` on :type ini: str :type plot_type: int :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. 
versionadded:: 8.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Currently supports `DH_PLOT_PLAN <geosoft.gxapi.DH_PLOT_PLAN>` and `DH_PLOT_SECTION <geosoft.gxapi.DH_PLOT_SECTION>` """ ret_val, page.value = self._modify_hole_traces_gui2(ini.encode(), plot_type, page.value) return ret_val def modify_plan_gui(self, ini, page): """ Modify parameters for a plan plot. :param ini: Job Name (``*.inp``) :param page: :ref:`DH_SECT_PAGE` :type ini: str :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, page.value = self._modify_plan_gui(ini.encode(), page.value) return ret_val def modify_plan_holes_gui(self, ini, page): """ Modify parameters to replot holes and hole data to an existing plan map. :param ini: Job Name (``*.ins``) :param page: Tab Page ID :type ini: str :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Modifies only hole trace, hole data, topo, voxel slice data. """ ret_val, page.value = self._modify_plan_holes_gui(ini.encode(), page.value) return ret_val @classmethod def modify_rock_codes_gui(cls, file): """ Modify/create a rock codes file. :param file: File name :type file: str :returns: 0 - Ok -1 - User Cancelled :rtype: int .. 
versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDH._modify_rock_codes_gui(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def modify_rock_codes_gui2(cls, db, file): """ Modify/create a rock codes file, channel population option. :param db: Database :param file: File name :type db: GXDB :type file: str :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Same as above, but passes the current database so that the "Populate from channel" button can be used to automatically populate the rock code list. The database should be a Wholeplot database. """ ret_val = gxapi_cy.WrapDH._modify_rock_codes_gui2(GXContext._get_tls_geo(), db, file.encode()) return ret_val def modify_section_gui(self, ini, page): """ Modify parameters for a section plot. :param ini: Job Name (``*.ins``) :param page: :ref:`DH_SECT_PAGE` :type ini: str :type page: int_ref :returns: 0 - Ok 1 - Interactively define a NS section 2 - Interactively define an EW section 3 - Interactively define an angled section -1 - User Cancelled :rtype: int .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The stacked section function uses the same control file format, but the plotting of profiles and plan views is disabled, and if multiple sections are requested, they are plotted in a stack on the left side of the same map, not to individual maps. 
""" ret_val, page.value = self._modify_section_gui(ini.encode(), page.value) return ret_val def modify_section_holes_gui(self, ini, page): """ Modify parameters to replot holes and hole data to an existing section map. :param ini: Job Name (``*.ins``) :param page: Tab page ID. :type ini: str :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both regular and stacked sections. Modifies only hole trace, hole data, topo, voxel slice data. """ ret_val, page.value = self._modify_section_holes_gui(ini.encode(), page.value) return ret_val def modify_stacked_section_gui(self, ini, page): """ Modify parameters for a section plot. :param ini: Job Name (``*.ins``) :param page: :ref:`DH_SECT_PAGE` :type ini: str :type page: int_ref :returns: 0 - Ok 1 - Interactively define a NS section 2 - Interactively define an EW section 3 - Interactively define an angled section -1 - User Cancelled :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The stacked section function uses the same control file format, but the plotting of profiles and plan views is disabled, and if multiple sections are requested, they are plotted in a stack on the left side of the same map, not to individual maps. """ ret_val, page.value = self._modify_stacked_section_gui(ini.encode(), page.value) return ret_val def modify_strip_log_gui(self, ini, page): """ Modify parameters for a strip log plot. :param ini: Job Name (``*.inl``) :param page: :ref:`DH_SECT_PAGE` :type ini: str :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. 
versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, page.value = self._modify_strip_log_gui(ini.encode(), page.value) return ret_val @classmethod def modify_structure_codes_gui(cls, file): """ Modify/create a structure codes file. :param file: File name :type file: str :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDH._modify_structure_codes_gui(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def modify_structure_codes_gui2(cls, db, file): """ Modify/create a structure codes file, channel population option. :param db: Database :param file: File name :type db: GXDB :type file: str :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Same as above, but passes the current database so that the "Populate from channel" button can be used to automatically populate the structure code list. The database should be a Wholeplot database. """ ret_val = gxapi_cy.WrapDH._modify_structure_codes_gui2(GXContext._get_tls_geo(), db, file.encode()) return ret_val @classmethod def import2(cls, project, db, line, hole, table, type, log): """ Imports data into a Drill Hole Database (obsolete). :param project: Drill project name :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param line: Line :param hole: Hole channel :param table: Table :param type: :ref:`DH_DATA` :param log: Log file name :type project: str :type db: GXDB :type line: int :type hole: int :type table: str :type type: int :type log: str .. 
versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapDH._import2(GXContext._get_tls_geo(), project.encode(), db, line, hole, table.encode(), type, log.encode()) def import_las(self, assay, file, interval, interp, wa): """ Imports LAS Data into a `GXDH <geosoft.gxapi.GXDH>` database :param assay: Assay database to use :param file: LAS file name :param interval: Averaging/desampling interval (cm) :param interp: Interpolation method :param wa: Log file handle :type assay: str :type file: str :type interval: float :type interp: int :type wa: GXWA .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The argument for the assay database is the file name without the project name and underscore, e.g. for "Project_Assay.gdb" use "Assay" """ self._import_las(assay.encode(), file.encode(), interval, interp, wa) def num_assays(self): """ Number of assay datasets. :returns: The number of assay datasets. :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Works for both single and multiple `GXDB <geosoft.gxapi.GXDB>` Wholeplots. """ ret_val = self._num_assays() return ret_val def num_selected_holes(self): """ Returns number of selected holes. :returns: The number of selected holes :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._num_selected_holes() return ret_val def qa_dip_az_curvature_lst(self, lst, tolerance, wa): """ Do QA/QC Curvature checking on Dip Azimuth data for holes in a `GXLST <geosoft.gxapi.GXLST>`. 
:param lst: `GXLST <geosoft.gxapi.GXLST>` of holes (name, index) :param tolerance: Dip/Azimuth curvature tolerance (degree per meter) :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle to write to :type lst: GXLST :type tolerance: float :type wa: GXWA :returns: The number of holes found and checked. :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Checks all holes with Dip-Azimuth survey data """ ret_val = self._qa_dip_az_curvature_lst(lst, tolerance, wa) return ret_val def qa_dip_az_survey_lst(self, lst, wa): """ Do QA/QC on Dip/Az Survey data for holes in a `GXLST <geosoft.gxapi.GXLST>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` of holes (Name, Index) :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle to write to :type lst: GXLST :type wa: GXWA :returns: The number of holes found and checked. :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Error if no Dip-Azimuth survey database, or if a requested hole does not exist in the drill project. """ ret_val = self._qa_dip_az_survey_lst(lst, wa) return ret_val def qa_east_north_curvature_lst(self, lst, tolerance, wa): """ Do QA/QC Curvature checking on Dip Azimuth data for holes in a `GXLST <geosoft.gxapi.GXLST>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` of holes (name, index) :param tolerance: Dip/Azimuth curvature tolerance (degree per meter) :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :type lst: GXLST :type tolerance: float :type wa: GXWA :returns: The number of holes found and checked. :rtype: int .. 
versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Checks all holes with East-North survey data """ ret_val = self._qa_east_north_curvature_lst(lst, tolerance, wa) return ret_val def qa_east_north_survey_lst(self, lst, wa): """ Do QA/QC on East/North Survey data for holes in a `GXLST <geosoft.gxapi.GXLST>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` of holes (Name, Index) :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle to write to :type lst: GXLST :type wa: GXWA :returns: The number of holes found and checked. :rtype: int .. versionadded:: 7.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Error if no East-North survey database, or if a requested hole does not exist in the drill project. """ ret_val = self._qa_east_north_survey_lst(lst, wa) return ret_val def slice_selection_tool_gui(self, aoix1, aoiy1, aoix2, aoiy2, aoix3, aoiy3, aoix4, aoiy4, x1, y1, x2, y2): """ Select a slice with the holes in context. An optional 4 point area of interest (AOI) can be added to be represented in the UI too. :param aoix1: 1st Corner of AOI - X :param aoiy1: 1st Corner of AOI - Y :param aoix2: 2nd Corner of AOI - X :param aoiy2: 2nd Corner of AOI - Y :param aoix3: 3rd Corner of AOI - X :param aoiy3: 3rd Corner of AOI - Y :param aoix4: 4th Corner of AOI - X :param aoiy4: 4th Corner of AOI - Y :param x1: Returned slice 1st point - X :param y1: Returned slice 1st point - Y :param x2: Returned slice 2nd point - X :param y2: Returned slice 2nd point - Y :type aoix1: float :type aoiy1: float :type aoix2: float :type aoiy2: float :type aoix3: float :type aoiy3: float :type aoix4: float :type aoiy4: float :type x1: float_ref :type y1: float_ref :type x2: float_ref :type y2: float_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. 
versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, x1.value, y1.value, x2.value, y2.value = self._slice_selection_tool_gui(aoix1, aoiy1, aoix2, aoiy2, aoix3, aoiy3, aoix4, aoiy4, x1.value, y1.value, x2.value, y2.value) return ret_val def update_survey_from_collar(self, hole): """ Update the Survey table from the collar info. :param hole: Hole index :type hole: int :returns: 0 - No change; there is no survey table, the table was empty, or values were same as collar 1 - Survey table updated; values changed and there is just one row. 2 - Survey table unchanged; there was more than one row in the table, and values were different :rtype: int .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Call when the collar values are edited to update the survey table values. If the survey contains more than one row, then no changes are applied, and no warning or error is registered. """ ret_val = self._update_survey_from_collar(hole) return ret_val def load_data_parameters_ini(self, db, dir): """ Load data parameters from INI files.. :param db: Source database :param dir: Directory to store INI files :type db: GXDB :type dir: str .. versionadded:: 6.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Wholeplot data graphing parameters for each channel are stored in the channel `GXREG <geosoft.gxapi.GXREG>`. This function lets a user transfer pre-defined settings to individual INI files (eg. cu.ini). """ self._load_data_parameters_ini(db, dir.encode()) def load_plot_parameters(self, ini, type): """ Load parameters from a Job into the Drill object. 
:param ini: The job file file to read :param type: :ref:`DH_PLOT` :type ini: str :type type: int .. versionadded:: 6.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._load_plot_parameters(ini.encode(), type) def load_select(self, file): """ Load selections to from a file. :param file: File Name :type file: str .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._load_select(file.encode()) def mask_ply(self, pply, ipj, tol, mask, select, append): """ Set mask channel based on view selection polygon. :param pply: Masking polygon :param ipj: Projection from data to polygon coordinates :param tol: Slice thickness - `rDUMMY <geosoft.gxapi.rDUMMY>` for no limiting thickness :param mask: Name of mask channel :param select: :ref:`DH_HOLES` :param append: :ref:`DH_MASK` :type pply: GXPLY :type ipj: GXIPJ :type tol: float :type mask: str :type select: int :type append: int .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Data values inside the polygon area, and within the slice thickness have their mask channel values set to 1. If the specified mask channel does not exist, it is created. `DH_MASK_NEW <geosoft.gxapi.DH_MASK_NEW>` --- Mask is created new for each selected hole `DH_MASK_APPEND <geosoft.gxapi.DH_MASK_APPEND>` --- Current selection is added to previous. """ self._mask_ply(pply, ipj, tol, mask.encode(), select, append) @classmethod def open(cls, db): """ Open `GXDH <geosoft.gxapi.GXDH>` from collar database and load all associated databases. :param db: Name of collar database :type db: str :returns: `GXDH <geosoft.gxapi.GXDH>` Object :rtype: GXDH .. 
versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDH._open(GXContext._get_tls_geo(), db.encode()) return GXDH(ret_val) def open_job(self, job, type): """ Open a `GXDH <geosoft.gxapi.GXDH>` plotting job :param job: Job file name :param type: :ref:`DH_PLOT` :type job: str :type type: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._open_job(job.encode(), type) def plot_hole_traces(self, map, job): """ Plot hole traces to a regular (plan) map. :param map: Map handle :param job: Parameter file (INI) name :type map: GXMAP :type job: str .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Both the hole traces and data can be plotted. The DHPLANHOLES GX uses the default plan map parameter file "_plan.inp". """ self._plot_hole_traces(map, job.encode()) def plot_hole_traces_3d(self, mview, job): """ Plot hole traces to an existing 3D map view. :param mview: Existing 3D map view :param job: Parameter file (INI) name (normally ``*.in3``) :type mview: GXMVIEW :type job: str .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Both the hole traces and data can be plotted. The DH3DHOLES GX uses the default 3D map parameter file "_3D.in3". """ self._plot_hole_traces_3d(mview, job.encode()) def plot_symbols_3d(self, mview, job): """ Plot 3D symbols to an existing 3D map view. :param mview: Existing 3D map view :param job: Parameter file (INI) name (normally ``*.in3``) :type mview: GXMVIEW :type job: str .. 
versionadded:: 9.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._plot_symbols_3d(mview, job.encode()) def qa_collar(self, wa): """ Do QA/QC on Hole Collar data. :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :type wa: GXWA .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._qa_collar(wa) def qa_collar_lst(self, lst, wa): """ Do QA/QC on Hole Collar data - `GXLST <geosoft.gxapi.GXLST>` of holes. :param lst: `GXLST <geosoft.gxapi.GXLST>` of holes (Name, Index) :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :type lst: GXLST :type wa: GXWA .. versionadded:: 7.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._qa_collar_lst(lst, wa) def qa_dip_az_curvature(self, wa, tolerance): """ Do QA/QC Curvature checking on Dip Azimuth data. :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :param tolerance: Dip/Azimuth curvature tolerance (degree per meter) :type wa: GXWA :type tolerance: float .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Checks all holes with Dip-Azimuth survey data """ self._qa_dip_az_curvature(wa, tolerance) def qa_dip_az_curvature2(self, wa, tolerance, hole): """ Do QA/QC Curvature checking on Dip Azimuth data for a single hole. :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :param tolerance: Dip/Azimuth curvature tolerance (degree per meter) :param hole: Hole name :type wa: GXWA :type tolerance: float :type hole: str .. 
versionadded:: 6.4.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Checks single hole with Dip-Azimuth survey data """ self._qa_dip_az_curvature2(wa, tolerance, hole.encode()) def qa_dip_az_survey(self, db, wa, line, hole): """ Do QA/QC on Dip/Az Survey data. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :param line: Line :param hole: Current hole Name :type db: GXDB :type wa: GXWA :type line: int :type hole: str .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Error if no Dip-Azimuth survey database, or if the requested line does not exist in the database. """ self._qa_dip_az_survey(db, wa, line, hole.encode()) def qa_east_north_curvature(self, wa, tolerance): """ Do QA/QC Curvature checking on Dip Azimuth data. :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :param tolerance: Dip/Azimuth curvature tolerance (degree per meter) :type wa: GXWA :type tolerance: float .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Checks all holes with East-North survey data """ self._qa_east_north_curvature(wa, tolerance) def qa_east_north_curvature2(self, wa, tolerance, hole): """ Do QA/QC Curvature checking on Dip Azimuth data for a single hole. :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :param tolerance: Dip/Azimuth curvature tolerance (degree per meter) :param hole: Hole name :type wa: GXWA :type tolerance: float :type hole: str .. 
versionadded:: 6.4.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Checks single holes with East-North survey data """ self._qa_east_north_curvature2(wa, tolerance, hole.encode()) def qa_east_north_survey(self, db, wa, line, hole): """ Do QA/QC on East/North Survey data. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :param line: Line :param hole: Current hole Name :type db: GXDB :type wa: GXWA :type line: int :type hole: str .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Error if no East-North survey database, or if the requested line does not exist in the database. """ self._qa_east_north_survey(db, wa, line, hole.encode()) def qa_from_to_data(self, db, wa, line, hole): """ Do QA/QC on From/To data. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :param line: Line :param hole: Current hole Name :type db: GXDB :type wa: GXWA :type line: int :type hole: str .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._qa_from_to_data(db, wa, line, hole.encode()) def qa_point_data(self, db, wa, line, hole): """ Do QA/QC on Point data. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :param line: Line :param hole: Current hole Name :type db: GXDB :type wa: GXWA :type line: int :type hole: str .. 
versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._qa_point_data(db, wa, line, hole.encode()) def qa_write_unregistered_holes(self, db, wa): """ Write out unregistered holes in a database. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle (not the collar table) :param wa: `GXWA <geosoft.gxapi.GXWA>` Handle :type db: GXDB :type wa: GXWA .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Looks at each line in a database and sees if it is listed in the collar tables' hole list. """ self._qa_write_unregistered_holes(db, wa) def replot_holes(self, job, plot_type): """ Replot holes on an existing drill map. :param job: Parameter (INI) name :param plot_type: :ref:`DH_PLOT` :type job: str :type plot_type: int .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The parameter file must correspond to the plot Type. The hDH->hMAP value must be set first, using `set_map <geosoft.gxapi.GXDH.set_map>`. Overwrites existing hole and hole data groups. Replots the legend if the legend is enabled. This should only be used on a slightly modified version of the INI file used to create the existing map, or things may not work out (e.g. bad locations etc). """ self._replot_holes(job.encode(), plot_type) def plot_holes_on_section(self, job, plot_type, view): """ Plot the currently selected holes on an existing section view. :param job: Parameter (INI) name :param plot_type: :ref:`DH_PLOT` Section plot type (`DH_PLOT_SECTION <geosoft.gxapi.DH_PLOT_SECTION>` or `DH_PLOT_SECTION_CROOKED <geosoft.gxapi.DH_PLOT_SECTION_CROOKED>` :param view: View name :type job: str :type plot_type: int :type view: str .. 
versionadded:: 8.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Plot the currently selected holes to a section view. """ self._plot_holes_on_section(job.encode(), plot_type, view.encode()) def re_survey_east_north(self, hole, vv_x, vv_y, vv_z, vv_d, east, north, elev, top, bot): """ Resurvey an East-North-RL survey. :param hole: Hole ID (for error messages) :param vv_x: Input East :param vv_y: Input North :param vv_z: Input RL :param vv_d: Returned depths down the hole :param east: Input collar East :param north: Input collar North :param elev: Input collar RL :param top: Input top of hole depth :param bot: Returned bottom depth :type hole: str :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV :type east: float :type north: float :type elev: float :type top: float :type bot: float_ref .. versionadded:: 5.1.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Re-interpolates in X, Y and Z to proper depth interval and returns depths for each point """ bot.value = self._re_survey_east_north(hole.encode(), vv_x, vv_y, vv_z, vv_d, east, north, elev, top, bot.value) def re_survey_pol_fit(self, hole, vv_dip, vv_az, vv_depth, east, north, elev, top, bot, inc, dip_conv, order, vv_x, vv_y, vv_z, vv_d): """ Use the polynomial fit resurveying method. 
:param hole: Hole ID (used for error messages) :param vv_dip: Dip :param vv_az: Azimuth :param vv_depth: Depth :param east: Collar X (easting) (depth = 0) :param north: Collar Y (northing)(depth = 0) :param elev: Collar Z (elevation) (depth = 0) :param top: Minimum hole depth to start output values :param bot: Maximum hole depth for output values :param inc: Increment for output values :param dip_conv: :ref:`DIP_CONVENTION` :param order: Polynomial order :param vv_x: X (Easting) - Output :param vv_y: Y (Northin) - Output :param vv_z: Z (Elevation) - Output :param vv_d: Depths - Output :type hole: str :type vv_dip: GXVV :type vv_az: GXVV :type vv_depth: GXVV :type east: float :type north: float :type elev: float :type top: float :type bot: float :type inc: float :type dip_conv: int :type order: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Uses the polynomial fit method to calculate (X, Y, Z) locations down the hole from azimuth, dip, depth values. The collar is assumed to be at zero depth, and depth is the measure distance down the hole (even if it's horizontal). A negative dip convention means vertical down is -90 degrees. The polynomial order must be in the range 1-20, with 5 being adequate for most smoothly curving holes. The order is reduced to no more than the number of input points. """ self._re_survey_pol_fit(hole.encode(), vv_dip, vv_az, vv_depth, east, north, elev, top, bot, inc, dip_conv, order, vv_x, vv_y, vv_z, vv_d) def re_survey_rad_curve(self, hole, vv_dip, vv_az, vv_depth, east, north, elev, top, bot, inc, dip_conv, vv_x, vv_y, vv_z, vv_d): """ Use radius of curvature resurveying method. 
:param hole: Hole ID (used for error messages) :param vv_dip: Dip :param vv_az: Azimuth :param vv_depth: Depth :param east: Collar X (easting) (depth = 0) :param north: Collar Y (northing)(depth = 0) :param elev: Collar Z (elevation) (depth = 0) :param top: Minimum hole depth to start output values :param bot: Maximum hole depth for output values :param inc: Increment for output values :param dip_conv: :ref:`DIP_CONVENTION` :param vv_x: X (Easting) - Output :param vv_y: Y (Northin) - Output :param vv_z: Z (Elevation) - Output :param vv_d: Depths - Output :type hole: str :type vv_dip: GXVV :type vv_az: GXVV :type vv_depth: GXVV :type east: float :type north: float :type elev: float :type top: float :type bot: float :type inc: float :type dip_conv: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Uses the Radius of curvature method to calculate (X, Y, Z) locations down the hole from azimuth, dip, depth values. The collar is assumed to be at zero depth, and depth is the measure distance down the hole (even if it's horizontal). A negative dip convention means vertical down is -90 degrees. """ self._re_survey_rad_curve(hole.encode(), vv_dip, vv_az, vv_depth, east, north, elev, top, bot, inc, dip_conv, vv_x, vv_y, vv_z, vv_d) def re_survey_straight(self, hole, dip, az, east, north, elev, top, bot, inc, dip_conv, vv_x, vv_y, vv_z, vv_d): """ Resurvey a straight hole. 
:param hole: Hole ID (used for error messages) :param dip: Collar Dip :param az: Collar Azimuth :param east: Collar X (easting) (depth = 0) :param north: Collar Y (northing)(depth = 0) :param elev: Collar Z (elevation) (depth = 0) :param top: Minimum hole depth to start output values :param bot: Maximum hole depth for output values :param inc: Increment for output values :param dip_conv: :ref:`DIP_CONVENTION` :param vv_x: X (Easting) - Output :param vv_y: Y (Northin) - Output :param vv_z: Z (Elevation) - Output :param vv_d: Depths - Output :type hole: str :type dip: float :type az: float :type east: float :type north: float :type elev: float :type top: float :type bot: float :type inc: float :type dip_conv: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Assumes a straight hole to calculate (X, Y, Z) locations down the hole from azimuth, dip, depth values. The collar is assumed to be at zero depth, and depth is the measure distance down the hole (even if it's horizontal). A negative dip convention means vertical down is -90 degrees. """ self._re_survey_straight(hole.encode(), dip, az, east, north, elev, top, bot, inc, dip_conv, vv_x, vv_y, vv_z, vv_d) def re_survey_straight_seg(self, hole, vv_dip, vv_az, vv_depth, east, north, elev, top, bot, inc, dip_conv, vv_x, vv_y, vv_z, vv_d): """ Resurvey a hole with straight segments between locations. 
:param hole: Hole ID (used for error messages) :param vv_dip: Dip :param vv_az: Azimuth :param vv_depth: Depth :param east: Collar X (easting) (depth = 0) :param north: Collar Y (northing)(depth = 0) :param elev: Collar Z (elevation) (depth = 0) :param top: Minimum hole depth to start output values :param bot: Maximum hole depth for output values :param inc: Increment for output values :param dip_conv: :ref:`DIP_CONVENTION` :param vv_x: X (Easting) - Output :param vv_y: Y (Northin) - Output :param vv_z: Z (Elevation) - Output :param vv_d: Depths - Output :type hole: str :type vv_dip: GXVV :type vv_az: GXVV :type vv_depth: GXVV :type east: float :type north: float :type elev: float :type top: float :type bot: float :type inc: float :type dip_conv: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_d: GXVV .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Calculate (X, Y, Z) locations down the hole from azimuth, dip, depth values, assuming each segment is straight, and the hole bends at each successive azimuth, dip, depth value. The collar is assumed to be at zero depth, and depth is the measure distance down the hole (even if it's horizontal). A negative dip convention means vertical down is -90 degrees. """ self._re_survey_straight_seg(hole.encode(), vv_dip, vv_az, vv_depth, east, north, elev, top, bot, inc, dip_conv, vv_x, vv_y, vv_z, vv_d) def save_data_parameters_ini(self, db, dir): """ Save data parameters to INI files.. :param db: Source database :param dir: Directory to store INI files :type db: GXDB :type dir: str .. versionadded:: 6.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Wholeplot data graphing parameters for each channel are stored in the channel `GXREG <geosoft.gxapi.GXREG>`. 
           This function lets a user transfer pre-defined settings to individual INI files (eg. cu.ini).
           As of v6.3, the `GXDH <geosoft.gxapi.GXDH>` object is NOT required for this function, and
           is, in fact, ignored.
        """
        self._save_data_parameters_ini(db, dir.encode())




    def save_job(self, job, type):
        """
        Save a `GXDH <geosoft.gxapi.GXDH>` plotting job

        :param job:   Job file name
        :param type:  :ref:`DH_PLOT`
        :type  job:   str
        :type  type:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._save_job(job.encode(), type)
        



    def save_select(self, file):
        """
        Saves current selections to a file.

        :param file:  File Name
        :type  file:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._save_select(file.encode())
        



    def section_window_size_mm(self, xmm, ymm):
        """
        Determine the size, in mm, of the section window

        :param xmm:  X size in mm.
        :param ymm:  Y size in mm.
        :type  xmm:  float_ref
        :type  ymm:  float_ref

        .. versionadded:: 6.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Given the current selection of windows (e.g. legend, plan),
        paper size and orientation, return the size in mm of the window
        used for plotting the section.
        """
        xmm.value, ymm.value = self._section_window_size_mm(xmm.value, ymm.value)
        



    def select_all_holes(self):
        """
        Select all the holes in a Drill hole project.

        .. versionadded:: 5.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        self._select_all_holes()
        



    def select_holes(self, gvv, sel):
        """
        Select holes by hole indices.

        :param gvv:  INT `GXVV <geosoft.gxapi.GXVV>` with hole indices.
:param sel: 0 - deselect, 1 - select :type gvv: GXVV :type sel: int .. versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Indices less than 0 are skipped. This lets you use this function after a call to `GXLST.find_items <geosoft.gxapi.GXLST.find_items>`, which returns -1 for indices not located. """ self._select_holes(gvv, sel) def select_name(self, mask, sel, mode): """ Select holes using a name mask. :param mask: Mask :param sel: 0 - deselect, 1 - select :param mode: 0 - overwrite, 1 - append :type mask: str :type sel: int :type mode: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Overwrite mode - all selections tested and selected or not selected Append mode - only holes matching the mask are selected or not selected. """ self._select_name(mask.encode(), sel, mode) def select_ply(self, pply): """ Select all holes in `GXPLY <geosoft.gxapi.GXPLY>` (Polygon) object. :param pply: Polygon object :type pply: GXPLY .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This function operates the same as the `select_ply2 <geosoft.gxapi.GXDH.select_ply2>` method with parameters ``(1, 0, 0)`` """ self._select_ply(pply) def select_ply2(self, pply, select, inside, new_mode): """ Select holes in `GXPLY <geosoft.gxapi.GXPLY>` (Polygon) object with options. :param pply: Polygon object :param select: Select (0) or Deselect (1) :param inside: Region (0: inside, 1: outside) :param new_mode: Mode (0: Append, 1: New) :type pply: GXPLY :type select: int :type inside: int :type new_mode: int .. 
versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The various selection options give the following results: New/Select/inside: Unselect all holes, then select all holes inside the polygon. New/Select/outside: Unselect all holes, then select all holes outside the polygon. New/Deselect/inside: Select all holes, then deselect all holes inside the polygon. New/Deselect/outside: Select all holes, then deselect all holes outside the polygon. Append/Select/inside: Select all holes inside the polygon. Leave selections outside as is. Append/Select/outside: Select all holes outside the polygon. Leave selections inside as is. Append/Deselect/inside: Deselect all holes inside the polygon Leave selections outside as is. Append/Deselect/outside: Deselect all holes outside the polygon. Leave selections inside as is. """ self._select_ply2(pply, select, inside, new_mode) def set_crooked_section_ipj(self, ipj): """ Pass the Crooked projection required for plotting to a crooked section. :param ipj: Crooked Section `GXIPJ <geosoft.gxapi.GXIPJ>` :type ipj: GXIPJ .. versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This might be extracted from an existing crooked section view, or created from a database line. """ self._set_crooked_section_ipj(ipj) def set_current_view_name(self, cur_view): """ Set the current map view name. :param cur_view: View name :type cur_view: str .. versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Can be used to specify the name of the view to plot into. """ self._set_current_view_name(cur_view.encode()) def set_info(self, hole, name, data): """ Set Collar Information. 
:param hole: Hole index :param name: Name of information :param data: Information :type hole: int :type name: str :type data: str .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the DH_ELEV channel is requested it will also search for the DH_RL channel, which is the new name for the collar elevation. """ self._set_info(hole, name.encode(), data.encode()) def set_ipj(self, ipj): """ Set the project `GXIPJ <geosoft.gxapi.GXIPJ>`. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` Handle :type ipj: GXIPJ .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The projection for the project is the projection stored in the DH_EAST channel in the collar table. This function sets the projection of the (DH_EAST, DH_NORTH) channel pairs in each of the project databases to the input `GXIPJ <geosoft.gxapi.GXIPJ>`. The input `GXIPJ <geosoft.gxapi.GXIPJ>` cannot be a geographic coordinate system or this call will fail with an error message. """ self._set_ipj(ipj) def set_map(self, map): """ Store the current `GXMAP <geosoft.gxapi.GXMAP>` to the `GXDH <geosoft.gxapi.GXDH>` object. :param map: `GXIPJ <geosoft.gxapi.GXIPJ>` Handle :type map: GXMAP .. versionadded:: 7.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Use this before calling the ReplotHoles functions, so that, instead of creating a new map, the plotting functions use the existing one. """ self._set_map(map) def set_new_ipj(self, db): """ Set a new project database projection to collar table projection. :param db: Project database name :type db: str .. 
versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Gets the `GXIPJ <geosoft.gxapi.GXIPJ>` of the collar table current x channel and copies it into the named database (as long as it is in the project!) """ self._set_new_ipj(db.encode()) def set_selected_holes_vv(self, vv, append): """ Set hole selection using hole indices. :param vv: Input hole indices (must be type INT) :param append: 0 - overwrite, 1 - append :type vv: GXVV :type append: int .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._set_selected_holes_vv(vv, append) @classmethod def set_template_blob(cls, db, templ, imp_type): """ Store the import template to the database. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param templ: Import template name :param imp_type: :ref:`DH_DATA` :type db: GXDB :type templ: str :type imp_type: int .. versionadded:: 6.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The template can later be retrieved in order to refresh the database with a call to the DHIMPORT.GX. The import types correspond to the DHIMPORT.IMPTYPE variable: 0: ASCII, 1: Database/XLS, 2: ODBC """ gxapi_cy.WrapDH._set_template_blob(GXContext._get_tls_geo(), db, templ.encode(), imp_type) @classmethod def update_template_blob(cls, db, templDestination, templSource, imp_type): """ Update the import template and store to the database if necessary. :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param templDestination: Destination template name :param templSource: Source template name :param imp_type: :ref:`DH_DATA` :type db: GXDB :type templDestination: str :type templSource: str :type imp_type: int .. 
versionadded:: 9.10 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The template will be updated using another template. The import types correspond to the DHIMPORT.IMPTYPE variable: 0: ASCII, 1: Database/XLS, 2: ODBC """ gxapi_cy.WrapDH._update_template_blob(GXContext._get_tls_geo(), db, templDestination.encode(), templSource.encode(), imp_type) def significant_intersections_db(self, mast_db, comp_db, hol_sel, assay_ch, cut_off_grade, clip_grade, min_composite_thickness, min_composite_grade, max_internal_dilution_length, min_internal_dilution_grade, grade_for_missing_assays): """ Make a report of Significant Intersections :param mast_db: Input assay `GXDB <geosoft.gxapi.GXDB>` object :param comp_db: Output composite `GXDB <geosoft.gxapi.GXDB>` object :param hol_sel: :ref:`DH_COMPSTDB_HOLSEL` :param assay_ch: The primary assay channel. :param cut_off_grade: Minimum Cut off grade for Primary Assay :param clip_grade: Maximum Cut off grade for Primary Assay :param min_composite_thickness: Minimum Composite Length :param min_composite_grade: Minimum Composite thickness :param max_internal_dilution_length: Maximum Internal Dilution :param min_internal_dilution_grade: Minimum diluted grade :param grade_for_missing_assays: Grade for Missing Assays :type mast_db: GXDB :type comp_db: GXDB :type hol_sel: int :type assay_ch: str :type cut_off_grade: float :type clip_grade: float :type min_composite_thickness: float :type min_composite_grade: float :type max_internal_dilution_length: float :type min_internal_dilution_grade: float :type grade_for_missing_assays: float .. 
versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._significant_intersections_db(mast_db, comp_db, hol_sel, assay_ch.encode(), cut_off_grade, clip_grade, min_composite_thickness, min_composite_grade, max_internal_dilution_length, min_internal_dilution_grade, grade_for_missing_assays) def test_import_las(self, assay, file, interval, wa, warn): """ Tests import of LAS Data for problems. :param assay: Assay table name :param file: LAS file name :param interval: Averaging/desampling interval :param wa: Log file handle :param warn: 1 returned if problems found :type assay: str :type file: str :type interval: float :type wa: GXWA :type warn: int_ref .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** See `import_las <geosoft.gxapi.GXDH.import_las>`. Determines if the import of the LAS data will result in data being overwritten, interpolated or resampled. Warnings are written to a log file, as in sImportLAS_DH. Warnings are not registered in cases where data is merely extended at the start or the end with dummies to match a different interval down the hole. """ warn.value = self._test_import_las(assay.encode(), file.encode(), interval, wa, warn.value) def un_select_all_holes(self): """ Unselect all the holes in a Drill hole project. .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._un_select_all_holes() def un_selected_hole_lst(self, lst): """ Populate an `GXLST <geosoft.gxapi.GXLST>` with the list of the unselected holes :param lst: `GXLST <geosoft.gxapi.GXLST>` handle :type lst: GXLST .. 
versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._un_selected_hole_lst(lst) def update_collar_table(self): """ Update all collar table information. .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._update_collar_table() def update_hole_extent(self, hole): """ Update extents for one hole. :param hole: Hole index :type hole: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._update_hole_extent(hole) def wholeplot(self, job, plot_type): """ Run a Wholeplot plot job. :param job: Parameter (INI) name :param plot_type: :ref:`DH_PLOT` :type job: str :type plot_type: int .. versionadded:: 5.1.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The parameter file must correspond to the plot Type. The INI file contains settings for all of the non-database data related parameters (e.g. Map template, scale, boundaries, section definitions, hole trace parameters etc...) """ self._wholeplot(job.encode(), plot_type) def surface_intersections(self, output_db, input_geosurface_or_grid, hole_selection): """ Determine intersections of drillholes with a surface. :param output_db: Output `GXDB <geosoft.gxapi.GXDB>` Handle :param input_geosurface_or_grid: Input surface file :param hole_selection: Selected holes (1), All holes (0) :type output_db: GXDB :type input_geosurface_or_grid: str :type hole_selection: int .. 
versionadded:: 8.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._surface_intersections(output_db, input_geosurface_or_grid.encode(), hole_selection) @classmethod def get_mx_deposit_rights_info(cls, has_rights, base_url, api_version_prefix, api_key, user_key, database_id): """ Get MX Deposit Service API information via Geosoft ID rights. :param has_rights: Does Geosoft ID have rights to access MX Deposit? :param base_url: Base URL :param api_version_prefix: API Version Prefix :param api_key: API Key :param user_key: User Key :param database_id: Database ID :type has_rights: bool_ref :type base_url: str_ref :type api_version_prefix: str_ref :type api_key: str_ref :type user_key: str_ref :type database_id: str_ref .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ has_rights.value, base_url.value, api_version_prefix.value, api_key.value, user_key.value, database_id.value = gxapi_cy.WrapDH._get_mx_deposit_rights_info(GXContext._get_tls_geo(), has_rights.value, base_url.value.encode(), api_version_prefix.value.encode(), api_key.value.encode(), user_key.value.encode(), database_id.value.encode()) @classmethod def navigate_to_mx_deposit(cls, select_type, select_id): """ Navigate to MX Deposit portal :param select_type: Selection Type :param select_id: Selection ID :type select_type: str :type select_id: str .. 
versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDH._navigate_to_mx_deposit(GXContext._get_tls_geo(), select_type.encode(), select_id.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/om-extensions/chanadd.py ''' Add a constant value to a channel on all selected lines. This is a sample Python extension that shows how to work with saved parameters and a Geosoft database. ''' import geosoft.gxpy as gxpy import geosoft.gxpy.project as gxprj import geosoft.gxpy.utility as gxu def rungx(): # api version gxpy.utility.check_version('9.2') # get the current database db = gxpy.gdb.Geosoft_gdb.open() # project parameters group = 'CHANADD' p_chan = 'CHANNEL' p_addval = 'ADDVAL' # get previous parameters from the parameter block, initializing to start-up defaults '' and 0.0 parms = gxu.get_parameters(group, {p_chan: '', p_addval: 0.0}) # if interactive, get user input if not gxprj.running_script(): try: # get channel to process from list of database channels chan = gxprj.get_user_input( 'Channel to process', 'Channel:', kind='list', default=parms.get(p_chan), items=sorted([k for k in db.list_channels().keys()])) # value to add to the channel addval = gxprj.get_user_input( 'Value to add to the data', 'value to add:', kind='float', default=parms.get(p_addval)) except gxprj.ProjectException: exit() # save parameters to new user settings parms[p_chan] = chan parms[p_addval] = addval gxu.save_parameters(group, parms) # work through the data a line at a time - get a list of selected lines lines = db.list_lines() # for each line, get the data, add a value, return the data to the line for l in lines: # print to the console to reflect progress 
print('line {}...'.format(str(l))) # get the data and determine the dummy to the data type data, ch, fid = db.read_line(l, channels=chan) dummy = gxu.gx_dummy(data.dtype) # make a dummy mask so we can replace dummies after processing dMask = gxu.dummy_mask(data) # process - add the value, then replace the dummies sum = data + addval sum[dMask] = dummy # write the data back to the database db.write_channel(l, chan, sum, fid) # pause the console so user can review input("Press return to continue...") <file_sep>/docs/GXIGRF.rst .. _GXIGRF: GXIGRF class ================================== .. autoclass:: geosoft.gxapi.GXIGRF :members: <file_sep>/docs/GXCHIMERA.rst .. _GXCHIMERA: GXCHIMERA class ================================== .. autoclass:: geosoft.gxapi.GXCHIMERA :members: .. _CHIMERA_MAX_CHAN: CHIMERA_MAX_CHAN constants ----------------------------------------------------------------------- Maximum channels in Chimera database .. autodata:: geosoft.gxapi.CHIMERA_MAX_CHAN :annotation: .. autoattribute:: geosoft.gxapi.CHIMERA_MAX_CHAN .. _CHIMERA_PLOT: CHIMERA_PLOT constants ----------------------------------------------------------------------- Chimera plot type .. autodata:: geosoft.gxapi.CHIMERA_PLOT_ROSE :annotation: .. autoattribute:: geosoft.gxapi.CHIMERA_PLOT_ROSE .. autodata:: geosoft.gxapi.CHIMERA_PLOT_PIE :annotation: .. autoattribute:: geosoft.gxapi.CHIMERA_PLOT_PIE .. autodata:: geosoft.gxapi.CHIMERA_PLOT_BAR :annotation: .. autoattribute:: geosoft.gxapi.CHIMERA_PLOT_BAR <file_sep>/examples/stand-alone/hello_world.py # This sample stand-alone Python script shows a minimal use of the Pythonic gxpy module to # create a Geosoft context and say hello to the user. # This example can be run stand-alon or as a Oasis montaj extension. 
import geosoft.gxpy as gxpy # gxpy methods # running as an extension from Oasis montaj will execute rungx() def rungx(): gxpy.utility.check_version('9.2') # say hello with gxpy.gx.gx() as gxc: gxpy.utility.display_message("GX Python", "Hello {}".format(gxc.gid)) # running as stand-alone program if __name__ == "__main__": gxpy.utility.check_version('9.2') # Stand-alone programs must create a GX context before calling Geosoft methods. with gxpy.gx.GXpy() as gxc: # The context has a member 'gid' which contains the user's Geosoft ID. # Say hello to the user print("Hello {}".format(gxc.gid)) <file_sep>/examples/tutorial/Geosoft Databases/import_csv.py import numpy as np import geosoft.gxpy as gxpy import geosoft.gxpy.gdb as gxdb #create context gxc = gxpy.gx.GXpy() # Open csv-format data file and skip the first line, which is a comment line f = open('mag_data.csv', 'r') f.readline() # the second line contains the channel/field names, from which we create a list of channel names channel_names = f.readline().strip().split(',') #the rest of the file contains data, which we load into a numpy float array data = np.loadtxt(f, delimiter=',') #create a new database from list of channels and numpy data. All data is stored in a single line. gdb = gxdb.Geosoft_gdb.new('mag_data', overwrite=True) line_name = gxdb.create_line_name() gdb.write_line(line_name, data, channel_names) # set the coordinate system to 'NAD 83 / UTM zone 15N' gdb.coordinate_system = 'NAD83 / UTM zone 15N' # set the mag data units to 'nT' gxdb.Channel(gdb, 'mag').unit_of_measure = 'nT' print(gdb.list_lines()) # {'L0': 100020} print(gdb.list_channels()) # {'mag': 100523, 'X': 100520, 'Y': 100521, 'Z': 100522} print(gdb.xyz_channels) # ('X', 'Y', 'Z') exit() <file_sep>/examples/geosoft_research/self_organizing_maps/python/som_om_qt5.py # -*- coding: utf-8 -*- """ Created on Sun Jan 5 10:15:34 2014 INSTALLATION This script depends on PyGt5, which **cannot** be run from ESRI ArcGIS Pro Python. 
To install PyQt5, open a command window as administrator and navigate to your Python
folder (the folder that contains python.exe):

   scripts\pip install pyqt5

see https://www.riverbankcomputing.com/software/pyqt/intro for PyQt copyright.

@author: <NAME>, Geosoft, 2014-17
"""

#TODO: save classification map
#TODO: add some Help
#TODO: graphics - show the som adjusting as it goes.

#import pydevd
#pydevd.settrace('localhost', port=34765, stdoutToServer=True, stderrToServer=True)

import os
import sys
import math
import json
import numpy as np
import argparse as argp
from PyQt5 import QtGui, QtWidgets

import geosoft.gxpy.gx as gxp
import geosoft.gxpy.gdb as gxgdb
import geosoft.gxpy.utility as gxu

try:
    import mvar
except:
    # this depends on the modules folder being up two folders from this source file
    # NOTE(review): deliberate best-effort fallback import path - the bare except
    # catches any failure of the plain 'import mvar' and retries via the modules folder
    modules_folder = os.path.split(os.path.split(os.path.split(__file__)[0])[0])[0]
    sys.path.append(modules_folder)
    import modules.mvar as mvar

from som_om_ui_qt5 import Ui_som_om


def _(s):
    """Translation placeholder - currently returns the string unchanged."""
    return s


def decimate(data, maxn):
    """Thin an array down to at most roughly ``maxn`` elements.

    When ``len(data) > maxn`` and ``maxn > 0``, keeps every nth element
    (n = ceil(len(data)/maxn)) starting from index 0 and returns the
    selection; otherwise returns ``data`` unchanged.
    """
    ndata = len(data)
    if (ndata > maxn) and (maxn > 0):
        nth = math.ceil(ndata/maxn)
        base = np.arange(0, len(data))
        select = (base % nth) == 0
        return(data[select])
    else:
        return(data)


class SOMException(RuntimeError):
    """Raised for SOM-specific runtime failures."""
    pass


###############################################################################################
class SomDialog(QtWidgets.QDialog, Ui_som_om):
    """Main dialog for configuring and running the SOM classification."""

    def __init__(self, gdb, settings):
        # gdb: open Geosoft database; settings: shared-parameter dict with keys
        # 'CLASS_ERR', 'FILTER', 'INPUT_DATA' and optionally 'SOM_PARAMETERS'
        super(SomDialog, self).__init__(None)
        self.setupUi(self)
        self.gdb = gdb
        self.settings = settings
        self.class_err = settings['CLASS_ERR']
        self.filter = settings['FILTER']
        self.stopRequest = False
        self.savedVal = {}
        indata = settings['INPUT_DATA']
        # channel names sorted case-insensitively; norms kept in matching order
        self.chans = sorted(indata, key=str.lower)
        self.norms = [indata[c] for c in self.chans]
        sf = mvar.similarity_functions()
        # defaults: 4 classes, 2% anomalous, first similarity function
        self.som_param = settings.get('SOM_PARAMETERS', (4, 2, sf[0]))

        # connect slots
        self.classButton.clicked.connect(self.classify)
        self.outClass.textChanged.connect(self.outClassChanged)
self.norm.currentIndexChanged.connect(self.allNorms) self.stopButton.clicked.connect(self.stopIt) self.initialiseDialog() def refresh(self): def channorm(chanList,normList,chan,norm): if len(chan): chanList.setCurrentIndex(chanList.findText(chan)) normList.setCurrentIndex(norm) #clear channel lists self.chan_1.clear() self.chan_2.clear() self.chan_3.clear() self.chan_4.clear() self.chan_5.clear() self.chan_6.clear() self.chan_7.clear() self.chan_8.clear() self.chan_9.clear() self.chan_10.clear() self.chan_11.clear() self.chan_12.clear() self.chan_13.clear() self.chan_14.clear() self.chan_15.clear() self.chan_16.clear() self.filterChan.clear() #set channel lists to database channels chans = self.gdb.list_channels() chans[''] = None for c in sorted(chans.keys(), key=lambda k: k.lower()): self.chan_1.addItem(c) self.chan_2.addItem(c) self.chan_3.addItem(c) self.chan_4.addItem(c) self.chan_5.addItem(c) self.chan_6.addItem(c) self.chan_7.addItem(c) self.chan_8.addItem(c) self.chan_9.addItem(c) self.chan_10.addItem(c) self.chan_11.addItem(c) self.chan_12.addItem(c) self.chan_13.addItem(c) self.chan_14.addItem(c) self.chan_15.addItem(c) self.chan_16.addItem(c) self.filterChan.addItem(c) # set norm list for n in ['no','normal','lognorm']: self.norm.addItem(n) self.norm_1.addItem(n) self.norm_2.addItem(n) self.norm_3.addItem(n) self.norm_4.addItem(n) self.norm_5.addItem(n) self.norm_6.addItem(n) self.norm_7.addItem(n) self.norm_8.addItem(n) self.norm_9.addItem(n) self.norm_10.addItem(n) self.norm_11.addItem(n) self.norm_12.addItem(n) self.norm_13.addItem(n) self.norm_14.addItem(n) self.norm_15.addItem(n) self.norm_16.addItem(n) #set default channels chans = self.chans norms = self.norms n = len(chans) if n >= 1: channorm(self.chan_1,self.norm_1,chans[0],norms[0]) if n >= 2: channorm(self.chan_2,self.norm_2,chans[1],norms[1]) if n >= 3: channorm(self.chan_3,self.norm_3,chans[2],norms[2]) if n >= 4: channorm(self.chan_4,self.norm_4,chans[3],norms[3]) if n >= 5: 
channorm(self.chan_5,self.norm_5,chans[4],norms[4]) if n >= 6: channorm(self.chan_6,self.norm_6,chans[5],norms[5]) if n >= 7: channorm(self.chan_7,self.norm_7,chans[6],norms[6]) if n >= 8: channorm(self.chan_8,self.norm_8,chans[8],norms[8]) if n >= 9: channorm(self.chan_9,self.norm_9,chans[8],norms[8]) if n >= 10: channorm(self.chan_10,self.norm_10,chans[9],norms[9]) if n >= 11: channorm(self.chan_11,self.norm_11,chans[10],norms[10]) if n >= 12: channorm(self.chan_12,self.norm_12,chans[11],norms[11]) if n >= 13: channorm(self.chan_13,self.norm_13,chans[12],norms[12]) if n >= 14: channorm(self.chan_14,self.norm_14,chans[13],norms[13]) if n >= 15: channorm(self.chan_15,self.norm_15,chans[14],norms[14]) if n >= 16: channorm(self.chan_16,self.norm_16,chans[15],norms[15]) #output channels self.outClass.setText(self.class_err[0]) self.outError.setText(self.class_err[1]) #filter self.filterChan.setCurrentIndex(self.filterChan.findText(self.filter[0])) self.filterVal.setText(self.filter[1]) #database name self.databaseName.setText(self.gdb.file_name) def results(self): indata = {} for i in range(len(self.chans)): indata[self.chans[i]] = self.norms[i] self.settings['INPUT_DATA'] = indata self.settings['FILTER'] = self.filter self.settings['CLASS_ERR'] = self.class_err self.settings['SOM_PARAMETERS'] = self.som_param return self.settings def initialiseDialog(self): self.refresh() self.stopB(False) # similarity sf = mvar.similarity_functions() for i in sf: self.similarity_func.addItem(str(i)) self.similarity_func.setCurrentIndex(sf.index(self.som_param[2])) # classifications lc = mvar.SOM.list_dim() for i in lc: self.nClasses.addItem(str(i)) self.nClasses.setCurrentIndex(lc.index(self.som_param[0])) self.anomPercent.setText(str(self.som_param[1])) def stopB(self,b): self.stopButton.setEnabled(b) self.classButton.setEnabled(not b) if not b: self.stopRequest = False def stopIt(self): self.stopRequest = True self.progLabel.setText('Stopping...') def outClassChanged(self): 
outClass = self.outClass.text().strip() if outClass: self.outError.setText(outClass+'_eud') def allNorms(self,index): self.norm_1.setCurrentIndex(index) self.norm_2.setCurrentIndex(index) self.norm_3.setCurrentIndex(index) self.norm_4.setCurrentIndex(index) self.norm_5.setCurrentIndex(index) self.norm_6.setCurrentIndex(index) self.norm_7.setCurrentIndex(index) self.norm_8.setCurrentIndex(index) self.norm_9.setCurrentIndex(index) self.norm_10.setCurrentIndex(index) self.norm_11.setCurrentIndex(index) self.norm_12.setCurrentIndex(index) self.norm_13.setCurrentIndex(index) self.norm_14.setCurrentIndex(index) self.norm_15.setCurrentIndex(index) self.norm_16.setCurrentIndex(index) def classify(self): def progress(label, value=None, som=None): self.progLabel.setText(label) if value != None: self.progressBar.setValue(int(value)) QtWidgets.qApp.processEvents() def stop_check(): QtWidgets.qApp.processEvents() return self.stopRequest def addChan(cb,cn,c,n): cc = cb.currentText() if len(cc) == 0: return if cc in c: return c.append(cc) n.append(cn.currentIndex()) chan = [] norm = [] addChan(self.chan_1 ,self.norm_1 , chan, norm) addChan(self.chan_2 ,self.norm_2 , chan, norm) addChan(self.chan_3 ,self.norm_3 , chan, norm) addChan(self.chan_4 ,self.norm_4 , chan, norm) addChan(self.chan_5 ,self.norm_5 , chan, norm) addChan(self.chan_6 ,self.norm_6 , chan, norm) addChan(self.chan_7 ,self.norm_7 , chan, norm) addChan(self.chan_8 ,self.norm_8 , chan, norm) addChan(self.chan_9 ,self.norm_9 , chan, norm) addChan(self.chan_10,self.norm_10, chan, norm) addChan(self.chan_11,self.norm_11, chan, norm) addChan(self.chan_12,self.norm_12, chan, norm) addChan(self.chan_13,self.norm_13, chan, norm) addChan(self.chan_14,self.norm_14, chan, norm) addChan(self.chan_15,self.norm_15, chan, norm) addChan(self.chan_16,self.norm_16, chan, norm) self.chans = chan self.norms = norm self.som_param = (int(self.nClasses.currentText()), min(max(0.0,float(self.anomPercent.text())),95.0), 
self.similarity_func.currentText()) self.filter = (self.filterChan.currentText().strip(), self.filterVal.text().strip()) if (len(self.filter[0]) == 0) or (len(self.filter[1]) == 0): self.filter = ('','') self.class_err = (self.outClass.text(), self.outError.text()) gdbChans = self.gdb.list_channels() if (self.class_err[0] in gdbChans) or (self.class_err[1] in gdbChans): butts = QtWidgets.QMessageBox.Yes butts |= QtWidgets.QMessageBox.No response = QtWidgets.QMessageBox.question(self,"Field exist in database", '"{}" or "{}" exists. Overwrite?' .format(self.class_err[0],self.class_err[1]), buttons=butts) if response != QtWidgets.QMessageBox.Yes: return self.stopB(True) try: mvar.SOMgdb( self.gdb, chan, normalize=norm, ch_filter=self.filter, dim=self.som_param[0], per=self.som_param[1], similarity=self.som_param[2], progress=progress, stop=stop_check, class_err=self.class_err) except Exception as e: QtWidgets.QMessageBox.information(self, "Classification failed", '{}'.format(e), buttons=QtWidgets.QMessageBox.Ok) raise self.done(0) ############################################################################################### if __name__ == '__main__': ''' Self-Organizing maps ''' # get command line parameters parser = argp.ArgumentParser(description=_("SOM analysis of data in a Geosoft database")) args = parser.parse_args() print("GeoSOM copyright 2016 Geosoft Inc.\n") gxc = gxp.GXpy() settings = gxu.get_shared_dict() print(settings) #input('continue...') # defaults if 'CLASS_ERR' not in settings: settings['CLASS_ERR'] = ('Class', 'EuD') if 'FILTER' not in settings: settings['FILTER'] = ('', '') if 'SOM_PARAMETERS' not in settings: settings['SOM_PARAMETERS'] = (4, 2, mvar.similarity_functions()[0]) gdb_name = os.path.normpath(settings['GDB_NAME']) gdb = gxgdb.Geosoft_gdb.open(gdb_name) #launch GUI app = QtWidgets.QApplication([]) form = SomDialog(gdb, settings) form.show() app.exec_() results = form.results() gxu.set_shared_dict(results) 
<file_sep>/geosoft/gxapi/GXContext.py from . import gxapi_cy from geosoft.gxapi import GXAPIError, int_ref import os import inspect import threading import winreg _tls = threading.local() class GXContext: """ The main GX execution context. A single instance of this object must be created per thread and persist before using any other class in the :py:mod:`.geosoft.gxapi` module. .. seealso:: Method :func:`.gxpy.gx.GXpy` """ def __enter__(self): return self def __exit__(self, type, value, traceback): self.__del__() def __del__(self): if self._release_tls_geo: global _tls tls_geo = getattr(_tls, '_gxa_geo', None) if tls_geo is not None: del _tls._gxa_geo _tls._gxa_geo = None tls_geo._destroy() def __init__(self, wrapper): global _tls tls_geo = getattr(_tls, '_gxa_geo', None) if tls_geo is None: _tls._gxa_geo = wrapper self._release_tls_geo = True else: self._release_tls_geo = False @classmethod def _try_get_tls_geo(cls): global _tls return getattr(_tls, '_gxa_geo', None) @classmethod def _get_tls_geo(cls): tls_geo = cls._try_get_tls_geo() if tls_geo is None: raise GXAPIError("A GXContext instance has not been created for current thread yet, " "or the original context has been released.") return tls_geo @classmethod def create(cls, application, version, wind_id=0, flags=0, key='Core', per_user_key=False, redist_override=False, redist_dir=None, user_dir=None, temp_dir=None): """ Creates the GX execution context (will return the current one if it exists). 
:param application: Calling application name" :param version: Calling application version :param parent_wnd_id: Calling application main window handle (HWND cast to unsigned on Windows) as an int (default 0) :param flags: 0 default; 64 suppresses text progress messages; 128 suppresses GUI progress window :param key: Default Geosoft registry key (in absence of geosoft.key file) to use to discover GX developer common redistributables or Desktop Applications software (default 'Core') :param per_user_key: Use per-user registry instead of local machine (default False) :param redist_override: Override registry mechanism to discover redistributables with redist_dir, user_dir and temp_dir parameters. (default False) :param redist_dir: Path containing the redistributable files, i.e. containing bin, csv and other folders. Only used if redist_override is True (default None) :param user_dir: Writable path to directory containing the user redistributable files. Only used if redist_override is True (default None). :param temp_dir: Path to use for temporary files. Only used if redist_override is True (default None) :type application: str :type version: str :type parent_wnd_id: int :type flags: int :type key: str :type per_user_key: bool :type redist_override: bool :type redist_dir: str :type user_dir: str :type temp_dir: str :returns: A GX execution context. :rtype: GXContext .. 
versionadded:: 9.1 """ global _tls tls_geo = getattr(_tls, '_gxa_geo', None) if tls_geo is None: if not cls._geodist_init: if redist_override: cls._set_geosoft_redist_overrides(redist_dir, user_dir, temp_dir) else: geosoft_dir, _, _ = cls.get_key_based_product_dirs(key, per_user_key) cls._geosoft_dist_init(geosoft_dir) p_geo = gxapi_cy.WrapPGeo() p_geo._create(application, version, wind_id, flags) return GXContext(p_geo) else: return GXContext(tls_geo) @classmethod def get_key_based_product_dirs(cls, key='Core', per_user_key=False): """ Gets key product folders based on geosoft.key file and registry :param key: Default Geosoft registry key (in absence of geosoft.key file) to use to discover GX developer common redistributables or Desktop Applications software (default 'Core') :param per_user_key: Use per-user registry instead of local machine (default False) :returns: product_install_dir, user_dir, temp_dir .. versionadded:: 9.7 """ key_file = os.path.join(os.path.dirname(inspect.getfile(cls)), 'geosoft.key') if os.path.exists(key_file): with open(key_file) as f: key = f.read().strip() reg_hive = winreg.HKEY_CURRENT_USER if per_user_key else winreg.HKEY_LOCAL_MACHINE env_key = winreg.OpenKey(reg_hive, 'Software\Geosoft\{}\Environment'.format(key), 0, winreg.KEY_READ) try: product_install_dir, _ = winreg.QueryValueEx(env_key, 'GEOSOFT') user_dir, _ = winreg.QueryValueEx(env_key, 'GEOSOFT2') temp_dir, _ = winreg.QueryValueEx(env_key, 'GEOTEMP') return product_install_dir, user_dir, temp_dir finally: winreg.CloseKey(env_key) @classmethod def _create_internal(cls, internal_p_geo): global _tls tls_geo = getattr(_tls, '_gxa_geo', None) if tls_geo is None: p_geo = gxapi_cy.WrapPGeo() p_geo._create_internal(internal_p_geo) return GXContext(p_geo) else: return GXContext(tls_geo) @classmethod def _internal_p(cls): p_geo = GXContext._get_tls_geo() return p_geo._internal_p() _geodist_init = False @classmethod def _geosoft_dist_init(cls, dist_dir, dll_name='geodist.dll'): 
gxapi_cy.WrapPGeo.geosoft_dist_init(dist_dir, dll_name) cls._geodist_init = True @classmethod def _set_geosoft_redist_overrides(cls, redist_dir, user_dir, temp_dir, dll_name='geodist.dll'): gxapi_cy.WrapPGeo.set_geosoft_redist_overrides(redist_dir, user_dir, temp_dir, dll_name) cls._geodist_init = True @classmethod def _redirect_std_streams(cls): gxapi_cy.WrapPGeo.gx_redirect_std_streams() def get_main_wnd_id(self): """ Get the main window handle (0 if not available). :returns: Window handle as an int (HWND cast to unsigned on Windows) :rtype: int .. versionadded:: 9.1 """ p_geo = GXContext._get_tls_geo() return p_geo.get_main_wnd() def get_active_wnd_id(self): """ Get currently active window (main window, floating document or other popup, 0 if not available). :returns: Window handle as an int (HWND cast to unsigned on Windows) :rtype: int .. versionadded:: 9.1 """ p_geo = GXContext._get_tls_geo() return p_geo.get_active_wnd() def enable_application_windows(self, enable): """ Used by to prevent user interaction while showing modal windows with APIs where it might be hard to use proper window parenting (e.g. in Python with PyQt, tkinter, wxPython etc.). Take care to enable window prior to any calls that need user interaction, e.g. The :class:`geosoft.gxapi.GXEMAP` digitization methods. :param enable: True to enable, False to disable keyboard and mouse interaction :type enable: bool .. versionadded:: 9.1 """ p_geo = GXContext._get_tls_geo() return p_geo.enable_application_windows(enable) def has_ui_console(self): """ Checks if a console owned by UI applications is available :returns: True if the parent has UI console. :rtype: bool .. versionadded:: 9.1 """ p_geo = GXContext._get_tls_geo() return p_geo.has_ui_console() def is_ui_console_visible(self): """ Checks if a console owned by UI applications is visible :returns: True if the UI console is visible. :rtype: bool .. 
versionadded:: 9.1
        """
        p_geo = GXContext._get_tls_geo()
        return p_geo.is_ui_console_visible()

    def show_ui_console(self, show):
        """
        Shows or hides console owned by UI applications. Showing the console Will also
        bring the window to the front if behind other application windows.
        Has no effect on consoles owning standalone scripts.

        :param show: True to show False to Hide
        :type show: bool

        .. versionadded:: 9.1
        """
        p_geo = GXContext._get_tls_geo()
        return p_geo.show_ui_console(show)

    def clear_ui_console(self):
        """
        Clears the console owned by UI applications.
        Has no effect on consoles owning standalone scripts.

        .. versionadded:: 9.1
        """
        p_geo = GXContext._get_tls_geo()
        return p_geo.clear_ui_console()
<file_sep>/geosoft/gxpy/tests/script_gx.py
import geosoft.gxpy.project as gxprj
import geosoft.gxapi as gxa

# This GX is used in a test, during script recording, and the script should only contain its parameters
# but not ones from user_input.gx


def rungx():
    """Test GX entry point exercising script-recording parameter round-trips.

    Interactive mode: prompt for a float (defaulting to the saved
    SCRIPT_GX/VALUE parameter) and save the response back.
    Non-interactive (script playback) mode: re-enable interactivity and
    display the previously saved value to the user.
    """
    float_val = gxa.GXSYS.get_double("SCRIPT_GX", "VALUE")
    if gxa.GXSYS.interactive() != 0:
        ret_val = gxprj.get_user_input('SCRIPT GX (interactive)', 'Float', kind='float', default=float_val)
        print('float return: {}'.format(ret_val))
        gxa.GXSYS.set_double("SCRIPT_GX", "VALUE", ret_val)
    else:
        gxa.GXSYS.set_interactive(1)  # Restore interactive mode to make dialogs appear
        gxprj.user_message('SCRIPT GX (non interactive)', "Received value: {}".format(float_val))
<file_sep>/docs/GXMVU.rst
.. _GXMVU:

GXMVU class
==================================

.. autoclass:: geosoft.gxapi.GXMVU
   :members:


.. _EMLAY_GEOMETRY:

EMLAY_GEOMETRY constants
-----------------------------------------------------------------------

Type of Geometry

.. autodata:: geosoft.gxapi.EMLAY_V_COPLANAR
   :annotation:

.. autoattribute:: geosoft.gxapi.EMLAY_V_COPLANAR


.. autodata:: geosoft.gxapi.EMLAY_H_COPLANAR
   :annotation:

.. autoattribute:: geosoft.gxapi.EMLAY_H_COPLANAR


.. autodata:: geosoft.gxapi.EMLAY_V_COAXIAL
   :annotation:

..
autoattribute:: geosoft.gxapi.EMLAY_V_COAXIAL .. _ARROW_ALIGNMENT: ARROW_ALIGNMENT constants ----------------------------------------------------------------------- Direction of alignment .. autodata:: geosoft.gxapi.ARROW_ALIGNMENT_HORIZONTAL :annotation: .. autoattribute:: geosoft.gxapi.ARROW_ALIGNMENT_HORIZONTAL .. autodata:: geosoft.gxapi.ARROW_ALIGNMENT_VERTICAL :annotation: .. autoattribute:: geosoft.gxapi.ARROW_ALIGNMENT_VERTICAL .. _BARCHART_LABEL: BARCHART_LABEL constants ----------------------------------------------------------------------- Place to draw bar labels .. autodata:: geosoft.gxapi.BARCHART_LABEL_NO :annotation: .. autoattribute:: geosoft.gxapi.BARCHART_LABEL_NO .. autodata:: geosoft.gxapi.BARCHART_LABEL_BELOWX :annotation: .. autoattribute:: geosoft.gxapi.BARCHART_LABEL_BELOWX .. autodata:: geosoft.gxapi.BARCHART_LABEL_ABOVEX :annotation: .. autoattribute:: geosoft.gxapi.BARCHART_LABEL_ABOVEX .. autodata:: geosoft.gxapi.BARCHART_LABEL_PEND :annotation: .. autoattribute:: geosoft.gxapi.BARCHART_LABEL_PEND .. autodata:: geosoft.gxapi.BARCHART_LABEL_NEND :annotation: .. autoattribute:: geosoft.gxapi.BARCHART_LABEL_NEND .. autodata:: geosoft.gxapi.BARCHART_LABEL_ALTERNAT1 :annotation: .. autoattribute:: geosoft.gxapi.BARCHART_LABEL_ALTERNAT1 .. autodata:: geosoft.gxapi.BARCHART_LABEL_ALTERNAT2 :annotation: .. autoattribute:: geosoft.gxapi.BARCHART_LABEL_ALTERNAT2 .. _COLORBAR_LABEL: COLORBAR_LABEL constants ----------------------------------------------------------------------- Label text orientation .. autodata:: geosoft.gxapi.COLORBAR_LABEL_HORIZONTAL :annotation: .. autoattribute:: geosoft.gxapi.COLORBAR_LABEL_HORIZONTAL .. autodata:: geosoft.gxapi.COLORBAR_LABEL_VERTICAL :annotation: .. autoattribute:: geosoft.gxapi.COLORBAR_LABEL_VERTICAL .. _COLORBAR_STYLE: COLORBAR_STYLE constants ----------------------------------------------------------------------- Label text orientation .. autodata:: geosoft.gxapi.COLORBAR_STYLE_NONE :annotation: .. 
autoattribute:: geosoft.gxapi.COLORBAR_STYLE_NONE .. autodata:: geosoft.gxapi.COLORBAR_STYLE_MAXMIN :annotation: .. autoattribute:: geosoft.gxapi.COLORBAR_STYLE_MAXMIN .. _MVU_ORIENTATION: MVU_ORIENTATION constants ----------------------------------------------------------------------- Orientation (of whatever) .. autodata:: geosoft.gxapi.MVU_ORIENTATION_VERTICAL :annotation: .. autoattribute:: geosoft.gxapi.MVU_ORIENTATION_VERTICAL .. autodata:: geosoft.gxapi.MVU_ORIENTATION_HORIZONTAL :annotation: .. autoattribute:: geosoft.gxapi.MVU_ORIENTATION_HORIZONTAL .. _MVU_DIVISION_STYLE: MVU_DIVISION_STYLE constants ----------------------------------------------------------------------- Orientation (of whatever) .. autodata:: geosoft.gxapi.MVU_DIVISION_STYLE_NONE :annotation: .. autoattribute:: geosoft.gxapi.MVU_DIVISION_STYLE_NONE .. autodata:: geosoft.gxapi.MVU_DIVISION_STYLE_LINES :annotation: .. autoattribute:: geosoft.gxapi.MVU_DIVISION_STYLE_LINES .. autodata:: geosoft.gxapi.MVU_DIVISION_STYLE_TICS :annotation: .. autoattribute:: geosoft.gxapi.MVU_DIVISION_STYLE_TICS .. _MVU_ARROW: MVU_ARROW constants ----------------------------------------------------------------------- Type Arrow. These definitions are used as binary flags, and can be used together by passing sums. .. autodata:: geosoft.gxapi.MVU_ARROW_SOLID :annotation: .. autoattribute:: geosoft.gxapi.MVU_ARROW_SOLID .. autodata:: geosoft.gxapi.MVU_ARROW_FIXED :annotation: .. autoattribute:: geosoft.gxapi.MVU_ARROW_FIXED .. _MVU_FLIGHT_COMPASS: MVU_FLIGHT_COMPASS constants ----------------------------------------------------------------------- Compass direction .. autodata:: geosoft.gxapi.MVU_FLIGHT_COMPASS_NONE :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_COMPASS_NONE .. autodata:: geosoft.gxapi.MVU_FLIGHT_COMPASS_EAST :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_COMPASS_EAST .. autodata:: geosoft.gxapi.MVU_FLIGHT_COMPASS_NORTH :annotation: .. 
autoattribute:: geosoft.gxapi.MVU_FLIGHT_COMPASS_NORTH .. autodata:: geosoft.gxapi.MVU_FLIGHT_COMPASS_WEST :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_COMPASS_WEST .. autodata:: geosoft.gxapi.MVU_FLIGHT_COMPASS_SOUTH :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_COMPASS_SOUTH .. _MVU_FLIGHT_DUMMIES: MVU_FLIGHT_DUMMIES constants ----------------------------------------------------------------------- Show Dummies .. autodata:: geosoft.gxapi.MVU_FLIGHT_DUMMIES_NOTINCLUDED :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_DUMMIES_NOTINCLUDED .. autodata:: geosoft.gxapi.MVU_FLIGHT_DUMMIES_INCLUDED :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_DUMMIES_INCLUDED .. _MVU_FLIGHT_LOCATE: MVU_FLIGHT_LOCATE constants ----------------------------------------------------------------------- Line label locations .. autodata:: geosoft.gxapi.MVU_FLIGHT_LOCATE_NONE :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_LOCATE_NONE .. autodata:: geosoft.gxapi.MVU_FLIGHT_LOCATE_END :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_LOCATE_END .. autodata:: geosoft.gxapi.MVU_FLIGHT_LOCATE_ABOVE :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_LOCATE_ABOVE .. autodata:: geosoft.gxapi.MVU_FLIGHT_LOCATE_BELOW :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_LOCATE_BELOW .. autodata:: geosoft.gxapi.MVU_FLIGHT_DIRECTION :annotation: .. autoattribute:: geosoft.gxapi.MVU_FLIGHT_DIRECTION .. _MVU_VOX_SURFACE_METHOD: MVU_VOX_SURFACE_METHOD constants ----------------------------------------------------------------------- TODO .. autodata:: geosoft.gxapi.MVU_VOX_SURFACE_METHOD_MARCHING_CUBES :annotation: .. autoattribute:: geosoft.gxapi.MVU_VOX_SURFACE_METHOD_MARCHING_CUBES .. _MVU_VOX_SURFACE_OPTION: MVU_VOX_SURFACE_OPTION constants ----------------------------------------------------------------------- TODO .. autodata:: geosoft.gxapi.MVU_VOX_SURFACE_OPTION_OPEN :annotation: .. 
autoattribute:: geosoft.gxapi.MVU_VOX_SURFACE_OPTION_OPEN .. autodata:: geosoft.gxapi.MVU_VOX_SURFACE_OPTION_CLOSED :annotation: .. autoattribute:: geosoft.gxapi.MVU_VOX_SURFACE_OPTION_CLOSED .. _MVU_TEXTBOX: MVU_TEXTBOX constants ----------------------------------------------------------------------- Type of Box .. autodata:: geosoft.gxapi.MVU_TEXTBOX_LEFT :annotation: .. autoattribute:: geosoft.gxapi.MVU_TEXTBOX_LEFT .. autodata:: geosoft.gxapi.MVU_TEXTBOX_CENTER :annotation: .. autoattribute:: geosoft.gxapi.MVU_TEXTBOX_CENTER .. autodata:: geosoft.gxapi.MVU_TEXTBOX_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.MVU_TEXTBOX_RIGHT .. _MVU_VPOINT: MVU_VPOINT constants ----------------------------------------------------------------------- Head Acuteness .. autodata:: geosoft.gxapi.MVU_VPOINT_SHARP :annotation: .. autoattribute:: geosoft.gxapi.MVU_VPOINT_SHARP .. autodata:: geosoft.gxapi.MVU_VPOINT_MEDIUM :annotation: .. autoattribute:: geosoft.gxapi.MVU_VPOINT_MEDIUM .. autodata:: geosoft.gxapi.MVU_VPOINT_BLUNT :annotation: .. autoattribute:: geosoft.gxapi.MVU_VPOINT_BLUNT .. _MVU_VPOS: MVU_VPOS constants ----------------------------------------------------------------------- Head Position .. autodata:: geosoft.gxapi.MVU_VPOS_HEAD :annotation: .. autoattribute:: geosoft.gxapi.MVU_VPOS_HEAD .. autodata:: geosoft.gxapi.MVU_VPOS_MIDDLE :annotation: .. autoattribute:: geosoft.gxapi.MVU_VPOS_MIDDLE .. autodata:: geosoft.gxapi.MVU_VPOS_TAIL :annotation: .. autoattribute:: geosoft.gxapi.MVU_VPOS_TAIL .. _MVU_VSIZE: MVU_VSIZE constants ----------------------------------------------------------------------- Head Size .. autodata:: geosoft.gxapi.MVU_VSIZE_NOHEAD :annotation: .. autoattribute:: geosoft.gxapi.MVU_VSIZE_NOHEAD .. autodata:: geosoft.gxapi.MVU_VSIZE_SMALLHEAD :annotation: .. autoattribute:: geosoft.gxapi.MVU_VSIZE_SMALLHEAD .. autodata:: geosoft.gxapi.MVU_VSIZE_MEDIUMHEAD :annotation: .. autoattribute:: geosoft.gxapi.MVU_VSIZE_MEDIUMHEAD .. 
autodata:: geosoft.gxapi.MVU_VSIZE_LARGEHEAD :annotation: .. autoattribute:: geosoft.gxapi.MVU_VSIZE_LARGEHEAD .. autodata:: geosoft.gxapi.MVU_VSIZE_NOTAIL :annotation: .. autoattribute:: geosoft.gxapi.MVU_VSIZE_NOTAIL .. _MVU_VSTYLE: MVU_VSTYLE constants ----------------------------------------------------------------------- Head Style .. autodata:: geosoft.gxapi.MVU_VSTYLE_LINES :annotation: .. autoattribute:: geosoft.gxapi.MVU_VSTYLE_LINES .. autodata:: geosoft.gxapi.MVU_VSTYLE_BARB :annotation: .. autoattribute:: geosoft.gxapi.MVU_VSTYLE_BARB .. autodata:: geosoft.gxapi.MVU_VSTYLE_TRIANGLE :annotation: .. autoattribute:: geosoft.gxapi.MVU_VSTYLE_TRIANGLE <file_sep>/docs/templates/geosoft.gxpy.mod.rst geosoft.gxpy.{{ module[0] }} submodule ============================================= .. automodule:: geosoft.gxpy.{{ module[0] }} :members: :undoc-members: :show-inheritance: <file_sep>/docs/GXGU.rst .. _GXGU: GXGU class ================================== .. autoclass:: geosoft.gxapi.GXGU :members: .. _EM_ERR: EM_ERR constants ----------------------------------------------------------------------- Error Scaling .. autodata:: geosoft.gxapi.EM_ERR_UNSCALED :annotation: .. autoattribute:: geosoft.gxapi.EM_ERR_UNSCALED .. autodata:: geosoft.gxapi.EM_ERR_LOGSCALING :annotation: .. autoattribute:: geosoft.gxapi.EM_ERR_LOGSCALING .. _EM_INV: EM_INV constants ----------------------------------------------------------------------- Type of Inversion .. autodata:: geosoft.gxapi.EM_INV_INPHASE :annotation: .. autoattribute:: geosoft.gxapi.EM_INV_INPHASE .. autodata:: geosoft.gxapi.EM_INV_QUADRATURE :annotation: .. autoattribute:: geosoft.gxapi.EM_INV_QUADRATURE .. autodata:: geosoft.gxapi.EM_INV_BOTH :annotation: .. autoattribute:: geosoft.gxapi.EM_INV_BOTH .. _EMPLATE_DOMAIN: EMPLATE_DOMAIN constants ----------------------------------------------------------------------- Type of Domain .. autodata:: geosoft.gxapi.EMPLATE_FREQUENCY :annotation: .. 
autoattribute:: geosoft.gxapi.EMPLATE_FREQUENCY .. autodata:: geosoft.gxapi.EMPLATE_TIME :annotation: .. autoattribute:: geosoft.gxapi.EMPLATE_TIME .. _EMPLATE_TX: EMPLATE_TX constants ----------------------------------------------------------------------- Orientation .. autodata:: geosoft.gxapi.EMPLATE_TX_X :annotation: .. autoattribute:: geosoft.gxapi.EMPLATE_TX_X .. autodata:: geosoft.gxapi.EMPLATE_TX_Y :annotation: .. autoattribute:: geosoft.gxapi.EMPLATE_TX_Y .. autodata:: geosoft.gxapi.EMPLATE_TX_Z :annotation: .. autoattribute:: geosoft.gxapi.EMPLATE_TX_Z .. _GU_DAARC500_DATATYPE: GU_DAARC500_DATATYPE constants ----------------------------------------------------------------------- Supported serial data types for import .. autodata:: geosoft.gxapi.GU_DAARC500_UNKNOWN :annotation: .. autoattribute:: geosoft.gxapi.GU_DAARC500_UNKNOWN .. autodata:: geosoft.gxapi.GU_DAARC500_GENERIC_ASCII :annotation: .. autoattribute:: geosoft.gxapi.GU_DAARC500_GENERIC_ASCII .. autodata:: geosoft.gxapi.GU_DAARC500_GPS :annotation: .. autoattribute:: geosoft.gxapi.GU_DAARC500_GPS .. autodata:: geosoft.gxapi.GU_DAARC500_GR820_256D :annotation: .. autoattribute:: geosoft.gxapi.GU_DAARC500_GR820_256D .. autodata:: geosoft.gxapi.GU_DAARC500_GR820_256DU :annotation: .. autoattribute:: geosoft.gxapi.GU_DAARC500_GR820_256DU .. autodata:: geosoft.gxapi.GU_DAARC500_GR820_512DU :annotation: .. autoattribute:: geosoft.gxapi.GU_DAARC500_GR820_512DU .. autodata:: geosoft.gxapi.GU_DAARC500_NAV :annotation: .. autoattribute:: geosoft.gxapi.GU_DAARC500_NAV .. _PEAKEULER_XY: PEAKEULER_XY constants ----------------------------------------------------------------------- Fit Options .. autodata:: geosoft.gxapi.PEAKEULER_XY_NOFIT :annotation: .. autoattribute:: geosoft.gxapi.PEAKEULER_XY_NOFIT .. autodata:: geosoft.gxapi.PEAKEULER_XY_FIT :annotation: .. 
autoattribute:: geosoft.gxapi.PEAKEULER_XY_FIT <file_sep>/examples/tutorial/Geosoft Databases/import_csv_split.py import numpy as np import geosoft.gxpy as gxpy import geosoft.gxpy.gdb as gxdb import geosoft.gxapi as gxapi #create context gxc = gxpy.gx.GXpy() # Open csv-format data file and skip the first line, which is a comment line f = open('mag_data.csv', 'r') f.readline() # the second line contains the channel/field names, from which we create a list of channel names channel_names = f.readline().strip().split(',') #the rest of the file contains data, which we load into a numpy float array data = np.loadtxt(f, delimiter=',') #create a new database from list of channels and numpy data. All data is stored in a single line. gdb = gxdb.Geosoft_gdb.new('mag_data_split', overwrite=True) line_name = gxdb.create_line_name() gdb.write_line(line_name, data, channel_names) # set the coordinate system to 'NAD 83 / UTM zone 15N' gdb.coordinate_system = 'NAD83 / UTM zone 15N' # set the mag data units to 'nT' gxdb.Channel(gdb, 'mag').unit_of_measure = 'nT' print(list(gdb.list_lines())) # ['L0'] print(list(gdb.list_channels())) # ['mag', 'X', 'Y', 'Z'] print(gdb.xyz_channels) # ('X', 'Y', 'Z') # split the line into sections knowing lines are E-W, and separated by 200 m. # see https://geosoftinc.github.io/gxpy/9.2/python/GXDU.html?highlight=split_line_xy2#geosoft.gxapi.GXDU.split_line_xy2 # starting line number for split lines split_line_number_start = gxapi.int_ref() split_line_number_start.value = 1 # create instances to the lines and channels needed by the split_line_xy2 function line = gxdb.Line(gdb, 'L0') x_channel = gxdb.Channel(gdb, 'X') y_channel = gxdb.Channel(gdb, 'Y') # lock items as required line.lock = gxdb.SYMBOL_LOCK_READ x_channel.lock = gxdb.SYMBOL_LOCK_WRITE y_channel.lock = gxdb.SYMBOL_LOCK_WRITE # split the original line into segments, based on a lateral distance tolerance of 100 m. 
gxapi.GXDU.split_line_xy2( gdb.gxdb, line.symbol, gxdb.Channel(gdb, 'X').symbol, gxdb.Channel(gdb, 'Y').symbol, 1, 100.0, gxapi.rDUMMY, gxapi.DU_SPLITLINE_SEQUENTIAL, split_line_number_start, 1, 1) #delete the original line as it is no longer needed gdb.delete_line('L0') # print a list of the new lines print(list(gdb.list_lines())) # ['L1', 'L2', 'L3', 'L4', 'L5', 'L6', ... exit() <file_sep>/geosoft/gxpy/system.py """ Geosoft system functions. .. note:: Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_system.py>`_ """ import time import inspect import os import gc import shutil import zipfile import threading import sys from itertools import count import geosoft __version__ = geosoft.__version__ def translate(s): """ Translate string to user language.""" return s def _t(s): return translate(s) def _logit(fn, *args, **kw): """function console printing decorator""" def logger(*args, **kw): ret = fn(*args, **kw) print('{} called with args({}), kwargs({}); returns({})'.format(fn.__name__, args, kw, ret)) return ret return logger def app_name(): """ Returns application script name. .. versionadded:: 9.1 """ return os.path.normpath(sys.argv[0]) def func_name(stack=0): """ Returns function name. :param stack: depth into the calling stack, 0 (default) is this function, 1 is parent, etc. :returns: function name, None if too deep into the stack .. versionchanged:: 9.2 added stack .. versionadded:: 9.1 """ try: func = inspect.stack()[stack+1][3] return func except: return None def call_location(stack=0): """ Returns function call location including file and line number as a string :param stack: depth into the calling stack, 0 (default) is this function, 1 is parent, etc. :returns: string formatted as '<file>, line XX in <function>', empty string if too deep into the stack .. 
versionadded:: 9.2 """ try: stack_location = inspect.stack()[stack+1] file, line, func = stack_location[1:4] return '{}, line {} in function {}.'.format(file, line, func) except: return '' def _parallel_foreach(f, l, threads=3, return_=False): """ Apply f to each element of l, in parallel, called by parallel_map(). From: http://wiki.scipy.org/Cookbook/Multithreading """ if threads > 1: iteratorlock = threading.Lock() exceptions = [] if return_: n = 0 d = {} i = zip(count(), l.__iter__()) else: i = l.__iter__() def runall(): while True: iteratorlock.acquire() try: try: if exceptions: return v = next(i) finally: iteratorlock.release() except StopIteration: return try: if return_: n, x = v d[n] = f(x) else: f(v) except: e = sys.exc_info() iteratorlock.acquire() try: exceptions.append(e) finally: iteratorlock.release() threadlist = [threading.Thread(target=runall) for j in range(threads)] for t in threadlist: t.start() for t in threadlist: t.join() if exceptions: a, b, c = exceptions[0] raise (a, b, c) if return_: r = sorted(d.items()) return [v for (n, v) in r] else: if return_: return [f(v) for v in l] else: for v in l: f(v) return def parallel_map(f, l, threads=None): """ A parallel equivalent of the map() built-in Python function (it supports only one iterable argument though). :param f: function to run in parallel f(). Must be thread-safe, of course. :param l: iterable list of arguments to pass to each thread. Use tuples for multiple arguments. :param threads: number of threads to use, default is number of cores on computer :returns: list of results from each call to f(), in order of iterable l. :example: .. 
code:: import gxpy.system as gsys def func(ab): ''' :param ab: tuple (a,b) :returns: a+b ''' return ab[0] + ab[1] # create list of 20 argument sets to calculate in parallel data = [(1+i, 2+i) for i in range(20)] # print results of running function in parallel print(gsys.parallel_map(func, data)) # prints: [3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23, 25, 27, 29, 31, 33, 35, 37, 39, 41] # same thing using a lambda function print(gsys.parallel_map(lambda ab: ab[0] + ab[1], data)) .. versionadded:: 9.1 """ if threads is None: threads = os.cpu_count() return _parallel_foreach(f, l, threads=threads, return_=True) ############# # classes class GXSysException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.system`. .. versionadded:: 9.1 """ pass def wait_on_file(fileName, wait=100, retries=10): """ Working with large files on systems that cache the file can cause a situation where the file is not yet completely written out before an attempt is made to open a file that has just been closed. Call this function to wait for the file to be available. Best to do this right after you know that you may have written out a large file, or in a try/except around a file open. :param fileName: :wait: time in milliseconds to wait between retries :retries: maximum number of retries :raises: GX_SysException if fail to get read access to the file. .. versionadded:: 9.1 """ tries = 0 while True: if os.access(fileName, os.W_OK): return if tries >= retries: raise GXSysException(_t('Unable to access {}').format(fileName)) tries += 1 time.sleep(wait / 1000.0) def _unzip(zip_file_name, folder): with zipfile.ZipFile(zip_file_name) as zf: zf.extractall(folder) files = zf.namelist() return files def unzip(zip_file_name, folder=None, report=None, checkready=25): """ Decompress and write the content of a zip file to a folder. 
:param zip_file_name: zip file name, must have extension :param folder: folder to write results, create it it does not exist :param report: ignored :param checkready: time in 1/10 second to check completion of each file, default 25 :returns: (folder that contains unzipped files, list of files) .. versionadded:: 9.1 """ # get full path zip_file_name = os.path.abspath(zip_file_name) # if no folder, determine based on zip file name if folder is None: folder = os.path.splitext(zip_file_name)[0] # create a folder if not os.path.exists(folder): os.makedirs(folder) files = None try: files = _unzip(zip_file_name, folder) except: raise GXSysException(_t('Cannot process zip file {}').format(zip_file_name)) finally: # check that files are ready for access if files and checkready > 0: for n in files: wait_on_file(os.path.join(folder, n), wait=100, retries=int(checkready * 100)) return folder, files def remove_dir(directory, wait=200, tries=10): """ Robust directory removal, with timed retries to allow for OS timing lags. If you need to use this you may have a coding error in which you are not properly releasing a resource. :param directory: directory name, must be a directory :param wait: wait between retries in milliseconds :param tries: number of times to retry .. 
versionadded:: 9.1 """ if os.path.isdir(directory): t = 0 while True: try: shutil.rmtree(directory) return except: t += 1 if t >= tries: raise time.sleep(wait / 1000.0) <file_sep>/geosoft/gxpy/tests/test_grid_fft.py import unittest import os import numpy as np import math import geosoft.gxpy.system as gsys import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.grid_fft as gxfft from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'testgrids.zip'), folder=cls._gx.temp_folder()) cls.mag = os.path.join(cls.folder, 'mag.grd') def test_create(self): self.start() with gxfft.GridFFT(self.mag) as fft: fft.result_grid() pspec = fft.radially_averaged_spectrum() self.assertEqual(len(pspec), 169) self.assertAlmostEqual(pspec[1, gxfft.I_WAVENUMBER], 1000.0 / ((fft.source_transform.nx - 2) * fft.source_transform.dx)) self.assertAlmostEqual(fft.du, 1.0 / ((fft.source_transform.nx - 2) * fft.source_transform.dx)) self.assertAlmostEqual(fft.dv, 1.0 / (fft.source_transform.ny * fft.source_transform.dy)) def test_create_MC(self): self.start() with gxfft.GridFFT(self.mag, fill_method=gxfft.FILL_MINIMUM_CURVATURE) as fft: fft.result_grid() pspec = fft.radially_averaged_spectrum() self.assertEqual(len(pspec), 169) self.assertAlmostEqual(pspec[1, gxfft.I_WAVENUMBER], 1000.0 / ((fft.source_transform.nx - 2) * fft.source_transform.dx)) self.assertAlmostEqual(fft.du, 1.0 / ((fft.source_transform.nx - 2) * fft.source_transform.dx)) self.assertAlmostEqual(fft.dv, 1.0 / (fft.source_transform.ny * fft.source_transform.dy)) def test_filter(self): self.start() gxgrd.Grid.open(self.mag, mode=gxgrd.FILE_READWRITE).coordinate_system='NAD27 / UTM zone 15N' with gxfft.GridFFT(self.mag) as fft: fft.filter(filters=['CNUP 500']) up = fft.result_grid(file_name='result', overwrite=True) self.assertEqual(str(up.coordinate_system), 'NAD27 / UTM zone 15N') 
self.assertAlmostEqual(up.statistics()['variance'], 15442.23622462059, 0) fft.filter(filters=['DRVZ 1'], trn=gxfft.TRN_FILTERED) vd = fft.result_grid(file_name='up500vd', overwrite=True) self.assertAlmostEqual(vd.statistics()['variance'], 0.02167, 3) pspec = fft.radially_averaged_spectrum() self.assertAlmostEqual(pspec[0, gxfft.I_WAVENUMBER], 0.) self.assertAlmostEqual(pspec[1, gxfft.I_WAVENUMBER], fft.dv * 1000., 6) up.close(discard=True) vd.close(discard=True) def test_filter_double(self): self.start() grd = gxgrd.Grid.open(self.mag, mode=gxgrd.FILE_READWRITE, dtype=np.float64) with gxfft.GridFFT(grd) as fft: fft.filter(filters=['CNUP 500'], mag_inclination=-66.58, mag_declination='8.26', mag_strength=59041) up = fft.result_grid(file_name='result', overwrite=True) self.assertEqual(up.dtype, np.float64) self.assertAlmostEqual(up.statistics()['variance'], 15441.51060320867, 0) fft.filter(filters=['DRVZ 1'], trn=gxfft.TRN_FILTERED) vd = fft.result_grid(file_name='up500vd', overwrite=True) self.assertAlmostEqual(vd.statistics()['variance'], 0.0217, 3) pspec = fft.radially_averaged_spectrum(gxfft.TRN_FILTERED) self.assertAlmostEqual(pspec[0, gxfft.I_WAVENUMBER], 0.) self.assertAlmostEqual(pspec[1, gxfft.I_LOG_POWER], 15.332930000000001, 0) up.close(discard=True) vd.close(discard=True) def test_spectrum_grids(self): self.start() with gxfft.GridFFT(self.mag) as fft: source_spec = fft.spectrum_grid() self.assertAlmostEqual(source_spec.statistics()['variance'], 16.017015790664406, 0) fft.filter([('CNUP', 1000)]) filter_spec = fft.spectrum_grid(gxfft.TRN_FILTERED) self.assertAlmostEqual(filter_spec.statistics()['variance'], 136.6656119975559, 0) def test_custom_filter(self): self.start() distance = 500 with gxfft.GridFFT(self.mag, buffer=10, expand=15) as fft: for vrow in range(fft.nv): u, v, r, i = fft.read_uv_row(vrow) w = np.sqrt(u**2 + v**2) continuation_filter = np.exp(-2. 
* math.pi * distance * w) r *= continuation_filter i *= continuation_filter fft.write_uv_row(r, i, vrow, trn=gxfft.TRN_FILTERED) self.assertAlmostEqual(fft.result_grid().statistics()['sd'], 99.68591520777781, 0) ############################################################################################### if __name__ == '__main__': unittest.main() <file_sep>/docs/GXEDB.rst .. _GXEDB: GXEDB class ================================== .. autoclass:: geosoft.gxapi.GXEDB :members: .. _MAX_PROF_WND: MAX_PROF_WND constants ----------------------------------------------------------------------- The following value should be kept synchronized with the value defined in src\\geoguilib\\stdafx.h .. autodata:: geosoft.gxapi.MAX_PROF_WND :annotation: .. autoattribute:: geosoft.gxapi.MAX_PROF_WND .. _EDB_PATH: EDB_PATH constants ----------------------------------------------------------------------- Four forms .. autodata:: geosoft.gxapi.EDB_PATH_FULL :annotation: .. autoattribute:: geosoft.gxapi.EDB_PATH_FULL .. autodata:: geosoft.gxapi.EDB_PATH_DIR :annotation: .. autoattribute:: geosoft.gxapi.EDB_PATH_DIR .. autodata:: geosoft.gxapi.EDB_PATH_NAME_EXT :annotation: .. autoattribute:: geosoft.gxapi.EDB_PATH_NAME_EXT .. autodata:: geosoft.gxapi.EDB_PATH_NAME :annotation: .. autoattribute:: geosoft.gxapi.EDB_PATH_NAME .. _EDB_PROF: EDB_PROF constants ----------------------------------------------------------------------- Profile data .. autodata:: geosoft.gxapi.EDB_PROF_I_CHANNEL :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_CHANNEL .. autodata:: geosoft.gxapi.EDB_PROF_I_LINE_STYLE :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_LINE_STYLE .. autodata:: geosoft.gxapi.EDB_PROF_I_LINE_WEIGHT :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_LINE_WEIGHT .. autodata:: geosoft.gxapi.EDB_PROF_I_SYMBOL :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_SYMBOL .. autodata:: geosoft.gxapi.EDB_PROF_I_SYMBOL_WEIGHT :annotation: .. 
autoattribute:: geosoft.gxapi.EDB_PROF_I_SYMBOL_WEIGHT .. autodata:: geosoft.gxapi.EDB_PROF_I_COLOR :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_COLOR .. autodata:: geosoft.gxapi.EDB_PROF_I_WRAP :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_WRAP .. autodata:: geosoft.gxapi.EDB_PROF_I_BREAK_ON_DUMMY :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_BREAK_ON_DUMMY .. autodata:: geosoft.gxapi.EDB_PROF_I_GRID_LINE :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_GRID_LINE .. autodata:: geosoft.gxapi.EDB_PROF_R_GRID_LINE_INTERVAL :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_R_GRID_LINE_INTERVAL .. autodata:: geosoft.gxapi.EDB_PROF_I_LOG :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_LOG .. autodata:: geosoft.gxapi.EDB_PROF_R_LOG_MINIMUM :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_R_LOG_MINIMUM .. autodata:: geosoft.gxapi.EDB_PROF_I_SAMESCALE :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_SAMESCALE .. autodata:: geosoft.gxapi.EDB_PROF_I_SOURCELINE :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_SOURCELINE .. autodata:: geosoft.gxapi.EDB_PROF_I_SCALEOPTION :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_SCALEOPTION .. autodata:: geosoft.gxapi.EDB_PROF_I_SAMERANGE :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROF_I_SAMERANGE .. _EDB_PROFILE_SCALE: EDB_PROFILE_SCALE constants ----------------------------------------------------------------------- Profile Scale Options .. autodata:: geosoft.gxapi.EDB_PROFILE_SCALE_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROFILE_SCALE_LINEAR .. autodata:: geosoft.gxapi.EDB_PROFILE_SCALE_LOG :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROFILE_SCALE_LOG .. autodata:: geosoft.gxapi.EDB_PROFILE_SCALE_LOGLINEAR :annotation: .. autoattribute:: geosoft.gxapi.EDB_PROFILE_SCALE_LOGLINEAR .. 
_EDB_REMOVE: EDB_REMOVE constants ----------------------------------------------------------------------- How to handle pending changes in document .. autodata:: geosoft.gxapi.EDB_REMOVE_SAVE :annotation: .. autoattribute:: geosoft.gxapi.EDB_REMOVE_SAVE .. autodata:: geosoft.gxapi.EDB_REMOVE_PROMPT :annotation: .. autoattribute:: geosoft.gxapi.EDB_REMOVE_PROMPT .. autodata:: geosoft.gxapi.EDB_REMOVE_DISCARD :annotation: .. autoattribute:: geosoft.gxapi.EDB_REMOVE_DISCARD .. _EDB_UNLOAD: EDB_UNLOAD constants ----------------------------------------------------------------------- What type of prompt .. autodata:: geosoft.gxapi.EDB_UNLOAD_NO_PROMPT :annotation: .. autoattribute:: geosoft.gxapi.EDB_UNLOAD_NO_PROMPT .. autodata:: geosoft.gxapi.EDB_UNLOAD_SINGLE_PROMPT :annotation: .. autoattribute:: geosoft.gxapi.EDB_UNLOAD_SINGLE_PROMPT .. autodata:: geosoft.gxapi.EDB_UNLOAD_MULTI_PROMPT :annotation: .. autoattribute:: geosoft.gxapi.EDB_UNLOAD_MULTI_PROMPT .. _EDB_WINDOW_POSITION: EDB_WINDOW_POSITION constants ----------------------------------------------------------------------- Window Positioning Options .. autodata:: geosoft.gxapi.EDB_WINDOW_POSITION_DOCKED :annotation: .. autoattribute:: geosoft.gxapi.EDB_WINDOW_POSITION_DOCKED .. autodata:: geosoft.gxapi.EDB_WINDOW_POSITION_FLOATING :annotation: .. autoattribute:: geosoft.gxapi.EDB_WINDOW_POSITION_FLOATING .. _EDB_WINDOW_STATE: EDB_WINDOW_STATE constants ----------------------------------------------------------------------- Window State Options .. autodata:: geosoft.gxapi.EDB_WINDOW_RESTORE :annotation: .. autoattribute:: geosoft.gxapi.EDB_WINDOW_RESTORE .. autodata:: geosoft.gxapi.EDB_WINDOW_MINIMIZE :annotation: .. autoattribute:: geosoft.gxapi.EDB_WINDOW_MINIMIZE .. autodata:: geosoft.gxapi.EDB_WINDOW_MAXIMIZE :annotation: .. autoattribute:: geosoft.gxapi.EDB_WINDOW_MAXIMIZE .. 
_EDB_YAXIS_DIRECTION: EDB_YAXIS_DIRECTION constants ----------------------------------------------------------------------- Window State Options .. autodata:: geosoft.gxapi.EDB_YAXIS_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.EDB_YAXIS_NORMAL .. autodata:: geosoft.gxapi.EDB_YAXIS_INVERTED :annotation: .. autoattribute:: geosoft.gxapi.EDB_YAXIS_INVERTED <file_sep>/docs/GXVOXD.rst .. _GXVOXD: GXVOXD class ================================== .. autoclass:: geosoft.gxapi.GXVOXD :members: .. _VOXELRENDER_MODE: VOXELRENDER_MODE constants ----------------------------------------------------------------------- Render Modes .. autodata:: geosoft.gxapi.VOXELRENDER_FILL :annotation: .. autoattribute:: geosoft.gxapi.VOXELRENDER_FILL .. autodata:: geosoft.gxapi.VOXELRENDER_EDGES :annotation: .. autoattribute:: geosoft.gxapi.VOXELRENDER_EDGES .. autodata:: geosoft.gxapi.VOXELRENDER_FILL_EDGES :annotation: .. autoattribute:: geosoft.gxapi.VOXELRENDER_FILL_EDGES .. autodata:: geosoft.gxapi.VOXELRENDER_SMOOTH :annotation: .. autoattribute:: geosoft.gxapi.VOXELRENDER_SMOOTH <file_sep>/geosoft/gxapi/GXMULTIGRID3DUTIL.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXMULTIGRID3D import GXMULTIGRID3D ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMULTIGRID3DUTIL(gxapi_cy.WrapMULTIGRID3DUTIL): """ GXMULTIGRID3DUTIL class. High Performance 3D Grid. 
""" def __init__(self, handle=0): super(GXMULTIGRID3DUTIL, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMULTIGRID3DUTIL <geosoft.gxapi.GXMULTIGRID3DUTIL>` :returns: A null `GXMULTIGRID3DUTIL <geosoft.gxapi.GXMULTIGRID3DUTIL>` :rtype: GXMULTIGRID3DUTIL """ return GXMULTIGRID3DUTIL() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def import_from_xyz(cls, name, ra, type, ipj): """ Import XYZ file into a Multi-Voxset :param name: Name of output Voxel file :param ra: `GXRA <geosoft.gxapi.GXRA>` To import from :param type: Data Type :ref:`GS_TYPES` :param ipj: Projection :type name: str :type ra: GXRA :type type: int :type ipj: GXIPJ .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._import_from_xyz(GXContext._get_tls_geo(), name.encode(), ra, type, ipj) @classmethod def export_to_xyz(cls, grid3d_file, xyz, dir, rev_x, rev_y, rev_z, dummies): """ Export a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to an XYZ File :param grid3d_file: Input Voxel file :param xyz: File Name :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param dummies: Write Dummies? :type grid3d_file: str :type xyz: str :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type dummies: bool .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._export_to_xyz(GXContext._get_tls_geo(), grid3d_file.encode(), xyz.encode(), dir, rev_x, rev_y, rev_z, dummies) @classmethod def export_to_binary(cls, grid3d_file, binary_file, dir, rev_x, rev_y, rev_z, swap, output_type): """ Export contents of `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to a Binary File. :param grid3d_file: Input Voxel file :param binary_file: Binary file to write to :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param swap: Swap Bytes? :param output_type: Output Type (Geosoft Type) :type grid3d_file: str :type binary_file: str :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type swap: bool :type output_type: int .. versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._export_to_binary(GXContext._get_tls_geo(), grid3d_file.encode(), binary_file.encode(), dir, rev_x, rev_y, rev_z, swap, output_type) @classmethod def export_to_binary_ex(cls, grid3d_file, binary_file, dir, rev_x, rev_y, rev_z, swap, dummy, output_type): """ Export contents of `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to a Binary File, with dummy replacement. :param grid3d_file: Input Voxel file :param binary_file: Binary file to write to :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param swap: Swap Bytes? :param dummy: Replace dummy values with this value on export :param output_type: Output Type (Geosoft Type) :type grid3d_file: str :type binary_file: str :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type swap: bool :type dummy: float :type output_type: int .. 
versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._export_to_binary_ex(GXContext._get_tls_geo(), grid3d_file.encode(), binary_file.encode(), dir, rev_x, rev_y, rev_z, swap, dummy, output_type) @classmethod def export_to_xml(cls, grid3d_file, xml_file): """ Export a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to XML :param grid3d_file: Voxel file :param xml_file: XML file :type grid3d_file: str :type xml_file: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._export_to_xml(GXContext._get_tls_geo(), grid3d_file.encode(), xml_file.encode()) @classmethod def check_equal_to_legacy_voxel(cls, grid3d_file, legacy_grid3d_file): """ Compare `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to Legacy Voxel :param grid3d_file: Voxel file :param legacy_grid3d_file: Legacy Voxel file :type grid3d_file: str :type legacy_grid3d_file: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._check_equal_to_legacy_voxel(GXContext._get_tls_geo(), grid3d_file.encode(), legacy_grid3d_file.encode()) @classmethod def import_from_ubc(cls, name, mesh, mod, dummy, ipj): """ Import UBC file into a MultiVoxset :param name: Name of output `GXVOX <geosoft.gxapi.GXVOX>` :param mesh: Name of UBC Mesh File :param mod: Name of UBC Mod File :param dummy: Dummy Value :param ipj: Projection :type name: str :type mesh: str :type mod: str :type dummy: float :type ipj: GXIPJ .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._import_from_ubc(GXContext._get_tls_geo(), name.encode(), mesh.encode(), mod.encode(), dummy, ipj) @classmethod def import_from_gocad(cls, name, header, property, ipj, orientation): """ Imports a MultiVoxset from a GOCAD File :param name: Name of output `GXVOX <geosoft.gxapi.GXVOX>` :param header: Name of GOCAD Voxel file :param property: Propert name to import :param orientation: :ref:`GOCAD_ORIENTATION` OBSOLETE as of 9.7. The UVW axes and ZPOSITIVE values are read from the *.vo header and are handled on import automatically. By default ZPOSITIVE Elevation is assumed :type name: str :type header: str :type property: str :type ipj: GXIPJ :type orientation: int .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._import_from_gocad(GXContext._get_tls_geo(), name.encode(), header.encode(), property.encode(), ipj, orientation) @classmethod def list_properties_gocad(cls, header, lst): """ List all the properties available in this GOCAD file. :param header: Name of GOCAD Voxel file :param lst: List object to populate :type header: str :type lst: GXLST .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._list_properties_gocad(GXContext._get_tls_geo(), header.encode(), lst) @classmethod def import_from_gdb(cls, grid3d_file, db, symb): """ Imports from a Geosoft Database :param grid3d_file: Name of output Voxel file :param db: `GXDB <geosoft.gxapi.GXDB>` To import from :param symb: Symbol to import data from :type grid3d_file: str :type db: GXDB :type symb: int .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._import_from_gdb(GXContext._get_tls_geo(), grid3d_file.encode(), db, symb) @classmethod def import_from_gdb_ignore_stored_voxel_geometry(cls, grid3d_file, db, symb): """ Imports from a Geosoft Database, but ignores any stored internal geometry :param grid3d_file: Name of output Voxel file :param db: `GXDB <geosoft.gxapi.GXDB>` To import from :param symb: Symbol to import data from :type grid3d_file: str :type db: GXDB :type symb: int .. versionadded:: 2021.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._import_from_gdb_ignore_stored_voxel_geometry(GXContext._get_tls_geo(), grid3d_file.encode(), db, symb) @classmethod def database_contains_voxel_geometry(cls, db): """ Returns 1 if the original voxel geometry is stored inside the database :param db: `GXDB <geosoft.gxapi.GXDB>` To import from :type db: GXDB :rtype: int .. 
versionadded:: 2021.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMULTIGRID3DUTIL._database_contains_voxel_geometry(GXContext._get_tls_geo(), db) return ret_val @classmethod def import_from_vector_gdb(cls, grid3d_file, db, vector_type, symb_x, symb_y, symb_z, inc, dec): """ Imports from a Vector Geosoft Database :param grid3d_file: Voxel Name :param db: `GXDB <geosoft.gxapi.GXDB>` To import from :param vector_type: VECTOR_IMPORTImport XYZ, UVW or Amplitude/Inclination/Declination channels :param symb_x: Symbol to import X, U or Amplitude data from :param symb_y: Symbol to import Y, V or Inclination data from :param symb_z: Symbol to import Z, W or Declination data from :param inc: Inclination value for `VOX_VECTORVOX_UVW <geosoft.gxapi.VOX_VECTORVOX_UVW>` (-90° to 90°) :param dec: Declination value for `VOX_VECTORVOX_UVW <geosoft.gxapi.VOX_VECTORVOX_UVW>` (-180° to 180°) :type grid3d_file: str :type db: GXDB :type vector_type: int :type symb_x: int :type symb_y: int :type symb_z: int :type inc: float :type dec: float .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._import_from_vector_gdb(GXContext._get_tls_geo(), grid3d_file.encode(), db, vector_type, symb_x, symb_y, symb_z, inc, dec) @classmethod def export_to_segy(cls, multigrid3d_file, output_segy_filename, sample_interval): """ Export To SEGY :param multigrid3d_file: Input Voxel file :param output_segy_filename: Output Segy file :param sample_interval: Sampling Internal :type multigrid3d_file: str :type output_segy_filename: str :type sample_interval: float .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._export_to_segy(GXContext._get_tls_geo(), multigrid3d_file.encode(), output_segy_filename.encode(), sample_interval) @classmethod def export_to_gdb(cls, grid3d_file, db, chan, dir, rev_x, rev_y, rev_z, dummies): """ Export To GDB :param grid3d_file: Input Voxel file :param db: Database :param chan: Channel Name :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param dummies: Write Dummies? :type grid3d_file: str :type db: GXDB :type chan: str :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type dummies: bool .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._export_to_gdb(GXContext._get_tls_geo(), grid3d_file.encode(), db, chan.encode(), dir, rev_x, rev_y, rev_z, dummies) @classmethod def export_to_wa(cls, file_name, wa, dir, rev_x, rev_y, rev_z, dummy): """ Export To GDB :param file_name: Input Voxel file :param wa: `GXWA <geosoft.gxapi.GXWA>` File :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param dummy: The Dummy string to write :type file_name: str :type wa: GXWA :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type dummy: str .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._export_to_wa(GXContext._get_tls_geo(), file_name.encode(), wa, dir, rev_x, rev_y, rev_z, dummy.encode()) @classmethod def convert_double_to_vector(cls, x_file_name, y_file_name, z_file_name, out_file_name, inclination, declination, rotated): """ Convert 3 Double Voxels to a Vector Voxel :param x_file_name: Input X Voxel file :param y_file_name: Input Y Voxel file :param z_file_name: Input Z Voxel file :param out_file_name: Output Vector Voxel file :param inclination: Inclination :param declination: Declination :param rotated: Rotated? :type x_file_name: str :type y_file_name: str :type z_file_name: str :type out_file_name: str :type inclination: float :type declination: float :type rotated: bool .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._convert_double_to_vector(GXContext._get_tls_geo(), x_file_name.encode(), y_file_name.encode(), z_file_name.encode(), out_file_name.encode(), inclination, declination, rotated) @classmethod def convert_vector_to_double(cls, file_name, x_file_name, y_file_name, z_file_name, rotated): """ Convert a Vector Voxel to 3 double Voxels :param file_name: Input Vector Voxel file :param x_file_name: Output X Voxel file :param y_file_name: Output Y Voxel file :param z_file_name: Output Z Voxel file :param rotated: Rotated? :type file_name: str :type x_file_name: str :type y_file_name: str :type z_file_name: str :type rotated: bool .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._convert_vector_to_double(GXContext._get_tls_geo(), file_name.encode(), x_file_name.encode(), y_file_name.encode(), z_file_name.encode(), rotated) @classmethod def convert_vector_to_double_using_rotation(cls, file_name, x_file_name, y_file_name, z_file_name, inclination, declination): """ Convert a Vector Voxel to 3 double Voxels using an external rotation. Internal rotations are ignored. :param file_name: Input Vector Voxel file :param x_file_name: Output X Voxel file :param y_file_name: Output Y Voxel file :param z_file_name: Output Z Voxel file :param inclination: Inclination :param declination: Declination :type file_name: str :type x_file_name: str :type y_file_name: str :type z_file_name: str :type inclination: float :type declination: float .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._convert_vector_to_double_using_rotation(GXContext._get_tls_geo(), file_name.encode(), x_file_name.encode(), y_file_name.encode(), z_file_name.encode(), inclination, declination) @classmethod def convert_thematic_to_double(cls, input_grid3d_filename, translate_vv, output_grid3d_filename): """ Convert Thematic MultiVoxset to Double MultiVoxset :param input_grid3d_filename: Input grid3d filename :param translate_vv: Translation VV handle :param output_grid3d_filename: Output grid3d filename :type input_grid3d_filename: str :type translate_vv: GXVV :type output_grid3d_filename: str .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._convert_thematic_to_double(GXContext._get_tls_geo(), input_grid3d_filename.encode(), translate_vv, output_grid3d_filename.encode()) @classmethod def convert_double_to_thematic(cls, input_grid3d_filename, translate_vv, tpat, output_grid3d_filename): """ Convert Double MultiVoxset to Thematic MultiVoxset :param input_grid3d_filename: Input grid3d filename :param translate_vv: Translation VV handle :param tpat: `GXTPAT <geosoft.gxapi.GXTPAT>` object :param output_grid3d_filename: Output grid3d filename :type input_grid3d_filename: str :type translate_vv: GXVV :type tpat: GXTPAT :type output_grid3d_filename: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._convert_double_to_thematic(GXContext._get_tls_geo(), input_grid3d_filename.encode(), translate_vv, tpat, output_grid3d_filename.encode()) @classmethod def convert_velocity_to_density(cls, input_grid3d_filename, input_scaling_factor, input_lower_bound, input_upper_bound, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_grid3d_filename): """ Convert Velocity MultiVoxset to Density MultiVoxset :param input_grid3d_filename: Input grid3d filename :param input_scaling_factor: 1.0, if this grid3d is in meters per second. Otherwise, a value by which each input cell is multiplied to convert it into meters per second. :param input_lower_bound: Lower bound on velocity values, in meters per second. If the input value (after being pre-multiplied by dInputScalingFactor) is less than this value, the output cell value will be DUMMY. :param input_upper_bound: Upper bound on velocity values, in meters per second. 
If the input value (after being pre-multiplied by dInputScalingFactor) is greater than this value, the output cell value will be DUMMY. :param a5: Coefficient of fifth-order polynomial term. :param a4: Coefficient of fourth-order polynomial term. :param a3: Coefficient of third-order polynomial term. :param a2: Coefficient of second-order polynomial term. :param a1: Coefficient of first-order polynomial term. :param a0: Constant offset of output. :param output_scaling_factor: 1.0, to produce an output grid3d that has units of g/cm^3. If different units are desired, pass in a different value, which will be multiplied into each output grid3d cell. :param output_grid3d_filename: Output grid3d filename :type input_grid3d_filename: str :type input_scaling_factor: float :type input_lower_bound: float :type input_upper_bound: float :type a5: float :type a4: float :type a3: float :type a2: float :type a1: float :type a0: float :type output_scaling_factor: float :type output_grid3d_filename: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._convert_velocity_to_density(GXContext._get_tls_geo(), input_grid3d_filename.encode(), input_scaling_factor, input_lower_bound, input_upper_bound, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_grid3d_filename.encode()) @classmethod def convert_density_to_velocity(cls, input_grid3d_filename, input_scaling_factor, input_lower_bound, input_upper_bound, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_grid3d_filename): """ Convert Density MultiVoxset to Velocity MultiVoxset :param input_grid3d_filename: Input grid3d filename :param input_scaling_factor: 1.0, if this grid3d is in meters per second. Otherwise, a value by which each input cell is multiplied to convert it into meters per second. :param input_lower_bound: Lower bound on velocity values, in meters per second. 
If the input value (after being pre-multiplied by dInputScalingFactor) is less than this value, the output cell value will be DUMMY. :param input_upper_bound: Upper bound on velocity values, in meters per second. If the input value (after being pre-multiplied by dInputScalingFactor) is greater than this value, the output cell value will be DUMMY. :param a5: Coefficient of fifth-order polynomial term. :param a4: Coefficient of fourth-order polynomial term. :param a3: Coefficient of third-order polynomial term. :param a2: Coefficient of second-order polynomial term. :param a1: Coefficient of first-order polynomial term. :param a0: Constant offset of output. :param output_scaling_factor: 1.0, to produce an output grid3d that has units of g/cm^3. If different units are desired, pass in a different value, which will be multiplied into each output grid3d cell. :param output_grid3d_filename: Output grid3d filename :type input_grid3d_filename: str :type input_scaling_factor: float :type input_lower_bound: float :type input_upper_bound: float :type a5: float :type a4: float :type a3: float :type a2: float :type a1: float :type a0: float :type output_scaling_factor: float :type output_grid3d_filename: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._convert_density_to_velocity(GXContext._get_tls_geo(), input_grid3d_filename.encode(), input_scaling_factor, input_lower_bound, input_upper_bound, a5, a4, a3, a2, a1, a0, output_scaling_factor, output_grid3d_filename.encode()) @classmethod def get_gocad_location(cls, input_grid3d_filename, origin_x, origin_y, origin_z, vect_xx, vect_xy, vect_xz, vect_yx, vect_yy, vect_yz, vect_zx, vect_zy, vect_zz): """ Get the location of a grid3d with origin and scaled xyz vectors for use with GOCAD. 
:param input_grid3d_filename: Input grid3d filename :param origin_x: Origin X :param origin_y: Origin Y :param origin_z: Origin Z :param vect_xx: VectX X :param vect_xy: VectX Y :param vect_xz: VectX Z :param vect_yx: VectY X :param vect_yy: VectY Y :param vect_yz: VectY Z :param vect_zx: VectZ X :param vect_zy: VectZ Y :param vect_zz: VectZ Z :type input_grid3d_filename: str :type origin_x: float_ref :type origin_y: float_ref :type origin_z: float_ref :type vect_xx: float_ref :type vect_xy: float_ref :type vect_xz: float_ref :type vect_yx: float_ref :type vect_yy: float_ref :type vect_yz: float_ref :type vect_zx: float_ref :type vect_zy: float_ref :type vect_zz: float_ref .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ origin_x.value, origin_y.value, origin_z.value, vect_xx.value, vect_xy.value, vect_xz.value, vect_yx.value, vect_yy.value, vect_yz.value, vect_zx.value, vect_zy.value, vect_zz.value = gxapi_cy.WrapMULTIGRID3DUTIL._get_gocad_location(GXContext._get_tls_geo(), input_grid3d_filename.encode(), origin_x.value, origin_y.value, origin_z.value, vect_xx.value, vect_xy.value, vect_xz.value, vect_yx.value, vect_yy.value, vect_yz.value, vect_zx.value, vect_zy.value, vect_zz.value) @classmethod def create_double_constant(cls, name, value, ox, oy, oz, cell_x, cell_y, cell_z, size_x, size_y, size_z, ipj): """ Generate a double MultiVoxset with a constant value :param name: Name of output Voxel File :param value: Constant Value to use - DUMMY for a trully sparse grid3d :param ox: Origin X :param oy: Origin Y :param oz: Origin Z :param cell_x: Cell Size X :param cell_y: Cell Size Y :param cell_z: Cell Size Z :param size_x: Cell Count X :param size_y: Cell Count Y :param size_z: Cell Count Z :param ipj: Projection :type name: str :type value: float :type ox: float :type oy: float :type oz: float :type cell_x: float :type cell_y: float :type cell_z: float :type 
size_x: int
        :type size_y: int
        :type size_z: int
        :type ipj: GXIPJ

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapMULTIGRID3DUTIL._create_double_constant(GXContext._get_tls_geo(), name.encode(), value, ox, oy, oz, cell_x, cell_y, cell_z, size_x, size_y, size_z, ipj)



    @classmethod
    def create_double_constant_copy(cls, name, value, source_name):
        """
        Generate a double MultiVoxset with a constant value based on an input voxel

        :param name:         Name of output Voxel File
        :param value:        Constant Value to use - DUMMY for a truly sparse grid3d
        :param source_name:  Name of voxel to model
        :type name: str
        :type value: float
        :type source_name: str

        .. versionadded:: 9.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapMULTIGRID3DUTIL._create_double_constant_copy(GXContext._get_tls_geo(), name.encode(), value, source_name.encode())



    @classmethod
    def create_thematic_constant(cls, name, value, ox, oy, oz, cell_x, cell_y, cell_z, size_x, size_y, size_z, ipj):
        """
        Generate a thematic MultiVoxset with a constant value

        .. note:: The generated summary read "double"; this method creates a
           thematic voxset (``value`` is an int index, see the type list below).

        :param name:    Name of output Voxel File
        :param value:   Constant Value to use - DUMMY for a truly sparse grid3d
        :param ox:      Origin X
        :param oy:      Origin Y
        :param oz:      Origin Z
        :param cell_x:  Cell Size X
        :param cell_y:  Cell Size Y
        :param cell_z:  Cell Size Z
        :param size_x:  Cell Count X
        :param size_y:  Cell Count Y
        :param size_z:  Cell Count Z
        :param ipj:     Projection
        :type name: str
        :type value: int
        :type ox: float
        :type oy: float
        :type oz: float
        :type cell_x: float
        :type cell_y: float
        :type cell_z: float
        :type size_x: int
        :type size_y: int
        :type size_z: int
        :type ipj: GXIPJ

        ..
versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapMULTIGRID3DUTIL._create_thematic_constant(GXContext._get_tls_geo(), name.encode(), value, ox, oy, oz, cell_x, cell_y, cell_z, size_x, size_y, size_z, ipj)



    @classmethod
    def create_vector_constant(cls, name, value_x, value_y, value_z, ox, oy, oz, cell_x, cell_y, cell_z, size_x, size_y, size_z, ipj):
        """
        Generate a vector MultiVoxset with a constant value

        .. note:: The generated summary read "double"; this method creates a
           vector voxset (one constant per X/Y/Z component).

        :param name:     Name of output Voxel File
        :param value_x:  X Constant Value to use - DUMMY for a truly sparse grid3d
        :param value_y:  Y Constant Value to use - DUMMY for a truly sparse grid3d
        :param value_z:  Z Constant Value to use - DUMMY for a truly sparse grid3d
        :param ox:       Origin X
        :param oy:       Origin Y
        :param oz:       Origin Z
        :param cell_x:   Cell Size X
        :param cell_y:   Cell Size Y
        :param cell_z:   Cell Size Z
        :param size_x:   Cell Count X
        :param size_y:   Cell Count Y
        :param size_z:   Cell Count Z
        :param ipj:      Projection
        :type name: str
        :type value_x: float
        :type value_y: float
        :type value_z: float
        :type ox: float
        :type oy: float
        :type oz: float
        :type cell_x: float
        :type cell_y: float
        :type cell_z: float
        :type size_x: int
        :type size_y: int
        :type size_z: int
        :type ipj: GXIPJ

        ..
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._create_vector_constant(GXContext._get_tls_geo(), name.encode(), value_x, value_y, value_z, ox, oy, oz, cell_x, cell_y, cell_z, size_x, size_y, size_z, ipj) @classmethod def create_double_constant_vv(cls, name, value, ox, oy, oz, cx, cy, cz, ipj): """ Generate a double MultiVoxset with a constant value and non-uniform cell sizes :param name: Name of output Voxel :param value: The contant Value to fill with - DUMMY for a trully sparse grid3d :param ox: Origin X :param oy: Origin Y :param oz: Origin Z :param cx: Cell Sizes X :param cy: Cell Sizes Y :param cz: Cell Sizes Z :param ipj: Projection :type name: str :type value: float :type ox: float :type oy: float :type oz: float :type cx: GXVV :type cy: GXVV :type cz: GXVV :type ipj: GXIPJ .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._create_double_constant_vv(GXContext._get_tls_geo(), name.encode(), value, ox, oy, oz, cx, cy, cz, ipj) @classmethod def create_thematic_constant_vv(cls, name, value, ox, oy, oz, cx, cy, cz, ipj): """ Generate a double MultiVoxset with a constant value and non-uniform cell sizes :param name: Name of output Voxel :param value: The contant Value to fill with - DUMMY for a trully sparse grid3d :param ox: Origin X :param oy: Origin Y :param oz: Origin Z :param cx: Cell Sizes X :param cy: Cell Sizes Y :param cz: Cell Sizes Z :param ipj: Projection :type name: str :type value: int :type ox: float :type oy: float :type oz: float :type cx: GXVV :type cy: GXVV :type cz: GXVV :type ipj: GXIPJ .. 
versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapMULTIGRID3DUTIL._create_thematic_constant_vv(GXContext._get_tls_geo(), name.encode(), value, ox, oy, oz, cx, cy, cz, ipj)



    @classmethod
    def create_vector_constant_vv(cls, name, x_value, y_value, z_value, ox, oy, oz, cx, cy, cz, ipj):
        """
        Generate a vector MultiVoxset with a constant value and non-uniform cell sizes

        .. note:: The generated summary read "double"; this method creates a
           vector voxset (one constant per X/Y/Z component).

        :param name:     Name of output Voxel
        :param x_value:  The X constant Value to fill with - DUMMY for a truly sparse grid3d
        :param y_value:  The Y constant Value to fill with - DUMMY for a truly sparse grid3d
        :param z_value:  The Z constant Value to fill with - DUMMY for a truly sparse grid3d
        :param ox:       Origin X
        :param oy:       Origin Y
        :param oz:       Origin Z
        :param cx:       Cell Sizes X
        :param cy:       Cell Sizes Y
        :param cz:       Cell Sizes Z
        :param ipj:      Projection
        :type name: str
        :type x_value: float
        :type y_value: float
        :type z_value: float
        :type ox: float
        :type oy: float
        :type oz: float
        :type cx: GXVV
        :type cy: GXVV
        :type cz: GXVV
        :type ipj: GXIPJ

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapMULTIGRID3DUTIL._create_vector_constant_vv(GXContext._get_tls_geo(), name.encode(), x_value, y_value, z_value, ox, oy, oz, cx, cy, cz, ipj)



    @classmethod
    def export_to_voxel(cls, project_file, multi_voxset_uuid, multi_voxset_attribute, grid3d_file):
        """
        Exports a Multi-Voxset into a Voxel

        :param project_file:            Project file
        :param multi_voxset_uuid:       Multi-Voxset UUID
        :param multi_voxset_attribute:  Multi-Voxset attribute
        :param grid3d_file:             Output Voxel file
        :type project_file: str
        :type multi_voxset_uuid: str
        :type multi_voxset_attribute: str
        :type grid3d_file: str

        ..
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._export_to_voxel(GXContext._get_tls_geo(), project_file.encode(), multi_voxset_uuid.encode(), multi_voxset_attribute.encode(), grid3d_file.encode()) @classmethod def import_from_voxel(cls, project_file, grid3d_file, multi_voxset_attribute, p_uuid_string): """ Import a Voxel directly into a Multi-Voxset :param project_file: Project file :param grid3d_file: Input Voxel file :param multi_voxset_attribute: Multi-Voxset attribute :param p_uuid_string: UUID string returned :type project_file: str :type grid3d_file: str :type multi_voxset_attribute: str :type p_uuid_string: str_ref .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ p_uuid_string.value = gxapi_cy.WrapMULTIGRID3DUTIL._import_from_voxel(GXContext._get_tls_geo(), project_file.encode(), grid3d_file.encode(), multi_voxset_attribute.encode(), p_uuid_string.value.encode()) @classmethod def import_from_datamine(cls, file, field, ipj, grid3d): """ Create a Geosoft Voxel file from a Datamine block model file. :param file: Datamine file name :param field: Field to use for data :param ipj: Projection to set :param grid3d: Output grid3d file name :type file: str :type field: str :type ipj: GXIPJ :type grid3d: str .. versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Create a Geosoft Voxel file from a Datamine block model file. """ gxapi_cy.WrapMULTIGRID3DUTIL._import_from_datamine(GXContext._get_tls_geo(), file.encode(), field.encode(), ipj, grid3d.encode()) @classmethod def compute_default_cell_size(cls, min_x, max_x, min_y, max_y, min_z, max_z): """ Used if the user does not provide a default cell size. 
:param min_x: MinX :param max_x: MaxX :param min_y: MinY :param max_y: MaxY :param min_z: MinZ :param max_z: MaxZ :type min_x: float :type max_x: float :type min_y: float :type max_y: float :type min_z: float :type max_z: float :returns: Default Cell Size :rtype: float .. versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Compute a default cell size for a grid3d given a data range. """ ret_val = gxapi_cy.WrapMULTIGRID3DUTIL._compute_default_cell_size(GXContext._get_tls_geo(), min_x, max_x, min_y, max_y, min_z, max_z) return ret_val @classmethod def filter(cls, input_file, output_file, filter, filter_file, n_passes, interpolate_dummies): """ Apply a 3D filter to a grid3d. :param input_file: Name of the input grid3d :param output_file: Name of the output grid3d :param filter: :ref:`FILTER3D` :param filter_file: Filter file, if filter is `VOX_FILTER3D_FILE <geosoft.gxapi.VOX_FILTER3D_FILE>` :param n_passes: Number of filter passes :param interpolate_dummies: (1: interpolate dummies) :type input_file: str :type output_file: str :type filter: int :type filter_file: str :type n_passes: int :type interpolate_dummies: int .. versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._filter(GXContext._get_tls_geo(), input_file.encode(), output_file.encode(), filter, filter_file.encode(), n_passes, interpolate_dummies) @classmethod def invert_z(cls, input_file, output_file): """ Invert the Z values in the Grid3d. :param input_file: Name of the input grid3d :param output_file: Name of the output grid3d :type input_file: str :type output_file: str .. 
versionadded:: 9.5

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapMULTIGRID3DUTIL._invert_z(GXContext._get_tls_geo(), input_file.encode(), output_file.encode())



    @classmethod
    def extract_dem(cls, input_file, output_file):
        """
        Extract a DEM grid from a voxel.

        :param input_file:   Name of the input grid3d
        :param output_file:  Name of the output grid
        :type input_file: str
        :type output_file: str

        .. versionadded:: 9.5

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapMULTIGRID3DUTIL._extract_dem(GXContext._get_tls_geo(), input_file.encode(), output_file.encode())



    @classmethod
    def clip_to_polygon(cls, input_file, output_file, poly, clip_dummies):
        """
        Clip a Grid3d to a polygon.

        .. note:: The generated summary read "Invert the Z values in the
           Grid3d.", copied from `invert_z`; this method clips the grid3d to
           the given `GXPLY <geosoft.gxapi.GXPLY>` polygons.

        :param input_file:    Name of the input grid3d
        :param output_file:   Name of the output grid3d
        :param poly:          Polygons to clip to
        :param clip_dummies:  Clip Dummies (1) or leave them (0)
        :type input_file: str
        :type output_file: str
        :type poly: GXPLY
        :type clip_dummies: int

        .. versionadded:: 9.5

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapMULTIGRID3DUTIL._clip_to_polygon(GXContext._get_tls_geo(), input_file.encode(), output_file.encode(), poly, clip_dummies)



    @classmethod
    def grid_direct_from_gdb(cls, output_grid3d_filename, origin_x, origin_y, origin_z, cell_count_x, cell_count_y, cell_count_z, cell_size_x, cell_size_y, cell_size_z, method, db, x_channel, y_channel, z_channel, data_channel):
        """
        Create a grid3d using direct gridding.
:param output_grid3d_filename: Output grid3d filename :param origin_x: Voxel origin X :param origin_y: Voxel origin Y :param origin_z: Voxel origin Z :param cell_count_x: Voxel cell count X :param cell_count_y: Voxel cell count Y :param cell_count_z: Voxel cell count Z :param cell_size_x: Voxel cell size X :param cell_size_y: Voxel cell size Y :param cell_size_z: Voxel cell size Z :param method: :ref:`MULTIGRID3D_DIRECTGRID_METHOD` :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type output_grid3d_filename: str :type origin_x: float :type origin_y: float :type origin_z: float :type cell_count_x: int :type cell_count_y: int :type cell_count_z: int :type cell_size_x: float :type cell_size_y: float :type cell_size_z: float :type method: int :type db: GXDB :type x_channel: int :type y_channel: int :type z_channel: int :type data_channel: int .. versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The Z and Data channels may be array channels. If they are, the array sizes must match. 
""" gxapi_cy.WrapMULTIGRID3DUTIL._grid_direct_from_gdb(GXContext._get_tls_geo(), output_grid3d_filename.encode(), origin_x, origin_y, origin_z, cell_count_x, cell_count_y, cell_count_z, cell_size_x, cell_size_y, cell_size_z, method, db, x_channel, y_channel, z_channel, data_channel) @classmethod def grid_idw_from_gdb(cls, output_grid3d_filename, origin_x, origin_y, origin_z, cell_count_x, cell_count_y, cell_count_z, cell_size_x, cell_size_y, cell_size_z, db, x_channel, y_channel, z_channel, data_channel, weight_power, weight_slope, search_radius, blanking_distance, log, log_base, log_negative): """ Create a grid3d using IDW gridding. :param output_grid3d_filename: Output grid3d filename :param origin_x: Voxel origin X :param origin_y: Voxel origin Y :param origin_z: Voxel origin Z :param cell_count_x: Voxel cell count X :param cell_count_y: Voxel cell count Y :param cell_count_z: Voxel cell count Z :param cell_size_x: Voxel cell size X :param cell_size_y: Voxel cell size Y :param cell_size_z: Voxel cell size Z :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param weight_power: Weight Power (default 2) :param weight_slope: Weight Slope (default 1) :param search_radius: Distance weighting limit (default = 4 * CUBE_ROOT(DX*DY*DZ)) :param blanking_distance: Dummy values farther from data than this distance. 
(default = 4 * CUBE_ROOT(DX*DY*DZ))
        :param log:                Apply log transform to input data before gridding (0:No (default), 1:Yes)
        :param log_base:           One of `VV_LOG_BASE_10 <geosoft.gxapi.VV_LOG_BASE_10>` (default) or `VV_LOG_BASE_E <geosoft.gxapi.VV_LOG_BASE_E>`
        :param log_negative:       One of `VV_LOG_NEGATIVE_NO <geosoft.gxapi.VV_LOG_NEGATIVE_NO>` (default) or `VV_LOG_NEGATIVE_YES <geosoft.gxapi.VV_LOG_NEGATIVE_YES>`
        :type output_grid3d_filename: str
        :type origin_x: float
        :type origin_y: float
        :type origin_z: float
        :type cell_count_x: int
        :type cell_count_y: int
        :type cell_count_z: int
        :type cell_size_x: float
        :type cell_size_y: float
        :type cell_size_z: float
        :type db: GXDB
        :type x_channel: int
        :type y_channel: int
        :type z_channel: int
        :type data_channel: int
        :type weight_power: float
        :type weight_slope: float
        :type search_radius: float
        :type blanking_distance: float
        :type log: int
        :type log_base: float
        :type log_negative: int

        .. versionadded:: 9.5

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The Z and Data channels may be array channels. If they are,
        the array sizes must match.

        3D cells take on the averaged values within a search radius, weighted
        inversely by distance.

        Weighting can be controlled using the power and slope properties;
        weighting = 1 / (distance^wtpower + 1/slope) where distance is in
        units of grid cells (X dimension). Default is 0.0,

        If the blanking distance is set, all cells whose center point is not
        within the blanking distance of at least one data point are set to dummy.
""" gxapi_cy.WrapMULTIGRID3DUTIL._grid_idw_from_gdb(GXContext._get_tls_geo(), output_grid3d_filename.encode(), origin_x, origin_y, origin_z, cell_count_x, cell_count_y, cell_count_z, cell_size_x, cell_size_y, cell_size_z, db, x_channel, y_channel, z_channel, data_channel, weight_power, weight_slope, search_radius, blanking_distance, log, log_base, log_negative) @classmethod def get_data_extents(cls, filename, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the voxel size that has non-dummy data. :param filename: input filename :param min_x: Index of minimum valid data in X. :param min_y: Index of minimum valid data in Y. :param min_z: Index of minimum valid data in Z. :param max_x: Index of maximum valid data in X. :param max_y: Index of maximum valid data in Y. :param max_z: Index of maximum valid data in Z. :type filename: str :type min_x: int_ref :type min_y: int_ref :type min_z: int_ref :type max_x: int_ref :type max_y: int_ref :type max_z: int_ref .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Find the non-dummy volume of a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` object. If the voxel is all dummies, returns `iMAX <geosoft.gxapi.iMAX>` for the minima, and `iMIN <geosoft.gxapi.iMIN>` for the maxima. """ min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = gxapi_cy.WrapMULTIGRID3DUTIL._get_data_extents(GXContext._get_tls_geo(), filename.encode(), min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) @classmethod def get_data_ground_extents(cls, filename, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the voxel size in ground units that has non-dummy data. :param filename: input filename :param min_x: Ground location of minimum valid data in X. :param min_y: Ground location of minimum valid data in Y. :param min_z: Ground location of minimum valid data in Z. 
:param max_x: Ground location of maximum valid data in X. :param max_y: Ground location of maximum valid data in Y. :param max_z: Ground location of maximum valid data in Z. :type filename: str :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Find the non-dummy volume of a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` object. If the voxel is all dummies, returns `iMAX <geosoft.gxapi.iMAX>` for the minima, and `iMIN <geosoft.gxapi.iMIN>` for the maxima. """ min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = gxapi_cy.WrapMULTIGRID3DUTIL._get_data_ground_extents(GXContext._get_tls_geo(), filename.encode(), min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) @classmethod def grid_points_from_gdb(cls, name, error, cell_size, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, type, db, x_channel, y_channel, z_channel, data_channel, ipj): """ Grid a grid3d from a database using kriging. 
:param name: Output grid3d filename :param error: Output error grid3d filename :param cell_size: Cell size (DUMMY for default) :param var_only: Variogram Only :param min_radius: Minimum Search Radius (DUMMY for none) :param max_radius: Maximum Search Radius (DUMMY for none) :param min_points: Minimum Search Points :param max_points: Maximum Search Points :param model: Model number 1-power, 2-sperical, 3-gaussian, 4-exponential :param power: Power :param slope: Slope :param range: Range :param nugget: Nugget :param sill: Sill :param type: :ref:`GS_TYPES` :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type name: str :type error: str :type cell_size: float :type var_only: int :type min_radius: float :type max_radius: float :type min_points: int :type max_points: int :type model: int :type power: float :type slope: float :type range: float :type nugget: float :type sill: float :type type: int :type db: GXDB :type x_channel: int :type y_channel: int :type z_channel: int :type data_channel: int :type ipj: GXIPJ .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._grid_points_from_gdb(GXContext._get_tls_geo(), name.encode(), error.encode(), cell_size, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, type, db, x_channel, y_channel, z_channel, data_channel, ipj) @classmethod def grid_points_z_from_gdb(cls, name, error, cell_size, cell_size_z, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, type, db, x_channel, y_channel, z_channel, data_channel, ipj): """ Grid a grid3d from a database (using variable Z's) :param name: Output grid3d filename :param error: Output error grid3d filename :param cell_size: Cell size (DUMMY for default) :param cell_size_z: Cell size in Z ("" for default) :param var_only: Variogram Only :param min_radius: Minimum Search Radius (DUMMY for none) :param max_radius: Maximum Search Radius (DUMMY for none) :param min_points: Minimum Search Points :param max_points: Maximum Search Points :param model: Model number 1-power, 2-sperical, 3-gaussian, 4-exponential :param power: Power :param slope: Slope :param range: Range :param nugget: Nugget :param sill: Sill :param type: :ref:`GS_TYPES` :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type name: str :type error: str :type cell_size: float :type cell_size_z: str :type var_only: int :type min_radius: float :type max_radius: float :type min_points: int :type max_points: int :type model: int :type power: float :type slope: float :type range: float :type nugget: float :type sill: 
float :type type: int :type db: GXDB :type x_channel: int :type y_channel: int :type z_channel: int :type data_channel: int :type ipj: GXIPJ .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMULTIGRID3DUTIL._grid_points_z_from_gdb(GXContext._get_tls_geo(), name.encode(), error.encode(), cell_size, cell_size_z.encode(), var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, type, db, x_channel, y_channel, z_channel, data_channel, ipj) @classmethod def grid_points_z_ex_from_gdb(cls, name, error, cell_size, cell_size_z, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, strike, dip, plunge, along_strike_weight, down_dip_weight, type, db, x_channel, y_channel, z_channel, data_channel, ipj): """ Grid a grid3d from a database (using variable Z's) :param name: Output grid3d filename :param error: Output error grid3d filename :param cell_size: Cell size (DUMMY for default) :param cell_size_z: Cell size in Z ("" for default) :param var_only: Variogram Only :param min_radius: Minimum Search Radius (DUMMY for none) :param max_radius: Maximum Search Radius (DUMMY for none) :param min_points: Minimum Search Points :param max_points: Maximum Search Points :param model: Model number 1-power, 2-sperical, 3-gaussian, 4-exponential :param power: Power :param slope: Slope :param range: Range :param nugget: Nugget :param sill: Sill :param strike: Strike :param dip: Dip :param plunge: Plunge :param along_strike_weight: Strike Weight :param down_dip_weight: Dip Plane Weight :param type: :ref:`GS_TYPES` :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param 
data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type name: str :type error: str :type cell_size: float :type cell_size_z: str :type var_only: int :type min_radius: float :type max_radius: float :type min_points: int :type max_points: int :type model: int :type power: float :type slope: float_ref :type range: float_ref :type nugget: float :type sill: float_ref :type strike: float :type dip: float :type plunge: float :type along_strike_weight: float :type down_dip_weight: float :type type: int :type db: GXDB :type x_channel: int :type y_channel: int :type z_channel: int :type data_channel: int :type ipj: GXIPJ .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ slope.value, range.value, sill.value = gxapi_cy.WrapMULTIGRID3DUTIL._grid_points_z_ex_from_gdb(GXContext._get_tls_geo(), name.encode(), error.encode(), cell_size, cell_size_z.encode(), var_only, min_radius, max_radius, min_points, max_points, model, power, slope.value, range.value, nugget, sill.value, strike, dip, plunge, along_strike_weight, down_dip_weight, type, db, x_channel, y_channel, z_channel, data_channel, ipj) @classmethod def log_grid_points_z_ex_from_gdb(cls, name, error, cell_size, cell_size_z, var_only, min_radius, max_radius, min_points, max_points, model, power, slope, range, nugget, sill, strike, dip, plunge, along_strike_weight, down_dip_weight, log_opt, min_log, type, db, x_channel, y_channel, z_channel, data_channel, ipj): """ Log grid a grid3d from a database (using variable Z's) :param name: Output grid3d filename :param error: Output error grid3d filename :param cell_size: Cell size (DUMMY for default) :param cell_size_z: Cell size in Z ("" for default) :param var_only: Variogram Only :param min_radius: Minimum Search Radius (DUMMY for none) :param max_radius: Maximum Search Radius (DUMMY for none) :param min_points: Minimum Search Points :param 
max_points: Maximum Search Points :param model: Model number 1-power, 2-sperical, 3-gaussian, 4-exponential :param power: Power :param slope: Slope :param range: Range :param nugget: Nugget :param sill: Sill :param strike: Strike :param dip: Dip :param plunge: Plunge :param along_strike_weight: Strike Weight :param down_dip_weight: Dip Plane Weight :param log_opt: :ref:`VOX_GRID_LOGOPT` Log Option :param min_log: Minimum log :param type: :ref:`GS_TYPES` :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type name: str :type error: str :type cell_size: float :type cell_size_z: str :type var_only: int :type min_radius: float :type max_radius: float :type min_points: int :type max_points: int :type model: int :type power: float :type slope: float_ref :type range: float_ref :type nugget: float :type sill: float_ref :type strike: float :type dip: float :type plunge: float :type along_strike_weight: float :type down_dip_weight: float :type log_opt: int :type min_log: float :type type: int :type db: GXDB :type x_channel: int :type y_channel: int :type z_channel: int :type data_channel: int :type ipj: GXIPJ .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ slope.value, range.value, sill.value = gxapi_cy.WrapMULTIGRID3DUTIL._log_grid_points_z_ex_from_gdb(GXContext._get_tls_geo(), name.encode(), error.encode(), cell_size, cell_size_z.encode(), var_only, min_radius, max_radius, min_points, max_points, model, power, slope.value, range.value, nugget, sill.value, strike, dip, plunge, along_strike_weight, down_dip_weight, log_opt, min_log, type, db, x_channel, y_channel, z_channel, data_channel, ipj) @classmethod def krig_from_gdb(cls, name, cell_size, type, db, x_channel, y_channel, z_channel, data_channel, ipj, reg): """ A more compact and extensible form of `log_grid_points_z_ex_from_gdb <geosoft.gxapi.GXMULTIGRID3DUTIL.log_grid_points_z_ex_from_gdb>`. :param name: Output grid3d filename :param cell_size: Cell size (DUMMY for default) :param type: :ref:`GS_TYPES` :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type name: str :type cell_size: float :type type: int :type db: GXDB :type x_channel: int :type y_channel: int :type z_channel: int :type data_channel: int :type ipj: GXIPJ :type reg: GXREG .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Optional Parameters. If these values are not set in the `GXREG <geosoft.gxapi.GXREG>`, then default parameters will be used. 
ERROR_VOXEL: Output error grid3d filename ("" for none) CELLSIZEZ: Z Cell size string (space delimited, "" for default) RADIUS_MIN: Minimum Search Radius (REAL) (Default = 4) (Blanking Distance) RADIUS_MAX: Maximum Search Radius (REAL) (Default = 16) SEARCH_MIN: Minimum Search Points (INT) (Default = 16) SEARCH_MAX: Maximum Search Points (INT) (Default = 32) VARIOGRAM_ONLY: Set to 1 to calculate the variogram only (INT) (Default = 0) MODEL: Variogram Model number 1-power, 2-sperical, 3-gaussian, 4-exponential (INT) (Default = 2) POWER: Power (Default = DUMMY) SLOPE: Slope (REAL) (if input is DUMMY, value calculated and set on return) RANGE: Range (REAL) (if input is DUMMY, value calculated and set on return) SILL : Sill (REAL) (if input is DUMMY, value calculated and set on return) STRIKE: Strike (REAL) (Default = 0) DIP: Dip (REAL) (Default = 90) PLUNGE: Plunge (REAL) (Default = 0) STRIKE WEIGHT: Along-Strike Weight (REAL) (Default = 1) DIP_WEIGHT: Down-Dip Weight (REAL) (Default = 1) LOG_OPT: One of :ref:`VOX_GRID_LOGOPT` (Default = 0) MIN_LOG: Log Minimum (REAL) (Default = 1) MIN_X: Minimum X (REAL) (default = DUMMY to determine from the data. If input, nearest lt. or eq. multiple of cell size chosen) MAX_X: Maximum X (REAL) (default = DUMMY to determine from the data. If input, nearest gt. or eq. multiple of cell size chosen) MIN_Y: Minimum Y (REAL) (default = DUMMY to determine from the data. If input, nearest lt. or eq. external multiple of cell size chosen) MAX_Y: Maximum Y (REAL) (default = DUMMY to determine from the data. If input, nearest gt. or eq. multiple of cell size chosen) MIN_Z: Minimum Z (REAL) (default = DUMMY to determine from the data. If input, nearest lt. or eq. multiple of cell size chosen) MAX_Z: Maximum Z (REAL) (default = DUMMY to determine from the data. If input, nearest gt. or eq. multiple of cell size chosen)A more compact and extensible form of `GXVOX.log_grid_points_z_ex <geosoft.gxapi.GXVOX.log_grid_points_z_ex>`. 
Only the most basic parameters are entered directly. Optional parameters are passed via a `GXREG <geosoft.gxapi.GXREG>` object. """ gxapi_cy.WrapMULTIGRID3DUTIL._krig_from_gdb(GXContext._get_tls_geo(), name.encode(), cell_size, type, db, x_channel, y_channel, z_channel, data_channel, ipj, reg) @classmethod def create_subset(cls, input_name, output_name, offset_x, offset_y, offset_z, length_x, length_y, length_z): """ Create a new MULTIGRID3D that is a subset of an existing MULTIGRID3D. :param input_name: File Name of the MULTIGRID3D that will be subset :param output_name: File Name of the MULTIGRID3D that will be created :param offset_x: Starting location in X. :param offset_y: Starting location in Y. :param offset_z: Starting location in Z. :param length_x: Number of items to copy in X. :param length_y: Number of items to copy in Y. :param length_z: Number of items to copy in Z. :type input_name: str :type output_name: str :type offset_x: int :type offset_y: int :type offset_z: int :type length_x: int :type length_y: int :type length_z: int :rtype: GXMULTIGRID3D .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Creates a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` object that is a subset. """ ret_val = gxapi_cy.WrapMULTIGRID3DUTIL._create_subset(GXContext._get_tls_geo(), input_name.encode(), output_name.encode(), offset_x, offset_y, offset_z, length_x, length_y, length_z) return GXMULTIGRID3D(ret_val) @classmethod def create_subset_from_double_extents(cls, input_name, output_name): """ Create a new MULTIGRID3D that is a subset of the non-dummy extents. :param input_name: File Name of the MULTIGRID3D that will be subset :param output_name: File Name of the MULTIGRID3D that will be created :type input_name: str :type output_name: str :rtype: GXMULTIGRID3D ..
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Creates a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` object that is a subset with all dummy data regions removed. """ ret_val = gxapi_cy.WrapMULTIGRID3DUTIL._create_subset_from_double_extents(GXContext._get_tls_geo(), input_name.encode(), output_name.encode()) return GXMULTIGRID3D(ret_val) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXPG.rst .. _GXPG: GXPG class ================================== .. autoclass:: geosoft.gxapi.GXPG :members: .. _PG_3D_DIR: PG_3D_DIR constants ----------------------------------------------------------------------- 3D Pager direction .. autodata:: geosoft.gxapi.PG_3D_DIR_XYZ :annotation: .. autoattribute:: geosoft.gxapi.PG_3D_DIR_XYZ .. autodata:: geosoft.gxapi.PG_3D_DIR_YXZ :annotation: .. autoattribute:: geosoft.gxapi.PG_3D_DIR_YXZ .. autodata:: geosoft.gxapi.PG_3D_DIR_XZY :annotation: .. autoattribute:: geosoft.gxapi.PG_3D_DIR_XZY .. autodata:: geosoft.gxapi.PG_3D_DIR_YZX :annotation: .. autoattribute:: geosoft.gxapi.PG_3D_DIR_YZX .. autodata:: geosoft.gxapi.PG_3D_DIR_ZXY :annotation: .. autoattribute:: geosoft.gxapi.PG_3D_DIR_ZXY .. autodata:: geosoft.gxapi.PG_3D_DIR_ZYX :annotation: .. autoattribute:: geosoft.gxapi.PG_3D_DIR_ZYX .. _PG_BF_CONV: PG_BF_CONV constants ----------------------------------------------------------------------- Pager binary conversions .. autodata:: geosoft.gxapi.PG_BF_CONV_NONE :annotation: .. autoattribute:: geosoft.gxapi.PG_BF_CONV_NONE .. autodata:: geosoft.gxapi.PG_BF_CONV_SWAP :annotation: .. 
autoattribute:: geosoft.gxapi.PG_BF_CONV_SWAP <file_sep>/geosoft/gxapi/GXMISC.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMISC(gxapi_cy.WrapMISC): """ GXMISC class. Not a class. A catch-all for miscellaneous geophysical methods, primarily file conversions. """ def __init__(self, handle=0): super(GXMISC, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMISC <geosoft.gxapi.GXMISC>` :returns: A null `GXMISC <geosoft.gxapi.GXMISC>` :rtype: GXMISC """ return GXMISC() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def convert_cg3to_raw(cls, cg3, raw, tide_corr_opt): """ Convert a CG3 dump to RAW format. :param cg3: Name of the CG3 file :param raw: Name of the RAW file :param tide_corr_opt: TideCorr Option: 1 - use geosoft, 0 - use CG3/CG5 :type cg3: str :type raw: str :type tide_corr_opt: int .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMISC._convert_cg3to_raw(GXContext._get_tls_geo(), cg3.encode(), raw.encode(), tide_corr_opt) @classmethod def convert_cg5to_raw(cls, cg5, raw, tide_corr_opt): """ Convert a CG5 dump to RAW format. :param cg5: Name of the CG5 file :param raw: Name of the RAW file :param tide_corr_opt: TideCorr Option: 1 - use geosoft, 0 - use CG3/CG5 :type cg5: str :type raw: str :type tide_corr_opt: int .. 
versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMISC._convert_cg5to_raw(GXContext._get_tls_geo(), cg5.encode(), raw.encode(), tide_corr_opt) @classmethod def ukoa2_tbl(cls, ukoa, alias, tbl): """ Convert a UKOA file to a location TBL file. :param ukoa: Name of the UKOA file :param alias: Line name alias table :param tbl: Name of the output table :type ukoa: str :type alias: str :type tbl: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The TBL file will contain the following fields: = Line:string16 = Station:long = Latitude:double = Longitude:double = X:double = Y:double = Elevation:double """ gxapi_cy.WrapMISC._ukoa2_tbl(GXContext._get_tls_geo(), ukoa.encode(), alias.encode(), tbl.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/coordinate_system.py """ Coordinate systems :Classes: :`Coordinate_system`: define a coordinate system :`Coordinate_translate`: translate coordinates between coordinate systems :`Wkt`: well-known coordinate system for ESRI interoperability Coordinate systems describe how cartesian coordinates are located relative to the Earth. Cartesian coordinates are right-handed (x, y, z) spatial ordinates that describe locations within a coordinate system frame of reference. For coordinates relative to a horizontal plane, positive z is up, usually equivalent to elevation relative to a vertical datum.
Coordinate systems can be oriented in three dimensions using an `orientation` definition, which defines an (x0, y0, z0) origin and rotation (rx, ry, rz) around the X, Y and then Z axis relative to a base coordinate system. Base coordinate systems are usually defined by "well-known" coordinate system projections on a datum of the earth. **Coordinate System Name** A coordinate system will also have a descriptive name that identifies the base system with a datum and "well-known" map projection description, plus optional orientation and vertical reference datum if defined. Orientation parameters are enclosed in `<>` that define `<x0, y0, z0, rx, ry, rz>` (eg. `<400000, 6200000,0,0,-90,0>`). If a vertical reference datum is defined it will appear as a string in square brackets, for example `[CGVD28]`. Example coordinate system names: .. code:: "NAD83 / UTM zone 15N" "NAD83 / UTM zone 15N <450000,6250000,0,0,0,-25>" # oriented system, rotated -25 degrees "NAD83 / UTM zone 15N [NAVD88]" "NAD83 / UTM zone 15N <450000,6250000,0,0,0,-25> [NAVD88]" The descriptive name for "well-known" coordinate systems is sufficient to describe the coordinate system from the `EPSG Geodetic Registry <http://www.epsg.org/>`_. To fully locate ad-hoc coordinates you will need the parameters defined in the GXF stings. See :attr:`Coordinate_system.gxf`. 
:Constants: :NAME: None :NAME_HCS: gxapi.IPJ_NAME_PCS :NAME_PROJECTION: gxapi.IPJ_NAME_PROJECTION :NAME_METHOD: gxapi.IPJ_NAME_METHOD :NAME_DATUM: gxapi.IPJ_NAME_DATUM :NAME_ELLIPSOID: gxapi.IPJ_NAME_ELLIPSOID :NAME_LDATUM: gxapi.IPJ_NAME_LDATUM :NAME_UNIT: gxapi.IPJ_NAME_UNIT_ABBR :NAME_UNIT_FULL: gxapi.IPJ_NAME_UNIT_FULL :NAME_TYPE: gxapi.IPJ_NAME_TYPE :NAME_LLDATUM: gxapi.IPJ_NAME_LLDATUM :NAME_METHOD_PARMS: gxapi.IPJ_NAME_METHOD_PARMS :NAME_METHOD_LABEL: gxapi.IPJ_NAME_METHOD_LABEL :NAME_DATUM_PARMS: gxapi.IPJ_NAME_DATUM_PARMS :NAME_LDATUM_PARMS: gxapi.IPJ_NAME_LDATUM_PARMS :NAME_GEOID: gxapi.IPJ_NAME_GEOID :NAME_LDATUMDESCRIPTION: gxapi.IPJ_NAME_LDATUMDESCRIPTION :NAME_METHOD_PARMS_NATIVE: gxapi.IPJ_NAME_METHOD_PARMS_NATIVE :NAME_ORIENTATION: gxapi.IPJ_NAME_ORIENTATION_PARMS :NAME_VCS: -1 :NAME_HCS_VCS: -2 :LIST_COORDINATESYSTEM: gxapi.IPJ_PARM_LST_COORDINATESYSTEM :LIST_DATUM: gxapi.IPJ_PARM_LST_DATUM :LIST_PROJECTION: gxapi.IPJ_PARM_LST_PROJECTION :LIST_UNITS: gxapi.IPJ_PARM_LST_UNITS :LIST_UNITSDESCRIPTION: gxapi.IPJ_PARM_LST_UNITSDESCRIPTION :LIST_LOCALDATUMDESCRIPTION: gxapi.IPJ_PARM_LST_LOCALDATUMDESCRIPTION :LIST_LOCALDATUMNAME: gxapi.IPJ_PARM_LST_LOCALDATUMNAME :PARM_DATUM: 'datum' :PARM_PROJECTION: 'transform' :PARM_UNITS: 'units' :PARM_LOCAL_DATUM: 'datumtrf' .. seealso:: `geosoft.gxapi.GXIPJ` .. note:: Regression tests provide usage examples: `tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_coordinate_system.py>`_ """ import json import numpy as np import geosoft import geosoft.gxapi as gxapi from . import utility as gxu from . import dataframe as gxdf from . 
import vv as gxvv __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) ############# # Constants # 'HCS' and 'hcs' refer to Horizontal Coordinate System # 'VCS' and 'vcs' refer to Vertical Coordinate System NAME = None NAME_HCS = gxapi.IPJ_NAME_PCS NAME_PROJECTION = gxapi.IPJ_NAME_PROJECTION NAME_METHOD = gxapi.IPJ_NAME_METHOD NAME_DATUM = gxapi.IPJ_NAME_DATUM NAME_ELLIPSOID = gxapi.IPJ_NAME_ELLIPSOID NAME_LDATUM = gxapi.IPJ_NAME_LDATUM NAME_UNIT = gxapi.IPJ_NAME_UNIT_ABBR NAME_UNIT_FULL = gxapi.IPJ_NAME_UNIT_FULL NAME_TYPE = gxapi.IPJ_NAME_TYPE NAME_LLDATUM = gxapi.IPJ_NAME_LLDATUM NAME_METHOD_PARMS = gxapi.IPJ_NAME_METHOD_PARMS NAME_METHOD_LABEL = gxapi.IPJ_NAME_METHOD_LABEL NAME_DATUM_PARMS = gxapi.IPJ_NAME_DATUM_PARMS NAME_LDATUM_PARMS = gxapi.IPJ_NAME_LDATUM_PARMS NAME_GEOID = gxapi.IPJ_NAME_GEOID NAME_LDATUMDESCRIPTION = gxapi.IPJ_NAME_LDATUMDESCRIPTION NAME_METHOD_PARMS_NATIVE = gxapi.IPJ_NAME_METHOD_PARMS_NATIVE NAME_ORIENTATION = gxapi.IPJ_NAME_ORIENTATION_PARMS NAME_VCS = -1 NAME_HCS_VCS = -2 LIST_COORDINATESYSTEM = gxapi.IPJ_PARM_LST_COORDINATESYSTEM LIST_DATUM = gxapi.IPJ_PARM_LST_DATUM LIST_PROJECTION = gxapi.IPJ_PARM_LST_PROJECTION LIST_UNITS = gxapi.IPJ_PARM_LST_UNITS LIST_UNITSDESCRIPTION = gxapi.IPJ_PARM_LST_UNITSDESCRIPTION LIST_LOCALDATUMDESCRIPTION = gxapi.IPJ_PARM_LST_LOCALDATUMDESCRIPTION LIST_LOCALDATUMNAME = gxapi.IPJ_PARM_LST_LOCALDATUMNAME PARM_DATUM = 'datum' PARM_PROJECTION = 'transform' PARM_UNITS = 'units' PARM_LOCAL_DATUM = 'datumtrf' class CSException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.coordinate_system`. .. versionadded:: 9.2 """ pass def parameters(what, key): """ Get a dictionary of parameters for a coordinate system item. Parameters are maintained in csv coordinate system table files in the Geosoft Desktop Applications `csv` folder. 
:param what: | PARM_DATUM (from `datum.csv`) | PARM_PROJECTION (from `transform.csv`) | PARM_UNITS (from `units.csv`) | PARM_LOCAL_DATUM (from `datumtrf.csv`) :param key: parameter key to find and return :raises CSException: if table or key not found. .. versionadded:: 9.2 """ try: dct = gxdf.table_record(what, key) except gxdf.DfException as e: raise CSException(str(e)) return dct def parameter_exists(what, key): """ Test if a parameter set exists in a coordinate system table. :param what: see :func:`parameters` :param key: parameter key :returns: True if table/key exists .. versionadded:: 9.2 """ try: parameters(what, key) except CSException: return False else: return True _unknown_name = '*unknown' def _unknown(name): return _unknown_name in name def name_list(what, datum_filter=''): """ Get a list of coordinate system names :param what: | LIST_COORDINATESYSTEM | LIST_DATUM | LIST_PROJECTION | LIST_UNITS | LIST_LOCALDATUMDESCRIPTION | LIST_LOCALDATUMNAME | LIST_UNITSDESCRIPTION :param datum_filter: name of a datum to filter results :returns: sorted list of names .. versionadded:: 9.2 """ lst = gxapi.GXLST.create(1000) gxapi.GXIPJ.get_list(what, datum_filter, lst) namelist = list(gxu.dict_from_lst(lst)) namelist.sort(key=str.lower) return namelist def _extract(s, frame): c1, c2, *_ = frame s = s.strip(' \t"\'') end = s.rfind(c2) if end > 1: start = s.rfind(c1) sub = s[start + 1: end] s = s[:start] + s[end+1:] else: sub = '' return s.strip(' \t"\''), sub.strip(' \t"\'') def hcs_orient_vcs_from_name(name): """ Split a full coordinate system name into its components. A name has the form "hcs <orient> [vcs]" :param name: :returns: hcs, orient, vcs .. versionadded:: 9.2 """ name, vcs = _extract(name, '[]') hcs, orient = _extract(name, '<>') return hcs, orient, vcs def name_from_hcs_orient_vcs(hcs, orient=None, vcs=None): """ Construct a coordinate system name from an hcs, orientation and vcs. If orient or vcs are None or empty, the name will not include these parts. 
:param hcs: horizontal coordinate system string :param orient: orientation string :param vcs: vertical coordinate system string :returns: "hcs <orient> [vcs]" .. versionadded:: 9.2 """ if orient: orient = ' <' + orient + '>' else: orient = '' if vcs: vcs = ' [' + vcs + ']' else: vcs = '' return hcs + orient + vcs def list_from_wktsrs(wkt): """ Return a list from a wkt spatial reference string. .. versionadded:: 9.2 """ def first_item(wkts): n = 0 i = 0 for c in wkts: if n == 0 and c == ',': return wkts[:i].strip(' '), wkts[i + 1:].strip(' ') i += 1 if c == '[': n += 1 elif c == ']': n -= 1 return wkts.strip(' '), '' def parse_item(wkts): if wkts[0] == '"': return wkts[1:-1] if '[' in wkts: bkt = wkts.find('[') items = list_from_wktsrs(wkts[bkt + 1:-1]) dct = {'key': wkts[:bkt], 'name': items[0]} if len(items) > 1: dct['items'] = items[1:] return dct return wkts wkt = wkt.strip() wktlst = [] while wkt: first, wkt = first_item(wkt) wktlst.append(parse_item(first)) return wktlst def find_key(wkt, k): """ Find a key in the wkt, return it's name and items. .. versionadded:: 9.2 """ for w in wkt: if type(w) is dict: if w['key'] == k: return w['name'], w.get('items', []) # try the kids name, items = find_key(w.get('items', []), k) if name: return name, items return '', [] def wkt_vcs(vcs): """ Compose a wkt VERTCS block from a Geosoft vcs string. .. versionadded:: 9.2 """ return 'VERTCS["' + vcs + '"]' class Wkt: """ Helper class to parse WKT-formatted spatial reference strings. :param wkt: wkt (well-known text) string that describes a coordinate system. .. 
versionadded:: 9.2 """ def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return self.name def __enter__(self): return self def __exit__(self, _type, _value, _traceback): pass def __init__(self, wkt): self._wkt = list_from_wktsrs(wkt) self.pcs, _ = find_key(self._wkt, 'PROJCS') self.gcs, _ = find_key(self._wkt, 'GEOGCS') self.vcs, _ = find_key(self._wkt, 'VERTCS') @property def name(self): """ Return the ESRI coordinate system WKT string .. versionadded:: 9.2 """ if self.pcs: name = self.pcs else: name = self.gcs if self.vcs: name += ' [{}]'.format(self.vcs) return name.strip() def find_key(self, k): """ Return the name and list of items for a key :param k: the key to look for in the wkt :returns: name ('' if not found), list of parameters, ([] if no items) .. versionadded:: 9.2 """ return find_key(self._wkt, k) class Coordinate_system: """ Coordinate system class. A coordinate system defines a horizontal and vertical reference system to locate (x, y, z) cartesian coordinates relative to the Earth. :param coordinate_system: a coordinate system can be created from a number of different forms: - Geosoft name string (ie. "WGS 84 / UTM zone 32N [geodetic]") - Geosoft xml with root 'projection', xmlns="http://www.geosoft.com/schema/geo" - ESRI WKT string (ie. "PROJCS["WGS_1984_UTM_Zone_35N",GEOGCS[...") - a dictionary that contains the coordinate system properties either as a set of xml properties from a Geosoft xml 'property' definition, or as defined by **Dictionary Structure** below. - a JSON string that contains the coordinate system properties - a list that contains the 5 `GXF coordinate system strings <http://www.geosoft.com/resources/goto/GXF-Grid-eXchange-File>`_. For example: ``['"WGS 84 / UTM zone 32N [geodetic]", "WGS 84", "UTM zone 32N", "", ""]`` - `geosoft.gxapi.GXIPJ` instance - `Coordinate_system` instance, returns a copy - None to create an unknown coordinate system. 
For examples refer to `tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_coordinate_system.py>`_ :Dictionary Structure: :Geosoft: .. code:: { "type": "Geosoft", "name": name "datum": datum "method": method "units": units "local_datum": local datum transform "orientation": x0, y0, z0, xR, yR, zR "vcs": "vertical coordinate system" } :local: type "local" can be used to locate local coordinates in situations where one only has the (longitude, latitude) of a point on local coordinate system and the orientation of the local axis relative to geographic North. Internally an Oblique Stereographic projection is created with an origin at the defined origin point. .. code:: { "type": "local", "lon_lat": (lon, lat) required longitude, latitude of "origin", in degrees "origin": (x0, y0) location of "lon_lat" on the local coordinate system, default is (0,0) "azimuth": azimuth of rotation of local axis relative to North. "elevation": elevation of the origin in the vertical coordinate system, default is 0. "datum": datum, default is "WGS 84" "local_datum": local datum transform, default is the default for the datum "scale_factor": local scale factor, default is 0.9996 to be similar to UTM locally "vcs": "vertical coordinate system" default is undefined. } :Example: cs = geosoft.gxpy.Coordinate_system({'type': 'local', 'lon_lat': (-96, 43), 'azimuth': 25}) :EPSG: (http://www.epsg.org/) .. code:: { "type": "EPSG" "code": EPSG_code_number "orientation": x0, y0, z0, xR, yR, zR } :ESRI: (http://webhelp.esri.com/arcgisserver/9.3/java/index.htm#geodatabases/the_ogc-607957855.htm) .. code:: { "type": "ESRI", "wkt": wkt format string, starts with "PROJCS[" or "GEOGCS[" "orientation": x0, y0, z0, xR, yR, zR "vcs": "vertical coordinate system" } .. versionadded:: 9.2 supersedes `ipj` module.
""" def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return self.name def __enter__(self): return self def __exit__(self, _type, _value, _traceback): self.__del__() def __del__(self): if hasattr(self, '_gxapi_ipj'): self._gxapi_ipj = None def __init__(self, coordinate_system=None): self._dict = None self._gxapi_ipj = gxapi.GXIPJ.create() if coordinate_system is None: coordinate_system = _unknown_name if isinstance(coordinate_system, str): self._from_str(coordinate_system) elif isinstance(coordinate_system, gxapi.GXIPJ): coordinate_system.copy(self.gxipj) elif isinstance(coordinate_system, Coordinate_system): coordinate_system.gxipj.copy(self.gxipj) elif isinstance(coordinate_system, dict): self._from_dict(coordinate_system) else: self._from_gxf(coordinate_system) def __eq__(self, other): return self.same_as(other) def __ne__(self, other): return not self.__eq__(other) @classmethod def local(cls, lon_lat=(0, 0), origin=(0, 0), azimuth=0, elevation=0, datum="WGS 84", local_datum=None, scale_factor=0.9996, vcs=None): """ Create an ad-hoc local coordinate system. :param lon_lat: (longitude, latitude) of the center :param origin: (x, y) to assign to the center :param azimuth: local rotation of the system relative to geographic North, in degrees azimuth :param elevation: elevation for z = 0. :param datum: datum, default is "WGS 84" :param local_datum: local datum, default will be the default for the specified datum :param scale_factor: central scale factor. The default is 0.9996 as the most common map system is UTM and thus relative distances in the local system will be similar, though not the same, as UTM. :param vcs: name for the vertical coordinate system reference, default is unknown Local coordinate systems are simple cartesian systems established for a specific purpose without complete geodetic control, but for which one knows or can estimate the longitude, latitude of a point on the local coordinate system. 
In this way local coordinates can be located reasonably well relative to the Earth and other Earth-referenced data. The local system will be constructed as an oblique stereographic projection centered at the system origin, which is similar to what one sees when looking at maps in Google maps. .. versionadded:: 9.3.1 """ csdict = {'type': 'local', 'lon_lat':lon_lat, 'origin': origin, 'azimuth': azimuth, 'elevation': elevation, 'datum': datum, 'scale_factor': scale_factor} if local_datum: csdict['local_datum'] = local_datum if vcs: csdict['vcs'] = vcs return cls(csdict) @property def gxipj(self): """ `geosoft.gxapi.GXIPJ` instance""" return self._gxapi_ipj @property def name(self): """ coordinate system name as 'datum / projection <orientation> [vcs]' """ return self.gxf[0] @property def units_name(self): """ name of the distance units (abbreviation)""" return self.cs_name(NAME_UNIT) @property def unit_of_measure(self): """ same as units_name, provided for naming consistency with other usage in gxpy.""" return self.units_name @property def metres_per_unit(self): """ the number of metres per distance unit of the coordinate system.""" fr = gxapi.float_ref() sr = gxapi.str_ref() self.gxipj.get_units(fr, sr) return fr.value @property def hcs(self): """ horizontal coordinate system name""" return self.cs_name(NAME_HCS) @property def vcs(self): """ Vertical coordinate system name. Can be set.""" return self.cs_name(NAME_VCS) @vcs.setter def vcs(self, vcs): self.gxipj.set_vcs(vcs) @property def is_oriented(self): """True if the coordinate system has an orientation.""" return self.gxipj.get_orientation() not in (gxapi.IPJ_ORIENT_DEFAULT, gxapi.IPJ_ORIENT_PLAN) @property def orientation_name(self): """The name of an oriented section for display/reference purposes. .. versionadded:: 9.4 """ sr = gxapi.str_ref() self.gxipj.get_orientation_name(sr) return sr.value @property def is_known(self): """ True if this is a known coordinate system. ..
versionadded:: 9.3 """ not_known = _unknown_name return self.name[:len(not_known)].lower() != not_known def coordinate_dict(self): """ Returns "Geosoft" dictionary of coordinate system attributes. .. versionadded:: 9.2 """ if self._dict is None: # initially from GXF values gxf1, gxf2, gxf3, gxf4, gxf5 = self.gxf hcs, orient, vcs = hcs_orient_vcs_from_name(gxf1) self._dict = {"type": "Geosoft", "name": self.name, "datum": gxf2, "projection": gxf3, "units": gxf4, "local_datum": gxf5, "orientation": orient, "vcs": vcs } return self._dict def same_hcs(self, other): """ Return True if the HCS are the same. .. versionadded:: 9.2 """ def same_units(a, b): a = a.coordinate_dict()['units'] b = b.coordinate_dict()['units'] if not (a and b): return True else: return a == b def same_orientation(a, b): return a._dict['orientation'] == b._dict['orientation'] if not same_units(self, other) or not same_orientation(self, other): return False if not(self.is_known) or not(is_known(other)): return True else: return bool(self.gxipj.coordinate_systems_are_the_same(other.gxipj)) def same_vcs(self, other): """ Return True if the VCS are the same. .. versionadded:: 9.2 """ svcs = self.vcs ovcs = other.vcs if (svcs == '') or (ovcs == ''): return True else: return svcs == ovcs def same_as(self, other): """ Return True if both coordinate systems (HCS and VCS) are the same. .. versionadded:: 9.2 """ if other is None: return True if not isinstance(other, Coordinate_system): other = Coordinate_system(other) return self.same_hcs(other) and self.same_vcs(other) def _from_str(self, cstr): """ Setup coordinate systems from a string. .. versionadded:: 9.2 .. 
versionchanged:: 9.4 supports xml (first character is '<') """ # json string if cstr[0] == '{': try: jsondict = json.loads(cstr) except ValueError: # try replacing single quotes jstr = cstr.replace('"', '\\"').replace("'", '"') try: jsondict = json.loads(jstr) except ValueError: raise ValueError(_t('"Invalid JSON coordinate system string: "{}"').format(cstr)) self._from_dict(jsondict) # xml elif cstr[0] == '<': self.xml = cstr # ESRI WKT elif 'GEOGCS[' in cstr: self.gxipj.set_esri(cstr) vcs, _ = Wkt(cstr).find_key('VERTCS') if vcs: self.vcs = vcs else: self._from_gxf([cstr, '', '', '', '']) def _from_gxf(self, gxfs): def raise_gxf_error(): raise CSException(_t('Unknown coordinate system:' + '\n name> {}' + '\n datum> {}' + '\n projection> {}' + '\n units> {}' + '\nlocal datum> {}') .format(gxfs[0], gxfs[1], gxfs[2], gxfs[3], gxfs[4])) gxf1, gxf2, gxf3, gxf4, gxf5 = gxfs hcs, orient, vcs = hcs_orient_vcs_from_name(gxf1) # if we get a name only, and it has a datum and projection, copy these. # The challenge with a name only is that the "datum / projection" must exist as # a known coordinate system, otherwise we cannot resolve it. Users some times # combine projections with different datums so copying the values allows for this if (gxf2 == '') and (gxf3 == ''): if '/' in hcs: datum, projection, *_ = hcs.strip('"').split('/') gxf2 = datum.strip() gxf3 = projection.strip() else: gxf2 = hcs # units only if (not _unknown(gxf1)) and not (gxf3 or gxf4 or gxf5) and parameter_exists(PARM_UNITS, gxf1): self.gxipj.set_gxf('', '', '', gxf1, '') else: try: self.gxipj.set_gxf(gxf1, gxf2, gxf3, gxf4, gxf5) except (geosoft.gxapi.GXAPIError, geosoft.gxapi.GXError): raise_gxf_error() def _from_dict(self, csdict): """ Create an IPJ from a dictionary. .. 
versionadded:: 9.2 """ cstype = csdict.get('type', '').lower() if not cstype: # first try Geosoft xml dictionary, if not try 'geosoft' type try: if 'projection' in csdict: dictxml = gxu.geosoft_xml_from_dict(csdict) else: dictxml = gxu.geosoft_xml_from_dict({'projection': csdict}) self.xml = dictxml return except geosoft.gxapi.GXError: cstype = 'geosoft' if cstype == 'geosoft': s1, orient, vcs = hcs_orient_vcs_from_name(csdict.get('name', '')) orient = csdict.get('orientation', orient) vcs = csdict.get('vcs', vcs) s1 = name_from_hcs_orient_vcs(s1, orient, vcs) s2 = csdict.get('datum', '') s3 = csdict.get('projection', '') s4 = csdict.get('units', '') s5 = csdict.get('local_datum', '') self._from_gxf([s1, s2, s3, s4, s5]) elif cstype == 'esri': wkt = csdict.get('wkt', None) if wkt is None: raise ValueError("'ESRI missing 'wkt' property.") # TODO: The following is a quick fix. Seems WKT strings returned from arcpy code in ArcGIS Pro can now # contain trailing parameters like: # ;-5121200 -9998400 450432031.862147;-100000 10000;-100000 10000;0.001;0.001;0.001;IsHighPrecision # removed here to allow core code to parse. Investigate and move any resulting handling logic to core wkt = ']'.join(wkt.split(']')[:-1]) + ']' # TODO arcpy code could produce WKT strings with single quotes. 
Core code should be changed to be tolerant of this instead wkt = wkt.replace("'", '"') # add vertical datum reference from dict if not in the wkt vcs = csdict.get('vcs', '') if vcs and ('VERTCS[' not in wkt): wkt += wkt_vcs(vcs) # clear any existing coordinate system - bug GX does not clear prior orientation self.gxipj.set_gxf('WGS 84 <0,0,0,0,0,0>', '', '', '', '') self.gxipj.set_esri(wkt) # add orientation and vcs orient = csdict.get('orientation', '') if orient or vcs: gxfs = self.gxf gxfs[0] = name_from_hcs_orient_vcs(gxfs[0], orient, vcs) self._from_gxf(gxfs) elif cstype == "epsg": code = csdict.get('code', None) if code is None: raise ValueError("'EPSG missing 'code' property.") orient = csdict.get('orientation', '') self._from_gxf([str(code) + orient, '', '', '', '']) elif cstype == 'local': # must at least have a latitude and longitude lon, lat = csdict.get('lon_lat', (None, None)) if (lat is None) or (lon is None): raise CSException(_t("Local must define 'lon_lat' of the local origin.")) x0, y0 = csdict.get('origin', (0, 0)) azimuth = csdict.get('azimuth', 0.0) sf = csdict.get('scale_factor', 0.9996) units = csdict.get('units', 'm') datum = csdict.get('datum', 'WGS 84') ldatum = csdict.get('ldatum', '') elevation = csdict.get('elevation', 0.0) proj = '"Oblique Stereographic",{},{},{},0,0'.format(lat, lon, sf) vcs = csdict.get('vcs', '') if (azimuth == 0.0) and (elevation == 0.0): orient = '' else: orient = '0,0,{},0,0,{}'.format(elevation, azimuth) name = '{} / *Local({},{},{},{})'.format(datum, lat, lon, x0, y0) name_azimuth = name_from_hcs_orient_vcs(name, orient, vcs) self._from_gxf([name_azimuth, datum, proj, units, ldatum]) if (x0 != 0) or (y0 != 0): xx0, yy0, _ = self.xyz_from_oriented(np.array((-x0, -y0, 0))) proj = '"Oblique Stereographic",{},{},{},{},{}'.format(lat, lon, sf, -xx0, -yy0) self._from_gxf([name, datum, proj, units, ldatum]) else: raise ValueError("Projection type '{}' not supported.".format(cstype)) @property def gxf(self): """ The 
GXF string list from ipj. (http://www.geosoft.com/resources/goto/GXF-Grid-eXchange-File) The first string (gxf[0]) is the coordinate system name in the form: `datum / projection <x0,y0,z0,rx,ry,rz> [vcs]` The orientation parameters are between the '<>', and will be omitted if all 0. 'vcs' is the vertical coordinate system, and is omitted if the vcs is undefined. .. versionadded:: 9.2 """ s1 = gxapi.str_ref() s2 = gxapi.str_ref() s3 = gxapi.str_ref() s4 = gxapi.str_ref() s5 = gxapi.str_ref() self.gxipj.get_gxf(s1, s2, s3, s4, s5) lst = [s1.value.replace('"', '').strip(), s2.value, s3.value, s4.value, s5.value] return lst @gxf.setter def gxf(self, gxfs): self._from_gxf(gxfs) @property def xml(self): """ xml of the coordinate system using Geosoft schema. Can be set. .. versionadded:: 9.4 """ xml = gxapi.str_ref() self.gxipj.get_xml(xml) return xml.value @xml.setter def xml(self, xml): self.gxipj.set_xml(xml) @property def esri_wkt(self): """ ESRI Well-Known-Text (wkt) format coordinate string .. versionadded:: 9.3 """ sr = gxapi.str_ref() self._gxapi_ipj.get_esri(sr) return sr.value @esri_wkt.setter def esri_wkt(self, gxfs): self._from_str(gxfs) @property def json(self): """ JSON formatted coordinate system string. .. versionadded:: 9.3 """ return json.dumps(self.coordinate_dict()) @json.setter def json(self, json_str): self._from_str(json_str) def cs_name(self, what=NAME): """ Return requested name. :param what: | NAME | NAME_HCS | NAME_VCS | NAME_HCS_VCS | NAME_PROJECTION | NAME_METHOD | NAME_DATUM | NAME_ELLIPSOID | NAME_LDATUM | NAME_UNIT | NAME_UNIT_FULL | NAME_TYPE | NAME_LLDATUM | NAME_METHOD_PARMS | NAME_METHOD_LABEL | NAME_DATUM_PARMS | NAME_LDATUM_PARMS | NAME_GEOID | NAME_LDATUMDESCRIPTION | NAME_METHOD_PARMS_NATIVE | NAME_ORIENTATION If 'what' is not specified, gxipj.NAME assumed, which returns the coordinate system display name. :returns: The name requested .. 
versionadded:: 9.2 """ s = gxapi.str_ref() if what == NAME: return self.gxf[0] else: csname, *_ = self.gxf hcs, orient, vcs = hcs_orient_vcs_from_name(csname) if what == NAME_HCS_VCS: return name_from_hcs_orient_vcs(hcs, orient, vcs) if what == NAME_HCS: return name_from_hcs_orient_vcs(hcs, orient, None) if what == NAME_VCS: return vcs if what == NAME_DATUM: return hcs.split('/')[0].strip() if what == NAME_PROJECTION: if '/' in hcs: return hcs.split('/')[1].strip() else: return '' self.gxipj.get_name(what, s) return s.value def _oriented_xyz(self, direction, xyz, column_ordered=False): """ Return oriented (x, y, z) coordinates from true base (x, y, z) coordinates. :param xyz: (x, y, z) or iterable :param column_ordered: if xyz is iterable, and this is True, the data is assumed to be column ordered and the results are returned column ordered. :returns: (x, y, z) in un-oriented space .. versionadded:: 9.2 """ if not isinstance(xyz, np.ndarray): xyz = np.array(xyz) if xyz.ndim == 1: x = (xyz[0],) y = (xyz[1],) z = (xyz[2],) else: if column_ordered: x, y, z = xyz[0, :], xyz[1, :], xyz[2, :] else: x, y, z = xyz[:, 0], xyz[:, 1], xyz[:, 2] x = gxvv.GXvv(x, dtype=float) y = gxvv.GXvv(y, dtype=float) z = gxvv.GXvv(z, dtype=float) self.gxipj.convert_orientation_warp_vv(x.gxvv, y.gxvv, z.gxvv, direction) if xyz.ndim == 1: return x[0][0], y[0][0], z[0][0] else: xyz_column = np.array([x.np, y.np, z.np]) if column_ordered: return xyz_column else: return xyz_column.swapaxes(0, 1) def oriented_from_xyz(self, xyz, column_ordered=False): """ Return oriented (x, y, z) coordinates from true base (x, y, z) coordinates. :param xyz: (x, y, z) or iterable :param column_ordered: if xyz is iterable, and this is True, the data is assumed to be column ordered and the results are returned column ordered. :returns: (x, y, z) in un-oriented space .. 
versionadded:: 9.2 """ return self._oriented_xyz(0, xyz, column_ordered=column_ordered) def xyz_from_oriented(self, xyz, column_ordered=False): """ Return true base (x, y, z) coordinates from oriented (x, y, z) coordinates. :param xyz: (x, y, z) or iterable :param column_ordered: if xyz is iterable, and this is True, the data is assumed to be column ordered and the results are returned column ordered. :returns: (x, y, z) in oriented space .. versionadded:: 9.2 """ return self._oriented_xyz(1, xyz, column_ordered=column_ordered) def is_known(coordinate_system): """ Return True if this is a known coordinate system .. versionadded:: 9.3.1 """ if coordinate_system is None: return False if isinstance(coordinate_system, Coordinate_system): return coordinate_system.is_known try: return Coordinate_system(coordinate_system).is_known except CSException: return False class Coordinate_translate: """ Class to reproject coordinates between different coordinate systems. :params cs_from: from `Coordinate_system` :params cs_to: to `Coordinate_system` .. versionadded:: 9.2 """ def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return "PJ from \'{}\' to \'{}\'".format(str(self._cs_from), str(self._cs_to)) def __enter__(self): return self def __exit__(self, xtype, value, traceback): pass _cs_from = None _cs_to = None _sr = gxapi.str_ref() def __init__(self, cs_from, cs_to): if not isinstance(cs_from, Coordinate_system): cs_from = Coordinate_system(cs_from) self._cs_from = cs_from if not isinstance(cs_to, Coordinate_system): cs_to = Coordinate_system(cs_to) self._cs_to = cs_to self._pj = gxapi.GXPJ.create_ipj(cs_from.gxipj, cs_to.gxipj) def convert_vv(self, xvv, yvv, zvv=None): """ Project vv locations in-place. :param xvv: x locations as `geosoft.gxpy.vv.GXvv` instance :param yvv: y locations as `geosoft.gxpy.vv.GXvv` instance :param zvv: optional z locations as `geosoft.gxpy.vv.GXvv` instance .. 
        versionadded:: 9.3.1
        """
        # The underlying GXPJ conversion operates on float64 VVs in-place.
        # Remember any input VV that is not float64 so a float64 working copy
        # can be converted and the result copied back to the caller's VV.
        xvvin = yvvin = zvvin = None
        if not xvv.is_float64:
            xvvin = xvv
            xvv = gxvv.GXvv(xvv, dtype=np.float64)
        if not yvv.is_float64:
            yvvin = yvv
            yvv = gxvv.GXvv(yvv, dtype=np.float64)
        if zvv and not zvv.is_float64:
            zvvin = zvv
            zvv = gxvv.GXvv(zvv, dtype=np.float64)

        # 3D reprojection when a z VV is supplied, otherwise 2D.
        if zvv:
            self._pj.convert_vv3(xvv.gxvv, yvv.gxvv, zvv.gxvv)
        else:
            self._pj.convert_vv(xvv.gxvv, yvv.gxvv)

        # Copy converted values back into any original non-float64 VVs so the
        # conversion appears in-place to the caller regardless of input dtype.
        if xvvin:
            xvvin.set_data(xvv)
        if yvvin:
            yvvin.set_data(yvv)
        if zvvin:
            zvvin.set_data(zvv)

    def convert(self, xyz, in_place=False):
        """
        Project data in array in which first columns are x,y or x,y,z.

        Coordinates are reprojected in-place.

        :param xyz:         numpy shape (n,2) or (n,3+), or list, or a single (x, y, z) tuple.
                            Array dimension (n,2) for (x, y), (n,3+) for x,y,z.
                            Only numpy arrays may have dimensions above 3.
        :param in_place:    if True, numpy array data is converted in-place.
                            Ignored for list or tuple

        :example:
            Given an array shape (500,6), which represents 500 data records
            with 6 columns in which the first 3 columns are coordinates X, Y and Z.

            .. code::

                data = np.zeros((500,6), dtype='float')
                #then fill the array with some data

                xy_only  = pj.convert(data[:,:2])   #transform x,y
                xyz_only = pj.convert(data[:,:3])   #transform x,y and z
                all      = pj.convert(data)         #transform x,y and z with data returned

        :returns:   projected data in the same form as passed (numpy array, list, or (x,y,z))

        .. versionadded:: 9.2

        .. versionchanged:: 9.3.1 conversion methods will return results in the same type as the input data.
""" xyz_in = xyz if not isinstance(xyz, np.ndarray): xyz = np.array(xyz) npoints = xyz.shape[0] if npoints == 0: if in_place: return xyz if isinstance(xyz, np.ndarray): return np.array([[]]) return [[]] if xyz.ndim < 2: xyz = xyz.reshape((-1, xyz.shape[0])) flatten_return = True else: flatten_return = False nd = xyz.shape[1] if nd < 2: raise CSException(_t('Data must have dimension 2 (x,y) or 3 for (x,y,z) or higher.')) vvx = gxvv.GXvv(xyz[:, 0]) vvy = gxvv.GXvv(xyz[:, 1]) if nd >= 3: vvz = gxvv.GXvv(xyz[:, 2]) else: vvz = None self.convert_vv(vvx, vvy, vvz) if in_place: xyz[:, 0] = vvx.np xyz[:, 1] = vvy.np if nd > 2: xyz[:, 2] = vvz.np return xyz if nd >= 3: xyz = np.array([vvx.np, vvy.np, vvz.np]).T else: xyz = np.array([vvx.np, vvy.np]).T if flatten_return: xyz = xyz.flatten() if isinstance(xyz_in, np.ndarray): return xyz elif hasattr(xyz_in, '__iter__'): return list(xyz) if nd >= 3: return xyz[0, 0], xyz[0, 1], xyz[0, 2] else: return xyz[0, 0], xyz[0, 1] <file_sep>/examples/tutorial/Grids and Images/get_data_files.py import geosoft.gxpy.gx as gx import geosoft.gxpy.utility as gxu gxc = gx.GXpy() url = 'https://github.com/GeosoftInc/gxpy/raw/9.3/examples/tutorial/Grids%20and%20Images/' gxu.url_retrieve(url + 'elevation_surfer.GRD') gxu.url_retrieve(url + 'elevation') gxu.url_retrieve(url + 'elevation.ers') gxu.url_retrieve(url + 'elevation.ers.gi') gxu.url_retrieve(url + 'elevation.ers.xml')<file_sep>/geosoft/gxapi/GXDXFI.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDXFI(gxapi_cy.WrapDXFI): """ GXDXFI class. 
The `GXDXFI <geosoft.gxapi.GXDXFI>` class is used for importing AutoCAD® dxf files into Geosoft maps. """ def __init__(self, handle=0): super(GXDXFI, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDXFI <geosoft.gxapi.GXDXFI>` :returns: A null `GXDXFI <geosoft.gxapi.GXDXFI>` :rtype: GXDXFI """ return GXDXFI() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, name): """ Create `GXDXFI <geosoft.gxapi.GXDXFI>`. :param name: DXF file name :type name: str :returns: `GXDXFI <geosoft.gxapi.GXDXFI>` Object :rtype: GXDXFI .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDXFI._create(GXContext._get_tls_geo(), name.encode()) return GXDXFI(ret_val) @classmethod def dxf2_ply(cls, ply, dxfi): """ Convert a DXF file to a `GXPLY <geosoft.gxapi.GXPLY>` object :param ply: `GXPLY <geosoft.gxapi.GXPLY>` handle :param dxfi: `GXDXFI <geosoft.gxapi.GXDXFI>` handle :type ply: GXPLY :type dxfi: GXDXFI .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDXFI._dxf2_ply(GXContext._get_tls_geo(), ply, dxfi) def dxf2_view_ex(self, view, max_pen, pb_group, group, pb_one_color, color): """ Draw entities in a DXF file to a view in a map :param max_pen: User defined number of pens to use (can be `iDUMMY <geosoft.gxapi.iDUMMY>`) :param pb_group: TRUE to place entire DXF in one group :param group: Group name for one group (can be "" if above is FALSE) :param pb_one_color: TRUE to force one color :param color: :ref:`MVIEW_COLOR` (ignored if above is FALSE) :type view: GXMVIEW :type max_pen: int :type pb_group: int :type group: str :type pb_one_color: int :type color: int .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._dxf2_view_ex(view, max_pen, pb_group, group.encode(), pb_one_color, color) def dxf2_view_no_surfaces(self, view, max_pen, pb_group, group, pb_one_color, color): """ Draw entities in a DXF file to a view in a map, but for 3D views skips all surfaces :param max_pen: User defined number of pens to use (can be `iDUMMY <geosoft.gxapi.iDUMMY>`) :param pb_group: TRUE to place entire DXF in one group :param group: Group name for one group (can be "" if above is FALSE) :param pb_one_color: TRUE to force one color :param color: :ref:`MVIEW_COLOR` (ignored if above is FALSE) :type view: GXMVIEW :type max_pen: int :type pb_group: int :type group: str :type pb_one_color: int :type color: int .. 
versionadded:: 9.7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._dxf2_view_no_surfaces(view, max_pen, pb_group, group.encode(), pb_one_color, color) def get_range(self, min_x, max_x, min_y, max_y, min_z, max_z): """ Get DXF data range :param min_x: X min :param max_x: X max :param min_y: Y min :param max_y: Y max :param min_z: Z min :param max_z: Z max :type min_x: float_ref :type max_x: float_ref :type min_y: float_ref :type max_y: float_ref :type min_z: float_ref :type max_z: float_ref .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min_x.value, max_x.value, min_y.value, max_y.value, min_z.value, max_z.value = self._get_range(min_x.value, max_x.value, min_y.value, max_y.value, min_z.value, max_z.value) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/dap_client.py """ Geosoft dap server handling. :Classes: ======================= =============================================================== `DapClient` Geosoft dap client `DataType` data type `GridExtractFormat` return format for extracting a grid `ExtractProgressStatus` progress `DataExtract` data extraction `BoundingBox` bounding box `DataCard` data information `SearchFilter` search filter `ResultFilter` result filter `SearchParameters` search parameters ======================= =============================================================== Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_dap.py>`_ .. 
versionadded:: 9.4 """ import time import os from json import dumps, loads from requests import get, post, exceptions from enum import Enum from collections.abc import Sequence import geosoft from . import gx as gx from . import coordinate_system as gxcs from . import geometry as gxgeo from . import system as gxsys from . import utility as gxu __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) def _json_default(o): return o.__dict__ def _decode_object(o): # print(str(o)) if 'CoordinateSystem' in o: b = BoundingBox(coordinate_system=o['CoordinateSystem']) b.__dict__.update(o) return b else: d = DataCard() d.__dict__.update(o) d.Type = DataType(d.Type) return d class DapClientException(geosoft.GXRuntimeError): """ Exceptions from `geosoft.gxpy.dap`. """ pass class DataType(Enum): """Supported data types""" Map = 0 Grid = 1 Picture = 2 Point = 3 Database = 4 Document = 5 SPF = 6 Generic = 7 Voxel = 8 ArcGIS = 9 ImageServer = 10 PictureSection = 11 GridSection = 12 ProjectZip = 13 Drillhole = 14 NoData = 15 ThreeDV = 16 Geostring = 17 GMSYS3D = 18 VOXI = 19 PDF = 20 Geosurface = 21 GMSYS2D = 22 VectorVoxel = 23 GeosoftOffline = 24 def datatype_default_extension(item): if not isinstance(item, DataType): item = DataType(item) ext_list = ('map', # 0 'grd', # 1 'png', # 2 'csv', # 3 'gdb', # 4 'unknown', # 5 'spf', # 6 'unknown', # 7 'geosoft_voxel', # 8 'ArcGIS', # 9 'png', # 10 'png', # 11 'grd', # 12 'zip', # 13 'zip', # 14 'unknown', # 15 '3dv', # 16 'geosoft_geostring', # 17 'GMSYS3D', # 18 'geosoft_voxi', # 19 'pdf', # 20 'geosoft_geosurface', # 21 'GMSYS2D', # 22 'geosoft_vector_voxel', # 23 'unknown') # 24 return ext_list[item.value] def extract_url(item): if not isinstance(item, DataType): item = DataType(item) return 'dataset/extract/' + item.name.lower() + '/' class GridExtractFormat(Enum): GeosoftCompressed = 0 GeosoftUncompressed = 1 ESRIBinaryRaster = 2 BIL = 3 Geopak = 4 GXFText = 5 GXFCompressed = 6 ODDFPC = 7 ODDFUnix = 8 
    SurferV6 = 9
    SurferV7 = 10
    USGSPC = 11
    USGSUnix = 12
    ERMapper = 13


class ExtractProgressStatus(Enum):
    """Progress states reported while a dataset extraction is running."""
    Prepare = 0
    Extract = 1
    Compress = 2
    Complete = 3
    Cancelled = 4
    Failed = 5


class DataExtract:
    """
    Data extraction instance.

    :param filename:    name of the data file
    :param extents:     data extent as a `BoundingBox` or `geosoft.gxpy.geometry.Point2` instance
    :param resolution:  desired resolution in the distance units of the extents coordinate system
    :param format:      one of the extraction formats for the data type, default is the first format.

    .. versionadded:: 9.4
    """

    def __init__(self, filename, extents=None, resolution=0, format=0):
        # NOTE(review): the extents=None default is passed straight to
        # BoundingBox(), which does not document accepting None — confirm
        # callers always supply an extent, or handle None here.
        extents = BoundingBox(extents)
        self.BoundingBox = extents
        self.Filename = filename
        self.Resolution = resolution
        # Accept either an enum member (e.g. GridExtractFormat.BIL) or its
        # plain integer value; the server API wants the integer.
        if not isinstance(format, int):
            format = format.value
        self.Format = format

    def __str__(self):
        return 'Resolution: %s, Format: %s, Extents: %s' % (self.Resolution, self.Format, self.BoundingBox)

    def __repr__(self):
        return 'DataExtract(extents=%r,filename=%r,resolution=%r,format=%r)' % (
            self.BoundingBox, self.Filename, self.Resolution, self.Format)


class BoundingBox:
    """
    Bounding box instance.

    :param minx:    `MinX`
    :param miny:    `MinY`
    :param minz:    `MinZ`
    :param maxx:    `MaxX`
    :param maxy:    `MaxY`
    :param maxz:    `MaxZ`
    :param coordinate_system:   `CoordinateSystem` any supported coordinate system
                    representation. Default is "WGS 84".
    :param cs_as_xml:   True to force the coordinate_system to be xml.
                    (NOTE(review): not accepted by ``__init__`` — remove or implement.)
""" def __init__(self, minx=-180, miny=-90, minz=0, maxx=180, maxy=90, maxz=0, coordinate_system="WGS 84"): if isinstance(minx, BoundingBox): extent = minx self.MinX = extent.MinX self.MinY = extent.MinY self.MinZ = extent.MinZ self.MaxX = extent.MaxX self.MaxY = extent.MaxY self.MaxZ = extent.MaxZ self.CoordinateSystem = extent.CoordinateSystem else: try: self.MinX = float(minx) self.MinY = float(miny) self.MinZ = float(minz) self.MaxX = float(maxx) self.MaxY = float(maxy) self.MaxZ = float(maxz) self.CoordinateSystem = gxcs.Coordinate_system(coordinate_system).xml except (TypeError, ValueError): extent = minx if not isinstance(extent, gxgeo.Point2): extent = gxgeo.Point2(extent, coordinate_system=coordinate_system) self.MinX, self.MinY, self.MinZ, self.MaxX, self.MaxY, self.MaxZ = extent.extent_xyz self.CoordinateSystem = extent.coordinate_system.xml def __str__(self): a = '[%s, %s, %s] - [%s, %s %s], %s' b = (self.MinX, self.MinY, self.MinZ, self.MaxX, self.MaxY, self.MaxZ, self.CoordinateSystem) return a % b def __repr__(self): a = 'BoundingBox(minx=%r,miny=%r,minz=%r,maxx=%r,maxy=%r,maxz=%r,coordinate_system=%r)' b = (self.MinX, self.MinY, self.MinZ, self.MaxX, self.MaxY, self.MaxZ, self.CoordinateSystem) return a % b class DataCard(gxgeo.Geometry): """ Single dataset information instance. :param dap: `DapClient` instance :param id: `Id` unique dataset identifier property :param title: `Title` property :param type: `Type` dataset type, one of `DataType` values. :param hierarchy: `Hierarchy` location in the catalog hierarchy :param stylesheet: `Stylesheet` metadata style sheet :param extents: `Extents` is a `BoundingBox` instance :param has_original: `HasOriginal` True if the original data is available .. 
versionadded:: 9.4 """ def __init__(self, dap=None, id=None, title=None, type=0, hierarchy=None, stylesheet=None, extents=None, has_original=False): self._dap = dap if extents is None: extents = BoundingBox() self._extent = None self.Id = id self.Title = title self.Type = type self.Hierarchy = hierarchy self.Stylesheet = stylesheet self.Extents = extents self.HasOriginal = has_original super().__init__(name=title) def __str__(self): a = 'Id: %s, Title: %s, Type: %s, Hierarchy: %s' b = (self.Id, self.Title, self.Type, self.Hierarchy) return a % b def __repr__(self): a = 'Dataset(id=%r, title=%r, type=%r, hierarchy=%r, stylesheet=%r, has_original=%r)' b = (self.Id, self.Title, self.Type, self.Hierarchy, self.Stylesheet, self.HasOriginal) return a % b @property def dap_client(self): """ `DapClient` instance for this dataset, may be None if card is not yet associated with a server. .. versionadded:: 9.4 """ return self._dap @dap_client.setter def dap_client(self, dap): self._dap = dap @property def extent(self): if self._extent is None: sp = self.spatial_properties p1 = (sp['NativeMinX'], sp['NativeMinY'], sp['NativeMinZ']) p2 = (sp['NativeMaxX'], sp['NativeMaxY'], sp['NativeMaxZ']) cs = gxcs.Coordinate_system(sp['CoordinateSystem']) self._extent = gxgeo.Point2((p1, p2), coordinate_system=cs) return self._extent @property def info(self): """ Dataset info: http://dap.geosoft.com/REST/dataset/help/operations/GetDatasetById .. versionadded:: 9.4 """ return self._dap.post('dataset/info/' + str(self.Id)) @property def edition(self): """ Edition: http://dap.geosoft.com/REST/dataset/help/operations/GetEdition .. versionadded:: 9.4 """ return self._dap.get('dataset/edition/' + str(self.Id)) @property def disclaimer(self): """ Disclaimer: http://dap.geosoft.com/REST/dataset/help/operations/GetDisclaimer .. 
versionadded:: 9.4 """ return self._dap.get('dataset/disclaimer/' + str(self.Id)) @property def permission(self): """ Permission: http://dap.geosoft.com/REST/dataset/help/operations/GetPermission .. versionadded:: 9.4 """ return self._dap.get('dataset/permission/' + str(self.Id)) @property def metadata(self): """ Metadata: http://dap.geosoft.com/REST/dataset/help/operations/GetMetadata .. versionadded:: 9.4 """ return self._dap.get('dataset/metadata/' + str(self.Id)) @property def grid_properties(self): """ Grid data properties, `None` if not a grid dataset. http://dap.geosoft.com/REST/dataset/help/operations/GetGridProperties .. versionadded:: 9.4 """ if self.Type == DataType.Grid: return self._dap.get('dataset/properties/grid/' + str(self.Id)) return None @property def document_properties(self): """ Properties of the dataset as a document. http://dap.geosoft.com/REST/dataset/help/operations/GetDocumentProperties .. versionadded:: 9.4 """ try: return self._dap.get('dataset/properties/document/' + str(self.Id)) except Exception: return None @property def point_properties(self): """ Point properties, `None` if not a point (hxyz) dataset. http://dap.geosoft.com/REST/dataset/help/operations/GetHXYZProperties .. versionadded:: 9.4 """ if self.Type == DataType.Point: return self._dap.get('dataset/properties/hxyz/' + str(self.Id)) return None @property def map_properties(self): """ Map properties, `None` if not a map. http://dap.geosoft.com/REST/dataset/help/operations/GetMapProperties .. versionadded:: 9.4 """ if self.Type == DataType.Map: return self._dap.get('dataset/properties/map/' + str(self.Id)) return None @property def voxel_properties(self): """ Voxel properties, `None` if not a voxel. http://dap.geosoft.com/REST/dataset/help/operations/GetVoxelProperties .. 
versionadded:: 9.4 """ if self.Type == DataType.Voxel or self.Type == DataType.VectorVoxel: return self._dap.get('dataset/properties/voxel/' + str(self.Id)) return None @property def spatial_properties(self): """ Spatial properties: http://dap.geosoft.com/REST/dataset/help/operations/GetProperties .. versionadded:: 9.4 """ return self._dap.get('dataset/properties/' + str(self.Id)) class SearchFilter: """ Search filter instance. :param free_text_query: title/keyword search filter :param structured_metadata_query: :param extents: `BoundingBox` instance :param entirely_within: `True` for completely enclosed data, `False` for intersecting data. :param version: minimum version, default is 1. .. versionadded:: 9.4 """ def __init__(self, free_text_query=None, structured_metadata_query=None, extents=None, entirely_within=False, version=1): if extents is None: extents = BoundingBox() self.FreeTextQuery = free_text_query self.StructuredMetadataQuery = structured_metadata_query self.BoundingBox = extents self.EntirelyWithin = int(entirely_within) self.RequestVersion = version def __str__(self): a = 'FreeTextQuery: %s, StructuredMetadataQuery: %s, EntirelyWithin: %s, Extents: %s' b = (self.FreeTextQuery, self.StructuredMetadataQuery, self.EntirelyWithin, self.BoundingBox) return a % b def __repr__(self): a = 'SearchFilter(free_text_query=%r,structured_metadata_query=%r,extents=%r,entirely_within=%r,version=%r)' b = (self.FreeTextQuery, self.StructuredMetadataQuery, self.BoundingBox, self.EntirelyWithin, self.RequestVersion) return a % b class ResultFilter: """ Results filter instance. :param path: to this location in the hierarchy :param depth: to this depth in the hierarchy, default no depth limit :param start_index: start index in the list :param max_results: maximum results to include .. 
versionadded:: 9.4 """ def __init__(self, path=None, depth=2147483647, start_index=0, max_results=0, valid_path=False): self.Path = path self.Depth = depth self.StartIndex = start_index self.MaxResults = max_results if path is None: self.ValidPath = False else: self.ValidPath = True def __str__(self): return 'Path: %s, Depth: %s, StartIndex: %s, MaxResults: %s, ValidPath: %s' % ( self.Path, self.Depth, self.StartIndex, self.MaxResults, self.ValidPath) def __repr__(self): return 'ResultFilter(path=%r,depth=%r,start_index=%r,max_results=%r,valid_path=%r)' % ( self.Path, self.Depth, self.StartIndex, self.MaxResults, self.ValidPath) class SearchParameters: """ Search parameter instance, defined by a `SearchFilter` and a `ResultFilter` :param search_filter: `SearchFilter` instance :param result_filter: `ResultFilter` instance .. versionadded:: 9.4 """ def __init__(self, search_filter=None, result_filter=None): if search_filter is None: search_filter = SearchFilter() if result_filter is None: result_filter = ResultFilter() self.SearchFilter = search_filter self.ResultFilter = result_filter def __str__(self): return 'SearchFilter: %s, ResultFilter: %s' % (self.SearchFilter, self.ResultFilter) def __repr__(self): return 'SearchParameters(search_filter=%r,result_filter=%r)' % (self.SearchFilter, self.ResultFilter) class DapClient(Sequence): """ DapClient class to communicate with a Geosoft DAP server. :param url: url of the server, default is 'http://dap.geosoft.com/' :param get_catalog: `True` to get the server catalog. If `False` (the default) call method `catalog()` to get the retrieve the catalog from the server. The catalog is cached as part of the instance. .. 
    versionadded:: 9.4
    """

    def __enter__(self):
        return self

    def __exit__(self, xtype, xvalue, xtraceback):
        pass

    def __repr__(self):
        return "{}({})".format(self.__class__, self.__dict__)

    def __str__(self):
        # name comes from the server configuration; '?' means the catalog
        # has not been fetched yet
        if self._config is None:
            name = _t('unknown name')
        else:
            name = self._config['Name']
        datasets = len(self._cat)
        if datasets == 0:
            datasets = '?'
        return '{}: {} ({} datasets)'.format(self._url, name, datasets)

    def __init__(self, url='http://dap.geosoft.com/', get_catalog=False):
        super().__init__()
        self._cat = []
        self._config = None
        self._http_headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
        self._http_params = {'key': 'test'}

        # establish url and rest url
        url = url.lower()
        if url[-1] != '/':
            url = url + '/'
        if url[-5:] == 'rest/':
            self._rest_url = url
            self._url = url[:-5]
        else:
            self._rest_url = url + 'rest/'
            self._url = url

        # configuration
        # touch the property to validate the server; the result is cached
        try:
            c = self.configuration
        except exceptions.HTTPError as e:
            raise DapClientException(_t('Server \'{}\' has a problem:\n{}'.format(self._url, str(e))))

        # dataset catalog
        if get_catalog:
            try:
                self.catalog()
            except exceptions.HTTPError as e:
                raise DapClientException(_t('Server \'{}\' has a problem:\n{}'.format(self._url, str(e))))

        self._next = 0

    def __len__(self):
        return len(self._cat)

    def __iter__(self):
        # NOTE(review): iteration state (_next) lives on the instance, so
        # nested or concurrent iterations over the same DapClient interfere.
        return self

    def __next__(self):
        if self._next >= len(self._cat):
            # reset so the instance can be iterated again
            self._next = 0
            raise StopIteration
        else:
            ds = self._cat[self._next]
            self._next += 1
            return ds

    def __getitem__(self, item):
        # item may be an integer index, a title string, or a
        # (hierarchy, title, ...) sequence; fetches the catalog on demand.
        card = None
        if not self._cat:
            self.catalog()
        if isinstance(item, int):
            if item < 0 or item >= len(self._cat):
                raise IndexError('catalog index {} out of range {}'.format(item, len(self._cat)))
            card = self._cat[item]
        else:
            if isinstance(item, str):
                title = item
                hierarchy = None
            else:
                hierarchy, title, *_ = tuple(item)
            for i in self._cat:
                if hierarchy and i.Hierarchy != hierarchy:
                    continue
                if i.Title == title:
                    card = i
                    break
        if card:
            if card.dap_client is None:
                card.dap_client = self
            return card
        raise DapClientException('\'{}\' not found in catalog'.format(item))

    def _http_get(self, url, decoder=None, raw_content=False):
        # GET from the rest endpoint.
        # NOTE(review): the decoder argument is currently unused; responses
        # are decoded with gxu.dict_from_http_response_text unless
        # raw_content is requested.
        response = get(self._rest_url + url, params=self._http_params, headers=self._http_headers)
        if (response.ok):
            if raw_content:
                return response.content
            else:
                return gxu.dict_from_http_response_text(response.text)
        else:
            response.raise_for_status()

    def _http_post(self, url, post_parameters=None, decoder=None):
        # POST JSON-serialized parameters; decoder is a json object_hook
        # applied to the decoded response.
        if post_parameters is not None:
            post_parameters = dumps(post_parameters, default=_json_default)
        response = post(self._rest_url + url, data=post_parameters, params=self._http_params, headers=self._http_headers)
        if (response.ok):
            data = loads(response.content.decode('utf-8'), object_hook=decoder)
            return data
        else:
            response.raise_for_status()

    def datacard_from_id(self, id):
        """
        Return the `DataCard` instance based on the dataset ID

        :param id:  dataset id
        :return:    `DataCard` instance

        .. versionadded:: 9.4
        """
        id = int(id)
        for card in self.catalog():
            if int(card.Id) == id:
                return card
        raise DapClientException('Id \'{}\' not found in catalog'.format(id))

    def get(self, what):
        """
        GET information from the server.

        :param what: string of what to get.  for example "dataset/properties/265" retrieves
                     the dataset properties for dataset 265. See http://dap.geosoft.com/REST/dataset/help
                     for a list of the kinds of things you can get about a dataset.
        :return: requested info as a dict.
        """
        return self._http_get(what)

    def post(self, what):
        """
        POST information from the server.

        :param what: string of what to post.
        :return: returned info as a dict.
        """
        return self._http_post(what)

    @property
    def url(self):
        """ Server url."""
        return self._url

    @property
    def configuration(self):
        """
        Return service configuration info.
        See http://dap.geosoft.com/REST/service/help/operations/GetConfiguration

        ..
        versionadded:: 9.4
        """
        # cached after the first request
        if self._config is None:
            self._config = self._http_get('service/configuration')
        return self._config

    def catalog(self, search_parameters=None, refresh=False):
        """
        Return a filtered catalog list.

        :param search_parameters:   search filter, instance of `SearchParameters`
        :param refresh:             `True` to force a refresh, otherwise cached catalog is returned
        :return:                    list of server catalog entries as `DataCard` instances

        .. versionadded:: 9.4
        """
        if search_parameters is None:
            search_parameters = SearchParameters()

        if refresh or len(self._cat) == 0:
            self._cat = self._http_post('catalog/search', search_parameters, decoder=_decode_object)

        # assign this server to all cards
        for card in self._cat:
            card.dap_client = self

        return self._cat

    def fetch_data(self, datacard, filename=None, extent=None, resolution=None,
                   max_seconds=3600, progress=None, cadence=5):
        """
        Fetch data from the server.

        :param datacard:    `DataCard` instance, or a dataset description (hierarchy, title) or just title.
        :param filename:    file name in which to place data, default is a temporary geosoft grid file.
        :param extent:      `geosoft.gxpy.geometry.Point2` instance, or a `BoundingBox` instance
        :param resolution:  data resolution in the length units of the extent coordinate system
        :param max_seconds: maximum number of seconds to wait for the process to finish
        :param progress:    callback that can report progress, for example `progress=print` will print to the console
        :param cadence:     time in seconds between checking on server preparation status.
        :return:            data file name, which may be a temporary file.  Temporary files will only
                            persist during the life of the current context.

        .. code::

            import geosoft.gxpy.gx as gx
            import geosoft.gxpy.dap_client as gxdap

            gx.GXpy()
            with gxdap.DapClient() as dap:

                # some point data
                dataset = dap['Kimberlite Indicator Mineral Grain Chemistry']
                extent = gxgeo.Point2(((-112, 65), (-111, 65.5)), coordinate_system='NAD83')
                data_file = dap.fetch_data(dataset, extent=extent, resolution=0, progress=print)

        .. versionadded:: 9.4
        """
        if not isinstance(datacard, DataCard):
            datacard = self[datacard]

        if filename is None:
            filename = gx.gx().temp_file(DataType.datatype_default_extension(datacard.Type))
        folder, filename = os.path.split(filename)

        # ask the server for a default resolution if none was given
        if resolution is None:
            url = 'dataset/extract/resolution/' + datacard.Id
            res = self._http_post(url, datacard.Extents)
            resolution = res['Default']

        pro = _t('\nFetching \'{}\'({}) from \'{}\' to file \'{}\'').\
            format(datacard.Title, datacard.Id, self._url, filename)
        gx.gx().log(pro)
        if progress:
            progress(pro)

        # start the extraction job on the server; key identifies the job
        extract_parameters = DataExtract(extents=extent, resolution=resolution, filename=filename)
        urlx = DataType.extract_url(datacard.Type) + datacard.Id
        key = self._http_post(urlx, extract_parameters)

        time.sleep(1)  # give it a second in case it is really fast
        url = 'dataset/extract/progress/' + key
        status = self._http_get(url)
        stage = status['Stage']
        seconds = 0
        # poll the server until the extraction completes, is cancelled,
        # or the time budget is exhausted
        while (stage != ExtractProgressStatus.Complete.value and
               stage != ExtractProgressStatus.Cancelled.value and
               seconds < max_seconds):
            if stage == ExtractProgressStatus.Failed.value:
                raise DapClientException(_t('Extraction failed, likely no data in this extent:\nurl: {}\nextract detail:\n{}').
format(urlx, str(extract_parameters))) if progress: progress('{} {}%'.format(status['Message'], status['PercentComplete'])) time.sleep(cadence) seconds += cadence status = self._http_get(url) stage = status['Stage'] if stage == ExtractProgressStatus.Cancelled: return None info = self._http_get('dataset/extract/describe/' + key) zip_file = gx.gx().temp_file('zip') url = 'stream/dataset/extract/block/' + key + '/' with open(zip_file, 'wb') as out: ## Open temporary file as bytes for index in range(info['NumberOfBlocks']): if progress: progress(_t('Download block {} of {}'). format(index + 1, info['NumberOfBlocks'])) out.write(self._http_get(url + str(index), raw_content=True)) gxsys.unzip(zip_file, folder=folder) os.remove(zip_file) return_file = os.path.join(folder, filename) if not os.path.exists(return_file): raise DapClientException(_t('No result file, something went wrong.')) if progress: progress(_t('Fetch complete: {}').format(return_file)) return return_file def fetch_image(self, datacard, extent=None, resolution=None): if not isinstance(datacard, DataCard): datacard = self[datacard] pass <file_sep>/docs/GXMVIEW.rst .. _GXMVIEW: GXMVIEW class ================================== .. autoclass:: geosoft.gxapi.GXMVIEW :members: .. _MAKER: MAKER constants ----------------------------------------------------------------------- Maker defines .. autodata:: geosoft.gxapi.MAKER_GX :annotation: .. autoattribute:: geosoft.gxapi.MAKER_GX .. _MVIEW_CLIP: MVIEW_CLIP constants ----------------------------------------------------------------------- Boolean clipping defines .. autodata:: geosoft.gxapi.CLIP_ON :annotation: .. autoattribute:: geosoft.gxapi.CLIP_ON .. autodata:: geosoft.gxapi.CLIP_OFF :annotation: .. autoattribute:: geosoft.gxapi.CLIP_OFF .. 
_MVIEW_COLOR: MVIEW_COLOR constants ----------------------------------------------------------------------- 24-bit color defines The `color <geosoft.gxapi.GXMVIEW.color>` function can be used to create a color int from a color string description. The iColorXXX_MVIEW macros can be used to create colors from component intensities. .. autodata:: geosoft.gxapi.C_BLACK :annotation: .. autoattribute:: geosoft.gxapi.C_BLACK .. autodata:: geosoft.gxapi.C_RED :annotation: .. autoattribute:: geosoft.gxapi.C_RED .. autodata:: geosoft.gxapi.C_GREEN :annotation: .. autoattribute:: geosoft.gxapi.C_GREEN .. autodata:: geosoft.gxapi.C_BLUE :annotation: .. autoattribute:: geosoft.gxapi.C_BLUE .. autodata:: geosoft.gxapi.C_CYAN :annotation: .. autoattribute:: geosoft.gxapi.C_CYAN .. autodata:: geosoft.gxapi.C_MAGENTA :annotation: .. autoattribute:: geosoft.gxapi.C_MAGENTA .. autodata:: geosoft.gxapi.C_YELLOW :annotation: .. autoattribute:: geosoft.gxapi.C_YELLOW .. autodata:: geosoft.gxapi.C_GREY :annotation: .. autoattribute:: geosoft.gxapi.C_GREY .. autodata:: geosoft.gxapi.C_LT_RED :annotation: .. autoattribute:: geosoft.gxapi.C_LT_RED .. autodata:: geosoft.gxapi.C_LT_GREEN :annotation: .. autoattribute:: geosoft.gxapi.C_LT_GREEN .. autodata:: geosoft.gxapi.C_LT_BLUE :annotation: .. autoattribute:: geosoft.gxapi.C_LT_BLUE .. autodata:: geosoft.gxapi.C_LT_CYAN :annotation: .. autoattribute:: geosoft.gxapi.C_LT_CYAN .. autodata:: geosoft.gxapi.C_LT_MAGENTA :annotation: .. autoattribute:: geosoft.gxapi.C_LT_MAGENTA .. autodata:: geosoft.gxapi.C_LT_YELLOW :annotation: .. autoattribute:: geosoft.gxapi.C_LT_YELLOW .. autodata:: geosoft.gxapi.C_LT_GREY :annotation: .. autoattribute:: geosoft.gxapi.C_LT_GREY .. autodata:: geosoft.gxapi.C_GREY10 :annotation: .. autoattribute:: geosoft.gxapi.C_GREY10 .. autodata:: geosoft.gxapi.C_GREY25 :annotation: .. autoattribute:: geosoft.gxapi.C_GREY25 .. autodata:: geosoft.gxapi.C_GREY50 :annotation: .. autoattribute:: geosoft.gxapi.C_GREY50 .. 
autodata:: geosoft.gxapi.C_WHITE :annotation: .. autoattribute:: geosoft.gxapi.C_WHITE .. autodata:: geosoft.gxapi.C_TRANSPARENT :annotation: .. autoattribute:: geosoft.gxapi.C_TRANSPARENT .. _MVIEW_CYLINDER3D: MVIEW_CYLINDER3D constants ----------------------------------------------------------------------- What parts of the cylinder are closed .. autodata:: geosoft.gxapi.MVIEW_CYLINDER3D_OPEN :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_CYLINDER3D_OPEN .. autodata:: geosoft.gxapi.MVIEW_CYLINDER3D_CLOSESTART :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_CYLINDER3D_CLOSESTART .. autodata:: geosoft.gxapi.MVIEW_CYLINDER3D_CLOSEEND :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_CYLINDER3D_CLOSEEND .. autodata:: geosoft.gxapi.MVIEW_CYLINDER3D_CLOSEALL :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_CYLINDER3D_CLOSEALL .. _MVIEW_DRAW: MVIEW_DRAW constants ----------------------------------------------------------------------- Polygon drawing defines .. autodata:: geosoft.gxapi.MVIEW_DRAW_POLYLINE :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAW_POLYLINE .. autodata:: geosoft.gxapi.MVIEW_DRAW_POLYGON :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAW_POLYGON .. _MVIEW_DRAWOBJ3D_ENTITY: MVIEW_DRAWOBJ3D_ENTITY constants ----------------------------------------------------------------------- What types of entities to draw .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_POINTS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_POINTS .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_LINES :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_LINES .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_LINE_STRIPS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_LINE_STRIPS .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_LINE_LOOPS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_LINE_LOOPS .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_TRIANGLES :annotation: .. 
autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_TRIANGLES .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_TRIANGLE_STRIPS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_TRIANGLE_STRIPS .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_TRIANGLE_FANS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_TRIANGLE_FANS .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_QUADS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_QUADS .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_QUADS_STRIPS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_QUADS_STRIPS .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_POLYGONS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_ENTITY_POLYGONS .. _MVIEW_DRAWOBJ3D_MODE: MVIEW_DRAWOBJ3D_MODE constants ----------------------------------------------------------------------- What types of entities to draw .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_MODE_FLAT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_MODE_FLAT .. autodata:: geosoft.gxapi.MVIEW_DRAWOBJ3D_MODE_SMOOTH :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_DRAWOBJ3D_MODE_SMOOTH .. _MVIEW_EXTENT: MVIEW_EXTENT constants ----------------------------------------------------------------------- Types of extents defines .. autodata:: geosoft.gxapi.MVIEW_EXTENT_ALL :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_EXTENT_ALL .. autodata:: geosoft.gxapi.MVIEW_EXTENT_CLIP :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_EXTENT_CLIP .. autodata:: geosoft.gxapi.MVIEW_EXTENT_MAP :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_EXTENT_MAP .. autodata:: geosoft.gxapi.MVIEW_EXTENT_VISIBLE :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_EXTENT_VISIBLE .. _MVIEW_FIT: MVIEW_FIT constants ----------------------------------------------------------------------- Fit area defines .. autodata:: geosoft.gxapi.MVIEW_FIT_MAP :annotation: .. 
autoattribute:: geosoft.gxapi.MVIEW_FIT_MAP .. autodata:: geosoft.gxapi.MVIEW_FIT_VIEW :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_FIT_VIEW .. _MVIEW_FONT_WEIGHT: MVIEW_FONT_WEIGHT constants ----------------------------------------------------------------------- Font weight defines .. autodata:: geosoft.gxapi.MVIEW_FONT_WEIGHT_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_FONT_WEIGHT_NORMAL .. autodata:: geosoft.gxapi.MVIEW_FONT_WEIGHT_ULTRALIGHT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_FONT_WEIGHT_ULTRALIGHT .. autodata:: geosoft.gxapi.MVIEW_FONT_WEIGHT_LIGHT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_FONT_WEIGHT_LIGHT .. autodata:: geosoft.gxapi.MVIEW_FONT_WEIGHT_MEDIUM :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_FONT_WEIGHT_MEDIUM .. autodata:: geosoft.gxapi.MVIEW_FONT_WEIGHT_BOLD :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_FONT_WEIGHT_BOLD .. autodata:: geosoft.gxapi.MVIEW_FONT_WEIGHT_XBOLD :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_FONT_WEIGHT_XBOLD .. autodata:: geosoft.gxapi.MVIEW_FONT_WEIGHT_XXBOLD :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_FONT_WEIGHT_XXBOLD .. _MVIEW_GRID: MVIEW_GRID constants ----------------------------------------------------------------------- Grid Drawing defines .. autodata:: geosoft.gxapi.MVIEW_GRID_DOT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_GRID_DOT .. autodata:: geosoft.gxapi.MVIEW_GRID_LINE :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_GRID_LINE .. autodata:: geosoft.gxapi.MVIEW_GRID_CROSS :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_GRID_CROSS .. _MVIEW_GROUP: MVIEW_GROUP constants ----------------------------------------------------------------------- Open Group defines .. autodata:: geosoft.gxapi.MVIEW_GROUP_NEW :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_GROUP_NEW .. autodata:: geosoft.gxapi.MVIEW_GROUP_APPEND :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_GROUP_APPEND .. 
_MVIEW_GROUP_LIST: MVIEW_GROUP_LIST constants ----------------------------------------------------------------------- What groups to list .. autodata:: geosoft.gxapi.MVIEW_GROUP_LIST_ALL :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_GROUP_LIST_ALL .. autodata:: geosoft.gxapi.MVIEW_GROUP_LIST_MARKED :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_GROUP_LIST_MARKED .. autodata:: geosoft.gxapi.MVIEW_GROUP_LIST_VISIBLE :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_GROUP_LIST_VISIBLE .. _MVIEW_HIDE: MVIEW_HIDE constants ----------------------------------------------------------------------- Boolean hidding defines .. autodata:: geosoft.gxapi.HIDE_ON :annotation: .. autoattribute:: geosoft.gxapi.HIDE_ON .. autodata:: geosoft.gxapi.HIDE_OFF :annotation: .. autoattribute:: geosoft.gxapi.HIDE_OFF .. _MVIEW_IS: MVIEW_IS constants ----------------------------------------------------------------------- Defines for mview types .. autodata:: geosoft.gxapi.MVIEW_IS_AGG :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_AGG .. autodata:: geosoft.gxapi.MVIEW_IS_MOVABLE :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_MOVABLE .. autodata:: geosoft.gxapi.MVIEW_IS_CSYMB :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_CSYMB .. autodata:: geosoft.gxapi.MVIEW_IS_LINKED :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_LINKED .. autodata:: geosoft.gxapi.MVIEW_IS_MADE :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_MADE .. autodata:: geosoft.gxapi.MVIEW_IS_HIDDEN :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_HIDDEN .. autodata:: geosoft.gxapi.MVIEW_IS_CLIPPED :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_CLIPPED .. autodata:: geosoft.gxapi.MVIEW_IS_META :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_META .. autodata:: geosoft.gxapi.MVIEW_IS_VOXD :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_VOXD .. autodata:: geosoft.gxapi.MVIEW_IS_SHADOW_2D_INTERPRETATION :annotation: .. 
autoattribute:: geosoft.gxapi.MVIEW_IS_SHADOW_2D_INTERPRETATION .. autodata:: geosoft.gxapi.MVIEW_IS_VECTOR3D :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_VECTOR3D .. autodata:: geosoft.gxapi.MVIEW_IS_GENSURF :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_GENSURF .. autodata:: geosoft.gxapi.MVIEW_IS_VOXSURF :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_IS_VOXSURF .. _MVIEW_LABEL_BOUND: MVIEW_LABEL_BOUND constants ----------------------------------------------------------------------- Label Binding Defines .. autodata:: geosoft.gxapi.MVIEW_LABEL_BOUND_NO :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_BOUND_NO .. autodata:: geosoft.gxapi.MVIEW_LABEL_BOUND_YES :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_BOUND_YES .. _MVIEW_LABEL_JUST: MVIEW_LABEL_JUST constants ----------------------------------------------------------------------- Label Justification Defines .. autodata:: geosoft.gxapi.MVIEW_LABEL_JUST_TOP :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_JUST_TOP .. autodata:: geosoft.gxapi.MVIEW_LABEL_JUST_BOTTOM :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_JUST_BOTTOM .. autodata:: geosoft.gxapi.MVIEW_LABEL_JUST_LEFT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_JUST_LEFT .. autodata:: geosoft.gxapi.MVIEW_LABEL_JUST_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_JUST_RIGHT .. _MVIEW_LABEL_ORIENT: MVIEW_LABEL_ORIENT constants ----------------------------------------------------------------------- Label Orientation Defines .. autodata:: geosoft.gxapi.MVIEW_LABEL_ORIENT_HORIZONTAL :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_ORIENT_HORIZONTAL .. autodata:: geosoft.gxapi.MVIEW_LABEL_ORIENT_TOP_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_ORIENT_TOP_RIGHT .. autodata:: geosoft.gxapi.MVIEW_LABEL_ORIENT_TOP_LEFT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_LABEL_ORIENT_TOP_LEFT .. 
_MVIEW_NAME_LENGTH: MVIEW_NAME_LENGTH constants ----------------------------------------------------------------------- Maximum length for view and group names .. autodata:: geosoft.gxapi.MVIEW_NAME_LENGTH :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_NAME_LENGTH .. _MVIEW_OPEN: MVIEW_OPEN constants ----------------------------------------------------------------------- Open `GXMVIEW <geosoft.gxapi.GXMVIEW>` define .. autodata:: geosoft.gxapi.MVIEW_READ :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_READ .. autodata:: geosoft.gxapi.MVIEW_WRITENEW :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_WRITENEW .. autodata:: geosoft.gxapi.MVIEW_WRITEOLD :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_WRITEOLD .. _MVIEW_PJ: MVIEW_PJ constants ----------------------------------------------------------------------- Projection modes .. autodata:: geosoft.gxapi.MVIEW_PJ_OFF :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_PJ_OFF .. autodata:: geosoft.gxapi.MVIEW_PJ_LOCATION :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_PJ_LOCATION .. autodata:: geosoft.gxapi.MVIEW_PJ_ALL :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_PJ_ALL .. autodata:: geosoft.gxapi.MVIEW_PJ_ON :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_PJ_ON .. _MVIEW_RELOCATE: MVIEW_RELOCATE constants ----------------------------------------------------------------------- Relocation Defines .. autodata:: geosoft.gxapi.MVIEW_RELOCATE_FIT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_RELOCATE_FIT .. autodata:: geosoft.gxapi.MVIEW_RELOCATE_ASPECT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_RELOCATE_ASPECT .. autodata:: geosoft.gxapi.MVIEW_RELOCATE_ASPECT_CENTER :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_RELOCATE_ASPECT_CENTER .. _MVIEW_SMOOTH: MVIEW_SMOOTH constants ----------------------------------------------------------------------- Interpolation method to use for drawing line and polygon edges .. autodata:: geosoft.gxapi.MVIEW_SMOOTH_NEAREST :annotation: .. 
autoattribute:: geosoft.gxapi.MVIEW_SMOOTH_NEAREST .. autodata:: geosoft.gxapi.MVIEW_SMOOTH_CUBIC :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_SMOOTH_CUBIC .. autodata:: geosoft.gxapi.MVIEW_SMOOTH_AKIMA :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_SMOOTH_AKIMA .. _MVIEW_TILE: MVIEW_TILE constants ----------------------------------------------------------------------- Tiling defines .. autodata:: geosoft.gxapi.MVIEW_TILE_RECTANGULAR :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_TILE_RECTANGULAR .. autodata:: geosoft.gxapi.MVIEW_TILE_DIAGONAL :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_TILE_DIAGONAL .. autodata:: geosoft.gxapi.MVIEW_TILE_TRIANGULAR :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_TILE_TRIANGULAR .. autodata:: geosoft.gxapi.MVIEW_TILE_RANDOM :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_TILE_RANDOM .. _MVIEW_UNIT: MVIEW_UNIT constants ----------------------------------------------------------------------- Coordinate systems defines .. autodata:: geosoft.gxapi.MVIEW_UNIT_VIEW :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_UNIT_VIEW .. autodata:: geosoft.gxapi.MVIEW_UNIT_PLOT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_UNIT_PLOT .. autodata:: geosoft.gxapi.MVIEW_UNIT_MM :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_UNIT_MM .. autodata:: geosoft.gxapi.MVIEW_UNIT_VIEW_UNWARPED :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_UNIT_VIEW_UNWARPED .. _MVIEW_EXTENT_UNIT: MVIEW_EXTENT_UNIT constants ----------------------------------------------------------------------- Types of units for extents (these map to the :ref:`MVIEW_UNIT` defines directly) .. autodata:: geosoft.gxapi.MVIEW_EXTENT_UNIT_VIEW :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_EXTENT_UNIT_VIEW .. autodata:: geosoft.gxapi.MVIEW_EXTENT_UNIT_PLOT :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_EXTENT_UNIT_PLOT .. autodata:: geosoft.gxapi.MVIEW_EXTENT_UNIT_MM :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_EXTENT_UNIT_MM .. 
autodata:: geosoft.gxapi.MVIEW_EXTENT_UNIT_VIEW_UNWARPED :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_EXTENT_UNIT_VIEW_UNWARPED .. _TEXT_REF: TEXT_REF constants ----------------------------------------------------------------------- Text reference locations .. autodata:: geosoft.gxapi.TEXT_REF_BOTTOM_LEFT :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_BOTTOM_LEFT .. autodata:: geosoft.gxapi.TEXT_REF_BOTTOM_CENTER :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_BOTTOM_CENTER .. autodata:: geosoft.gxapi.TEXT_REF_BOTTOM_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_BOTTOM_RIGHT .. autodata:: geosoft.gxapi.TEXT_REF_MIDDLE_LEFT :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_MIDDLE_LEFT .. autodata:: geosoft.gxapi.TEXT_REF_MIDDLE_CENTER :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_MIDDLE_CENTER .. autodata:: geosoft.gxapi.TEXT_REF_MIDDLE_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_MIDDLE_RIGHT .. autodata:: geosoft.gxapi.TEXT_REF_TOP_LEFT :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_TOP_LEFT .. autodata:: geosoft.gxapi.TEXT_REF_TOP_CENTER :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_TOP_CENTER .. autodata:: geosoft.gxapi.TEXT_REF_TOP_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.TEXT_REF_TOP_RIGHT .. _MVIEW_3D_RENDER: MVIEW_3D_RENDER constants ----------------------------------------------------------------------- 3D Geometry rendering defines. These flags only affect mixed geometry groups and not the data specific groups (e.g. voxels, vector voxels surfaces etc.). Each of those groups has predefined optimum behaviour and any changes to these flags are ignored. .. autodata:: geosoft.gxapi.MVIEW_3D_RENDER_BACKFACES :annotation: .. autoattribute:: geosoft.gxapi.MVIEW_3D_RENDER_BACKFACES .. autodata:: geosoft.gxapi.MVIEW_3D_DONT_SCALE_GEOMETRY :annotation: .. 
autoattribute:: geosoft.gxapi.MVIEW_3D_DONT_SCALE_GEOMETRY <file_sep>/geosoft/gxapi/GXEDB.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXDB import GXDB ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXEDB(gxapi_cy.WrapEDB): """ GXEDB class. The `GXEDB <geosoft.gxapi.GXEDB>` class provides access to a database as displayed within Oasis montaj, but does not change data within the database itself. It performs functions such as setting the current line. **Note:** To obtain access to the database itself, it is recommended practice to begin with an `GXEDB <geosoft.gxapi.GXEDB>` object, and use the `lock <geosoft.gxapi.GXEDB.lock>` function to lock the underlying map to prevent external changes. The returned `GXDB <geosoft.gxapi.GXDB>` object (see `GXDB <geosoft.gxapi.GXDB>`) may then be safely used to make changes to the map itself. """ def __init__(self, handle=0): super(GXEDB, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXEDB <geosoft.gxapi.GXEDB>` :returns: A null `GXEDB <geosoft.gxapi.GXEDB>` :rtype: GXEDB """ return GXEDB() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def apply_formula_internal(self, formula): """ Apply a formula to selected cells of the current line. (Do not use this wrapper if you want to apply a formula across multiple lines) Notes: The current selection must be on cell(s) of a channel or on the a channel header. 
If the selection is on cell(s) of a channel, the formula is applied to only these cells. If the selection is on a channel header, the formula is applied to every cell in the channel. The given formula string must be of the form: "<NameOfCurrentChannel>=<SomeExpression>;" e.g. "x=y+1;" :param formula: Formula ("<NameOfCurrentChannel>=<SomeExpression>;") :type formula: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._apply_formula_internal(formula.encode()) @classmethod def current(cls): """ This method returns the Current Edited Database. :returns: `GXEDB <geosoft.gxapi.GXEDB>` Object :rtype: GXEDB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDB._current(GXContext._get_tls_geo()) return GXEDB(ret_val) @classmethod def current_no_activate(cls): """ This method returns the Current Edited Database. :returns: `GXEDB <geosoft.gxapi.GXEDB>` Object :rtype: GXEDB .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function acts just like `current <geosoft.gxapi.GXEDB.current>` except that the document is not activated (brought to foreground) and no guarantee is given about which document is currently active. """ ret_val = gxapi_cy.WrapEDB._current_no_activate(GXContext._get_tls_geo()) return GXEDB(ret_val) @classmethod def current_if_exists(cls): """ This method returns the Current Edited Database. :returns: `GXEDB <geosoft.gxapi.GXEDB>` Object to current edited database. 
If there is no current database, the user is not prompted for a database, and 0 is returned. :rtype: GXEDB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDB._current_if_exists(GXContext._get_tls_geo()) return GXEDB(ret_val) def del_line0(self): """ Delete Line 0. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Deletes an empty line 0 from the database. """ self._del_line0() def destroy_view(self, unload_flag): """ Removes the view from the workspace. :param unload_flag: :ref:`EDB_REMOVE` :type unload_flag: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Can only be run in interactive mode. After this call the `GXEDB <geosoft.gxapi.GXEDB>` object will become invalid. If this is the last view on the document and the document has been modified the map will be unloaded and optionally saved depending on the :ref:`EDB_REMOVE` parameter. """ self._destroy_view(unload_flag) def get_cur_chan_symb(self): """ Returns the currently marked channel symbol. :returns: Currently channel symbol. `NULLSYMB <geosoft.gxapi.NULLSYMB>` if the mark is not in a channel. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" ret_val = self._get_cur_chan_symb() return ret_val def get_cur_line_symb(self): """ Get current line symbol. :returns: Currently displayed line symbol. `NULLSYMB <geosoft.gxapi.NULLSYMB>` if no line displayed. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_cur_line_symb() return ret_val def get_displ_fid_range(self, start, num): """ Return the displayed fiducial start index & number of cells :param start: Fiducial start :param num: Number of fiducials :type start: int_ref :type num: int_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ start.value, num.value = self._get_displ_fid_range(start.value, num.value) def get_cur_point(self, x, y, z): """ Returns the coordinates of the currently selected point in the database (first value if range selected) :param x: X coordinate (dummy if no selection or if no X channel defined) :param y: Y coordinate (dummy if no selection or if no Y channel defined) :param z: Z coordinate (dummy if no selection or if no Z channel defined) :type x: float_ref :type y: float_ref :type z: float_ref .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ x.value, y.value, z.value = self._get_cur_point(x.value, y.value, z.value) def get_fid_range(self, start, incr, num): """ Returns currently displayed fid range :param start: Fiducial start :param incr: Fiducial increment :param num: Number of fiducials :type start: float_ref :type incr: float_ref :type num: int_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ start.value, incr.value, num.value = self._get_fid_range(start.value, incr.value, num.value) def get_next_line_symb(self): """ Returns the next line symbol. :returns: The next line symbol of currently displayed line. `NULLSYMB <geosoft.gxapi.NULLSYMB>` if no line displayed. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_next_line_symb() return ret_val def get_prev_line_symb(self): """ Returns the previous line symbol. :returns: The previous line symbol of currently displayed line. `NULLSYMB <geosoft.gxapi.NULLSYMB>` if no line displayed. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_prev_line_symb() return ret_val def get_profile_x_axis_options(self, rescale_x, lines, interval): """ Get profile X-axis options :param rescale_x: Auto rescale X-axis :param lines: render vertical grid lines :param interval: vertical lines interval :type rescale_x: bool_ref :type lines: bool_ref :type interval: float_ref .. versionadded:: 9.5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" rescale_x.value, lines.value, interval.value = self._get_profile_x_axis_options(rescale_x.value, lines.value, interval.value) def set_profile_x_axis_options(self, rescale_x, lines, interval): """ Set profile X-axis options :param rescale_x: Auto rescale X-axis :param lines: render vertical grid lines :param interval: vertical lines interval :type rescale_x: bool :type lines: bool :type interval: float .. versionadded:: 9.5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_profile_x_axis_options(rescale_x, lines, interval) def get_profile_range_x(self, min_x, max_x, ph_chan_x): """ Get profile X range and X channel :param min_x: Minimum x :param max_x: Maximum x :param ph_chan_x: X axis channel, `NULLSYMB <geosoft.gxapi.NULLSYMB>` if none :type min_x: float_ref :type max_x: float_ref :type ph_chan_x: int_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ min_x.value, max_x.value, ph_chan_x.value = self._get_profile_range_x(min_x.value, max_x.value, ph_chan_x.value) def get_profile_range_y(self, window, prof, min_y, max_y, scl): """ Get profile Y range and display option :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param prof: Profile number in window (see `window_profiles <geosoft.gxapi.GXEDB.window_profiles>` which returns number of profiles in a window) :param min_y: Minimum y :param max_y: Maximum y :param scl: :ref:`EDB_PROFILE_SCALE` :type window: int :type prof: int :type min_y: float_ref :type max_y: float_ref :type scl: int_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ min_y.value, max_y.value, scl.value = self._get_profile_range_y(window, prof, min_y.value, max_y.value, scl.value) def get_profile_split(self, d1, d2): """ Get profile split for 3 windows. :param d1: Split d1 (profile window 0 height / entire profile window height) :param d2: Split d2 (profile window 1 height / entire profile window height) :type d1: float_ref :type d2: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ d1.value, d2.value = self._get_profile_split(d1.value, d2.value) def get_profile_split5(self, d1, d2, d3, d4): """ Get profile split for 5 windows. :param d1: Split d1 (profile window 0 height / entire profile window height) :param d2: Split d2 (profile window 1 height / entire profile window height) :param d3: Split d3 (profile window 2 height / entire profile window height) :param d4: Split d4 (profile window 3 height / entire profile window height) :type d1: float_ref :type d2: float_ref :type d3: float_ref :type d4: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ d1.value, d2.value, d3.value, d4.value = self._get_profile_split5(d1.value, d2.value, d3.value, d4.value) def get_profile_split_vv(self, vv): """ Get profile window splits. :param vv: Split `GXVV <geosoft.gxapi.GXVV>` (REAL) (profile window heights / entire profile window height) :type vv: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The returned `GXVV <geosoft.gxapi.GXVV>` is sized to the maximum number of profiles that can be displayed. If a profile is not currently displayed, its height fraction is 0. The sum of all the fractions returned is equal to 1. The profile splits refers to the relative sizes of the individual profile windows. To get/set the fraction of the total database window devoted to the profiles, use the `set_split <geosoft.gxapi.GXEDB.set_split>` and `get_split <geosoft.gxapi.GXEDB.get_split>` functions. """ self._get_profile_split_vv(vv) def get_profile_vertical_grid_lines(self, grid, interval): """ Get profile grid vertical line info. :param grid: Vertical grid lines? :param interval: Vertical grid interval :type grid: int_ref :type interval: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ grid.value, interval.value = self._get_profile_vertical_grid_lines(grid.value, interval.value) def get_profile_window(self, window, x, y): """ Get profile window size :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param x: Window x size in pixels :param y: Window y size in pixels :type window: int :type x: int_ref :type y: int_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" x.value, y.value = self._get_profile_window(window, x.value, y.value) def goto_column(self, col): """ Move the channel marker to a specific column. :param col: Channel column number, 0 is first -1 for first column without data :type col: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._goto_column(col) def goto_elem(self, elem): """ Goto an element in the current line. :param elem: Element number :type elem: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._goto_elem(elem) def goto_line(self, line_symb): """ Goto to a line symbol in the editor. :param line_symb: Line symbol to goto to :type line_symb: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._goto_line(line_symb) def histogram(self, st, min, incr, count): """ Create histogram stats. :param st: `GXST <geosoft.gxapi.GXST>` handle to update :param min: Histogram minimum :param incr: Histogram increment :param count: Number of increments :type st: GXST :type min: float :type incr: float :type count: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._histogram(st, min, incr, count) def all_chan_list(self, vv): """ Get a list of the all channels but in the way they are displayed. :param vv: `GXVV <geosoft.gxapi.GXVV>` (INT) in which to place the list. 
:type vv: GXVV :returns: Number of symbols in the list. Terminates GX if there was an error. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The `GXVV <geosoft.gxapi.GXVV>` elements must be INT. Displayed channel lists are filled in the order the channels appear on the display, left to right. .. seealso:: `disp_chan_list <geosoft.gxapi.GXEDB.disp_chan_list>` """ ret_val = self._all_chan_list(vv) return ret_val def channels(self): """ Returns number of displayed channels :returns: x - number of displayed channels :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._channels() return ret_val def disp_chan_list(self, vv): """ Get a list of the displayed channel symbols. :param vv: `GXVV <geosoft.gxapi.GXVV>` (INT) in which to place the list. :type vv: GXVV :returns: Number of symbols in the list. Terminates GX if there was an error. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The `GXVV <geosoft.gxapi.GXVV>` elements must be INT. Displayed channel lists are filled in the order the channels appear on the display, left to right. .. seealso:: `disp_chan_lst <geosoft.gxapi.GXEDB.disp_chan_lst>` """ ret_val = self._disp_chan_list(vv) return ret_val def disp_chan_lst(self, lst): """ Get a list of the displayed channel names. :param lst: `GXLST <geosoft.gxapi.GXLST>` object :type lst: GXLST :returns: Number of channels in the list. Terminates GX if there was an error. 
:rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Displayed channel lists are filled in the order the channels appear on the display, left to right. The channel names will be placed in the "Name" part of the list and the values are set to the symbol handle. .. seealso:: `disp_chan_list <geosoft.gxapi.GXEDB.disp_chan_list>` """ ret_val = self._disp_chan_lst(lst) return ret_val def disp_class_chan_lst(self, lst, class_name): """ Get a list of the displayed channels in a given channel class. :param lst: `GXLST <geosoft.gxapi.GXLST>` object :param class_name: Class name ("" for all) :type lst: GXLST :type class_name: str :returns: Number of channels in the list. Terminates GX if there was an error. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Displayed channel lists are filled in the order the channels appear on the display, left to right. The channel names will be placed in the "Name" part of the list and the values are set to the symbol handle. Examples of channel classes in current use are "MASK" and "ASSAY". (Searches are case tolerant). .. seealso:: `disp_chan_list <geosoft.gxapi.GXEDB.disp_chan_list>` """ ret_val = self._disp_class_chan_lst(lst, class_name.encode()) return ret_val def find_channel_column(self, chan): """ Find the column that contains a channel :param chan: Channel :type chan: str :returns: Column number that contains a specific channel `iDUMMY <geosoft.gxapi.iDUMMY>` of channel not loaded :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._find_channel_column(chan.encode()) return ret_val def find_nearest(self, x, y, z, ipj): """ Find the nearest point on the current line based on X,Y and Z and their projection. :param x: X - Modified with true point :param y: Y - Modified with true point :param z: Z - Modified with true point :param ipj: Projection of X,Y,Z :type x: float_ref :type y: float_ref :type z: float_ref :type ipj: GXIPJ :returns: x - Nearest point -1 - Not available :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, x.value, y.value, z.value = self._find_nearest(x.value, y.value, z.value, ipj) return ret_val def get_cur_chan(self, str_val): """ Get current channel name. :param str_val: Where to put the name :type str_val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Returns "" if mark not currently in a channel. """ str_val.value = self._get_cur_chan(str_val.value.encode()) def get_cur_fid_string(self, val): """ This method returns the currently selected value at the current fid (if available). :param val: String returned here :type val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" val.value = self._get_cur_fid_string(val.value.encode()) def get_cur_line(self, str_val): """ Get current line name. :param str_val: Where to put the name :type str_val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ str_val.value = self._get_cur_line(str_val.value.encode()) def get_cur_mark(self, start, end, inc): """ Returns the current data mark info. :param start: Start fiducial :param end: End fiducial :param inc: Fiducial increment :type start: float_ref :type end: float_ref :type inc: float_ref :returns: 0 - if data is marked. 1 - if data is not currently marked. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, start.value, end.value, inc.value = self._get_cur_mark(start.value, end.value, inc.value) return ret_val def get_current_selection(self, db, chan, line, fid): """ Get current selection information. :param db: Database name :param chan: Name of Selected channel :param line: Selected lines buffer :param fid: Fiducial range :type db: str_ref :type chan: str_ref :type line: str_ref :type fid: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Channel Name Empty if no channel Line Name "[All]" if all lines are selected Fid Range "[All]" if all values in all lines are selected "[None]" if no values are selected "10 to 20" giving the range of values. 
""" db.value, chan.value, line.value, fid.value = self._get_current_selection(db.value.encode(), chan.value.encode(), line.value.encode(), fid.value.encode()) @classmethod def get_databases_lst(cls, lst, path): """ Load the file names of open databases into a `GXLST <geosoft.gxapi.GXLST>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` to load :param path: :ref:`EDB_PATH` :type lst: GXLST :type path: int :returns: The number of documents loaded into the `GXLST <geosoft.gxapi.GXLST>`. The `GXLST <geosoft.gxapi.GXLST>` is cleared first. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDB._get_databases_lst(GXContext._get_tls_geo(), lst, path) return ret_val def get_mark_chan_vv(self, vv, chan): """ Get channel data for the current mark. :param vv: `GXVV <geosoft.gxapi.GXVV>` in which to place the data. :param chan: Channel symbol to retrieve. :type vv: GXVV :type chan: int :returns: 0 if successful. 1 if failed, or if entire database is marked. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The current "mark" in this case is the start and end fiducials and not the selected channel. You can use this method to retrieve the selected range from any channel, loaded or not. The `GXVV <geosoft.gxapi.GXVV>` will be resized to the length of the data """ ret_val = self._get_mark_chan_vv(vv, chan) return ret_val def get_mark_chan_va(self, vv, chan): """ Get channel data for the current mark. :param vv: `GXVA <geosoft.gxapi.GXVA>` in which to place the data. :param chan: Channel symbol to retrieve. :type vv: GXVA :type chan: int :returns: 0 if successful. 
1 if failed, or if entire database is marked. :rtype: int .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The current "mark" in this case is the start and end fiducials and not the selected channel. You can use this method to retrieve the selected range from any channel, loaded or not. The `GXVA <geosoft.gxapi.GXVA>` will be resized to the length of the data """ ret_val = self._get_mark_chan_va(vv, chan) return ret_val def get_name(self, name): """ Get the name of the database object of this `GXEDB <geosoft.gxapi.GXEDB>`. :param name: Name returned :type name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ name.value = self._get_name(name.value.encode()) def get_profile_parm_int(self, window, prof, parm): """ Get integer profile parameter :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param prof: Profile number in window (see `get_profile_range_y <geosoft.gxapi.GXEDB.get_profile_range_y>`) :param parm: :ref:`EDB_PROF` :type window: int :type prof: int :type parm: int :returns: Data Value (See notes) :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_profile_parm_int(window, prof, parm) return ret_val def get_window_state(self): """ Retrieve the current state of the database window :returns: :ref:`EDB_WINDOW_STATE` :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_window_state() return ret_val @classmethod def have_current(cls): """ Checks if any database is currently loaded :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDB._have_current(GXContext._get_tls_geo()) return ret_val def is_locked(self): """ Checks if the database locked :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._is_locked() return ret_val @classmethod def loaded(cls, name): """ Checks if a specific database is loaded. :param name: Database name :type name: str :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEDB._loaded(GXContext._get_tls_geo(), name.encode()) return ret_val def profile_open(self, window): """ Return TRUE or FALSE if profile window is open :param window: Profile window number: 0 is the top window 1 is the middle window 2 is the bottom window :type window: int :returns: TRUE if window is open FALSE if window is closed :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** This functions will return FALSE if requested window is not supported in current version of Oasis montaj. """ ret_val = self._profile_open(window) return ret_val def read_only(self): """ Checks if a database is currently opened in a read-only mode. :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._read_only() return ret_val def get_window_position(self, left, top, right, bottom, state, is_floating): """ Get the map window's position and dock state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`EDB_WINDOW_STATE` :param is_floating: Docked or floating :ref:`EDB_WINDOW_POSITION` :type left: int_ref :type top: int_ref :type right: int_ref :type bottom: int_ref :type state: int_ref :type is_floating: int_ref .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ left.value, top.value, right.value, bottom.value, state.value, is_floating.value = self._get_window_position(left.value, top.value, right.value, bottom.value, state.value, is_floating.value) def set_window_position(self, left, top, right, bottom, state, is_floating): """ Get the map window's position and dock state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`EDB_WINDOW_STATE` :param is_floating: Docked or floating :ref:`EDB_WINDOW_POSITION` :type left: int :type top: int :type right: int :type bottom: int :type state: int :type is_floating: int .. 
versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_window_position(left, top, right, bottom, state, is_floating) def show_profile_name(self, state, chan): """ Show a profile in the profile window :param state: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param chan: Name of the channel :type state: int :type chan: str :returns: Profile number in window (see `get_profile_range_y <geosoft.gxapi.GXEDB.get_profile_range_y>`), -1 for error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the symbol is not loaded, it will be loaded. """ ret_val = self._show_profile_name(state, chan.encode()) return ret_val def profile_shown(self, chan): """ Return index of first profile window in which a profile is shown :param chan: Name of the channel :type chan: str :returns: Index of first profile window in which it is loaded, -1 if not loaded in any profile window :rtype: int .. versionadded:: 9.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Returns -1 if channel profile is not loaded. """ ret_val = self._profile_shown(chan.encode()) return ret_val def get_window_y_axis_direction(self, window): """ Get the y-axis direction for a window :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :type window: int :returns: :ref:`EDB_YAXIS_DIRECTION` :rtype: int .. 
versionadded:: 8.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_window_y_axis_direction(window) return ret_val def window_profiles(self, window): """ Get number of profiles in a window :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :type window: int :returns: Number of profiles in a window :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._window_profiles(window) return ret_val def launch_histogram(self, chan): """ Launch histogram tool on a database. :param chan: First chan name :type chan: str .. versionadded:: 5.0.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `GXCHIMERA.launch_histogram <geosoft.gxapi.GXCHIMERA.launch_histogram>` in chimera.gxh """ self._launch_histogram(chan.encode()) def launch_scatter(self): """ Launch scatter tool on a database. .. versionadded:: 5.0.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The scatter tool uses the following INI parameters SCATTER.STM name of the scatter template,"none" for none SCATTER.STM_NAME name of last template section, "" for none. SCATTER.X name of channel to display in X SCATTER.Y name of channel to display in Y SCATTER.MASK name of channel to use for mask .. 
        seealso:: `GXCHIMERA.launch_scatter <geosoft.gxapi.GXCHIMERA.launch_scatter>` in chimera.gxh
        """
        self._launch_scatter()

    @classmethod
    def load(cls, name):
        """
        Loads a list of databases into the workspace

        :param name: List of databases (';' or '|' delimited) to load.
        :type  name: str

        :returns: Handle to current edited database, which will be the last database in the list.
        :rtype:   GXEDB

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Limitations:** May not be available while executing a command line program.

        **Note:** The last listed database will become the current database.

        Databases may already be loaded.

        Only the first file in the list may have a directory path.
        All other files in the list are assumed to be in the same
        directory as the first file.
        """
        # Delegates to the compiled (Cython) implementation layer; names are
        # passed to the C API as encoded bytes.
        ret_val = gxapi_cy.WrapEDB._load(GXContext._get_tls_geo(), name.encode())
        return GXEDB(ret_val)

    @classmethod
    def load_no_activate(cls, name):
        """
        Loads documents into the workspace

        :param name: List of documents (';' or '|' delimited) to load.
        :type  name: str

        :returns: Handle to current edited document, which will be the last database in the list if multiple files were provided.
        :rtype:   GXEDB

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Limitations:** May not be available while executing a command line program.

        **Note:** This function acts just like `load <geosoft.gxapi.GXEDB.load>` except that the document(s) is not activated (brought to foreground) and no
        guarantee is given about which document is currently active.
        """
        ret_val = gxapi_cy.WrapEDB._load_no_activate(GXContext._get_tls_geo(), name.encode())
        return GXEDB(ret_val)

    def load_all_chans(self):
        """
        Load all channels into current database

        ..
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._load_all_chans() def load_chan(self, chan): """ Load a channel into current database :param chan: Channel name :type chan: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the channel does not exist, or if channel is already loaded nothing happens. """ self._load_chan(chan.encode()) @classmethod def load_new(cls, name): """ Loads a database into the workspace, flags as new. :param name: Database to load. :type name: str :returns: Handle to the current edited database. :rtype: GXEDB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** See `load <geosoft.gxapi.GXEDB.load>`. This is used for brand new databases, to set an internal flag such that if on closing the user chooses not to save changes, the database is deleted. """ ret_val = gxapi_cy.WrapEDB._load_new(GXContext._get_tls_geo(), name.encode()) return GXEDB(ret_val) @classmethod def load_pass(cls, name, login, password): """ Loads a database into the editor with login and password. :param name: Name of database to load :param login: <NAME> :param password: <PASSWORD> :type name: str :type login: str :type password: str :returns: Handle to current edited database. :rtype: GXEDB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** The loaded database will become the current database. If the database is already loaded, it simply becomes the current database. """ ret_val = gxapi_cy.WrapEDB._load_pass(GXContext._get_tls_geo(), name.encode(), login.encode(), password.encode()) return GXEDB(ret_val) @classmethod def load_with_view(cls, name, p2): """ Load an `GXEDB <geosoft.gxapi.GXEDB>` with the view from a current `GXEDB <geosoft.gxapi.GXEDB>`. :param name: Source `GXDB <geosoft.gxapi.GXDB>` name :param p2: `GXEDB <geosoft.gxapi.GXEDB>` to use as the source view :type name: str :type p2: GXEDB :returns: New `GXEDB <geosoft.gxapi.GXEDB>` handle. :rtype: GXEDB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Can only be run in interactive mode. Is used by dbsubset to create a new database with the same view as previously. """ ret_val = gxapi_cy.WrapEDB._load_with_view(GXContext._get_tls_geo(), name.encode(), p2) return GXEDB(ret_val) def lock(self): """ This method locks the Edited Database. :returns: Handle to database associated with edited database. :rtype: GXDB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._lock() return GXDB(ret_val) def make_current(self): """ Makes this `GXEDB <geosoft.gxapi.GXEDB>` object the current active object to the user. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" self._make_current() def remove_profile(self, window, prof): """ Remove a profile from the profile window :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param prof: Profile number in window (see `get_profile_range_y <geosoft.gxapi.GXEDB.get_profile_range_y>`) :type window: int :type prof: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._remove_profile(window, prof) def get_cur_fid(self): """ This method returns the currently selected fiducial if the user is selecting a fiducial. If not, it returns a dummy. :returns: x - Fiducial DUMMY - No Selected Fiducial :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_cur_fid() return ret_val def get_profile_parm_double(self, window, prof, parm): """ Get real profile parameter :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param prof: Profile number in window (see `get_profile_range_y <geosoft.gxapi.GXEDB.get_profile_range_y>`) :param parm: :ref:`EDB_PROF` :type window: int :type prof: int :type parm: int :returns: Real profile parameter :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" ret_val = self._get_profile_parm_double(window, prof, parm) return ret_val def get_split(self): """ Get split ratio between spreadsheet and profile sections. :returns: d = (spreadsheet window height/ (spreadsheet window height + entire profile window height)) :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_split() return ret_val def run_channel_maker(self, chan): """ Run the maker for a single channel. :param chan: Channel name :type chan: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Skips channels without makers; will not return an error if the channel does not exist. """ self._run_channel_maker(chan.encode()) def run_channel_makers(self): """ Recreate channels with makers. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Skips channels without makers. """ self._run_channel_makers() def set_cur_line(self, line): """ Set the current line name. :param line: Line name :type line: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_cur_line(line.encode()) def set_cur_line_no_message(self, str_val): """ Set Line but do not send a message. :param str_val: Line name :type str_val: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_cur_line_no_message(str_val.encode()) def set_cur_mark(self, start, end): """ Set the current mark. :param start: Start fiducial :param end: End fiducial :type start: float :type end: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_cur_mark(start, end) def set_profile_parm_i(self, window, prof, parm, value): """ Set integer profile parameter :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param prof: Profile number in window (see `get_profile_range_y <geosoft.gxapi.GXEDB.get_profile_range_y>`) :param parm: :ref:`EDB_PROF` :param value: Setting :type window: int :type prof: int :type parm: int :type value: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_profile_parm_i(window, prof, parm, value) def set_profile_parm_r(self, window, prof, parm, value): """ Set real profile parameter :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param prof: Profile number in window (see `get_profile_range_y <geosoft.gxapi.GXEDB.get_profile_range_y>`) :param parm: :ref:`EDB_PROF` :param value: Setting :type window: int :type prof: int :type parm: int :type value: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_profile_parm_r(window, prof, parm, value) def set_profile_range_x(self, min_x, max_x, x_ch): """ Set profile X range and X channel :param min_x: Minimum x, `rDUMMY <geosoft.gxapi.rDUMMY>` for data minimum :param max_x: Maximum x, `rDUMMY <geosoft.gxapi.rDUMMY>` for data maximum :param x_ch: X axis channel, `NULLSYMB <geosoft.gxapi.NULLSYMB>` to use fids :type min_x: float :type max_x: float :type x_ch: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_profile_range_x(min_x, max_x, x_ch) def set_profile_range_y(self, window, prof, min_y, max_y, scl): """ Set profile Y range and display option :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param prof: Profile number in window (see `get_profile_range_y <geosoft.gxapi.GXEDB.get_profile_range_y>`) :param min_y: Minimum y :param max_y: Maximum y :param scl: :ref:`EDB_PROFILE_SCALE` :type window: int :type prof: int :type min_y: float :type max_y: float :type scl: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If channel is not loaded or displayed, it will loaded and/or displayed. 
""" self._set_profile_range_y(window, prof, min_y, max_y, scl) def profile_rescale_all(self, window): """ Rescale all profiles in a selected window in both X and Y, based on current scaling selections :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, see `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :type window: int .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._profile_rescale_all(window) def set_profile_split(self, d1, d2): """ Set profile split for 3 windows. :param d1: Split d1 (profile window 0 height / entire profile window height) :param d2: Split d2 (profile window 1 height / entire profile window height) :type d1: float :type d2: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_profile_split(d1, d2) def set_profile_split5(self, d1, d2, d3, d4): """ Set profile split for 5 windows. :param d1: Split d1 (profile window 0 height / entire profile window height) :param d2: Split d2 (profile window 1 height / entire profile window height) :param d3: Split d3 (profile window 2 height / entire profile window height) :param d4: Split d4 (profile window 3 height / entire profile window height) :type d1: float :type d2: float :type d3: float :type d4: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" self._set_profile_split5(d1, d2, d3, d4) def set_profile_split_vv(self, vv): """ Set profile splits :param vv: Split `GXVV <geosoft.gxapi.GXVV>` (REAL) (relative sizes of each profile window) :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The input `GXVV <geosoft.gxapi.GXVV>` values are the fractional heights for each profile window. Values are summed, and normalized (so you can enter "1,1,1", with a `GXVV <geosoft.gxapi.GXVV>` of length 3, if you want 3 equal profile windows). `GXVV <geosoft.gxapi.GXVV>` values beyond the maximum number of displayable profiles (`MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`) are ignored. """ self._set_profile_split_vv(vv) def set_split(self, d): """ Set split ratio between spreadsheet and profile sections. :param d: Split d (0.0 <= d <= 1.0). :type d: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** d = (spreadsheet window height/ (spreadsheet window height + entire profile window height)) """ self._set_split(d) def set_window_state(self, state): """ Changes the state of the database window :param state: :ref:`EDB_WINDOW_STATE` :type state: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_window_state(state) def show_profile(self, window, symb): """ Show a profile in the profile window :param window: Profile window number (0 to `MAX_PROF_WND <geosoft.gxapi.MAX_PROF_WND>`-1, -1 to plot to the currently selected profile window. 
See `profile_open <geosoft.gxapi.GXEDB.profile_open>`) :param symb: Channel symbol :type window: int :type symb: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the symbol is not loaded, it will be loaded. """ self._show_profile(window, symb) def statistics(self, st): """ Add all currently selected data to the `GXST <geosoft.gxapi.GXST>`. :param st: `GXST <geosoft.gxapi.GXST>` handle to update :type st: GXST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Use `histogram <geosoft.gxapi.GXEDB.histogram>` to get median or histogram. """ self._statistics(st) @classmethod def un_load(cls, name): """ Unloads an edited database. :param name: Name of database to unload :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the database is not loaded, nothing happens. Same as `un_load_verify <geosoft.gxapi.GXEDB.un_load_verify>` with FALSE to prompt save. """ gxapi_cy.WrapEDB._un_load(GXContext._get_tls_geo(), name.encode()) @classmethod def un_load_all(cls): """ Unloads all opened databases .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEDB._un_load_all(GXContext._get_tls_geo()) def un_load_all_chans(self): """ Unload all channels into current database .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._un_load_all_chans() def un_load_chan(self, chan): """ Unload a channel into current database :param chan: Channel name :type chan: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the channel does not exist, or if channel is already loaded nothing happens. """ self._un_load_chan(chan.encode()) @classmethod def un_load_discard(cls, name): """ Unloads a database in the workspace, discards changes. :param name: Name of database to unload :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the database is not loaded, nothing happens. """ gxapi_cy.WrapEDB._un_load_discard(GXContext._get_tls_geo(), name.encode()) @classmethod def un_load_verify(cls, name, prompt): """ Unloads an edited database, optional prompt to save. :param name: Name of database to unload :param prompt: :ref:`EDB_UNLOAD` :type name: str :type prompt: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the database is not loaded, nothing happens. The user can be prompted to save before unloading. If `EDB_UNLOAD_NO_PROMPT <geosoft.gxapi.EDB_UNLOAD_NO_PROMPT>`, data is always saved. 
EDB_UNLOAD_MULTIPROMPT is now obsolete and is equivalent to `EDB_UNLOAD_SINGLE_PROMPT <geosoft.gxapi.EDB_UNLOAD_SINGLE_PROMPT>`. """ gxapi_cy.WrapEDB._un_load_verify(GXContext._get_tls_geo(), name.encode(), prompt) def un_lock(self): """ This method unlocks the Edited Database. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._un_lock() # External Window @classmethod def load_control(cls, db_file, window): """ Version of `load <geosoft.gxapi.GXEDB.load>` that can be used to load a database via subclassing into a Windows control. :param db_file: Database filename :param window: Window handle to receive document :type db_file: str :type window: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEDB._load_control(GXContext._get_tls_geo(), db_file.encode(), window) @classmethod def load_new_control(cls, db_file, window): """ Version of `load_new <geosoft.gxapi.GXEDB.load_new>` that can be used to load a database via subclassing into a Windows control. :param db_file: Database filename :param window: Window handle to receive document :type db_file: str :type window: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEDB._load_new_control(GXContext._get_tls_geo(), db_file.encode(), window) @classmethod def load_pass_control(cls, db_file, user, password, window): """ Version of `load_pass <geosoft.gxapi.GXEDB.load_pass>` that can be used to load a database via subclassing into a Windows control. 
:param db_file: Database filename :param user: Login name :param password: <PASSWORD> :param window: Window handle to receive document :type db_file: str :type user: str :type password: str :type window: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEDB._load_pass_control(GXContext._get_tls_geo(), db_file.encode(), user.encode(), password.encode(), window) @classmethod def load_with_view_control(cls, db_file, edb, window): """ Version of `load_with_view <geosoft.gxapi.GXEDB.load_with_view>` that can be used to load a database via subclassing into a Windows control. :param db_file: Database filename :param edb: `GXEDB <geosoft.gxapi.GXEDB>` handle to use as the source view :param window: Window handle to receive document :type db_file: str :type edb: GXEDB :type window: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEDB._load_with_view_control(GXContext._get_tls_geo(), db_file.encode(), edb, window) # Obsolete def load_channel_after(self, chan_to_move, chan_to_move_after): """ Loads the channel after specified channel :param chan_to_move: Channel name :param chan_to_move_after: Channel name :type chan_to_move: str :type chan_to_move_after: str .. versionadded:: 2023.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the channel to locate does not exist it will load the channel in the default location. If the channel to be loaded is already loaded, no changes will be made. 
""" self._load_channel_after(chan_to_move.encode(), chan_to_move_after.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXLL2.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXLL2(gxapi_cy.WrapLL2): """ GXLL2 class. local datum lookup creator ll2 methods are used to create `GXLL2 <geosoft.gxapi.GXLL2>` objects. `GXLL2 <geosoft.gxapi.GXLL2>` objects contain latitude, longitude correction lookup tables to convert between datums. """ def __init__(self, handle=0): super(GXLL2, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXLL2 <geosoft.gxapi.GXLL2>` :returns: A null `GXLL2 <geosoft.gxapi.GXLL2>` :rtype: GXLL2 """ return GXLL2() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, lon0, lat0, lon, lat, nlon, nlat, in_ipj, out_ipj): """ Create an empty `GXLL2 <geosoft.gxapi.GXLL2>` table to be filled :param lon0: Longitude origin :param lat0: Latitude origin :param lon: Longitude increment :param lat: Latitude increment :param nlon: # longitudes :param nlat: # latitudes :param in_ipj: Input projection :param out_ipj: Output projection :type lon0: float :type lat0: float :type lon: float :type lat: float :type nlon: int :type nlat: int :type in_ipj: GXIPJ :type out_ipj: GXIPJ :returns: `GXLL2 <geosoft.gxapi.GXLL2>` Object :rtype: GXLL2 .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ .. seealso:: `destroy <geosoft.gxapi.GXLL2.destroy>`, `set_row <geosoft.gxapi.GXLL2.set_row>`, `save <geosoft.gxapi.GXLL2.save>` """ ret_val = gxapi_cy.WrapLL2._create(GXContext._get_tls_geo(), lon0, lat0, lon, lat, nlon, nlat, in_ipj, out_ipj) return GXLL2(ret_val) def save(self, name): """ Save an `GXLL2 <geosoft.gxapi.GXLL2>` to a named resource :param name: Named resource :type name: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The named resource is the name of the datum transform define inside square brackets in the datum transform name in the datumtrf table. """ self._save(name.encode()) def set_row(self, row, lon_vv, lat_vv): """ Define a row of the `GXLL2 <geosoft.gxapi.GXLL2>` :param row: The row to set :param lon_vv: Longitude corrections :param lat_vv: Latitude corrections :type row: int :type lon_vv: GXVV :type lat_vv: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The correction data is in degrees, added to the input datum to product output datum results. The `GXVV <geosoft.gxapi.GXVV>` lengths must be equal to #longitudes defined by `create <geosoft.gxapi.GXLL2.create>`. """ self._set_row(row, lon_vv, lat_vv) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXHGD.rst .. _GXHGD: GXHGD class ================================== .. autoclass:: geosoft.gxapi.GXHGD :members: <file_sep>/geosoft/gxpy/group.py """ A Geosoft View (:class:`geosoft.gxpy.view.View` or :class:`geosoft.gxpy.view.View_3d`) contains graphical elements as `Group` instances. Groups are named and are available to a user in a Geosoft viewer, which allows groups to be turned on or off, modify the transparency, or be deleted. 2D views can only accept 2D groups, while a 3D view can accept both 2D and 3D groups. When a 2D group is placed in a 3D view, the group is placed on a the active plane inside the 3D view :Classes: :`Group`: base class for named rendering groups in 2D and 3D views. :`Draw`: 2D drawing group, handles 2D drawing to a view or plane in a 3D view :`Draw_3d`: 3D grawing group for 3D objects placed in a 3d view :`Color_symbols_group`: group for 2D symbols rendered based on data values :`Aggregate_group`: group that contains a :class:`geosoft.gxpy.agg.Aggregate_image` instance :`Color`: colour definition :`Color_map`: maps values to colors :`Pen`: pen definition, includes line colour, thickness and pattern, and fill. 
:`Text_def`: defined text characteristics :`VoxDisplayGroup`: a 'geosoft.gxpy.vox.VoxDisplay` in a `geosoft.gxpy.view.View_3d` :Constants: :GROUP_NAME_SIZE: `geosoft.gxpy.view.VIEW_NAME_SIZE` :NEW: `geosoft.gxapi.MVIEW_GROUP_NEW` :APPEND: `geosoft.gxapi.MVIEW_GROUP_APPEND` :READ_ONLY: max(NEW, APPEND) + 1 :REPLACE: READ_ONLY + 1 :SMOOTH_NONE: `geosoft.gxapi.MVIEW_SMOOTH_NEAREST` :SMOOTH_CUBIC: `geosoft.gxapi.MVIEW_SMOOTH_CUBIC` :SMOOTH_AKIMA: `geosoft.gxapi.MVIEW_SMOOTH_AKIMA` :TILE_RECTANGULAR: `geosoft.gxapi.MVIEW_TILE_RECTANGULAR` :TILE_DIAGONAL: `geosoft.gxapi.MVIEW_TILE_DIAGONAL` :TILE_TRIANGULAR: `geosoft.gxapi.MVIEW_TILE_TRIANGULAR` :TILE_RANDOM: `geosoft.gxapi.MVIEW_TILE_RANDOM` :UNIT_VIEW: 0 :UNIT_MAP: 2 :UNIT_VIEW_UNWARPED: 3 :GRATICULE_DOT: 0 :GRATICULE_LINE: 1 :GRATICULE_CROSS: 2 :LINE_STYLE_SOLID: 1 :LINE_STYLE_LONG: 2 :LINE_STYLE_DOTTED: 3 :LINE_STYLE_SHORT: 4 :LINE_STYLE_LONG_SHORT_LONG: 5 :LINE_STYLE_LONG_DOT_LONG: 6 :SYMBOL_NONE: 0 :SYMBOL_DOT: 1 :SYMBOL_PLUS: 2 :SYMBOL_X: 3 :SYMBOL_BOX: 4 :SYMBOL_TRIANGLE: 5 :SYMBOL_INVERTED_TRIANGLE: 6 :SYMBOL_HEXAGON: 7 :SYMBOL_SMALL_BOX: 8 :SYMBOL_SMALL_DIAMOND: 9 :SYMBOL_CIRCLE: 20 :SYMBOL_3D_SPHERE: 0 :SYMBOL_3D_CUBE: 1 :SYMBOL_3D_CYLINDER: 2 :SYMBOL_3D_CONE: 3 :FONT_WEIGHT_ULTRALIGHT: 1 :FONT_WEIGHT_LIGHT: 2 :FONT_WEIGHT_MEDIUM: 3 :FONT_WEIGHT_BOLD: 4 :FONT_WEIGHT_XBOLD: 5 :FONT_WEIGHT_XXBOLD: 6 :CMODEL_RGB: 0 :CMODEL_CMY: 1 :CMODEL_HSV: 2 :C_BLACK: 67108863 :C_RED: 33554687 :C_GREEN: 33619712 :C_BLUE: 50266112 :C_CYAN: 50331903 :C_MAGENTA: 50396928 :C_YELLOW: 67043328 :C_GREY: 41975936 :C_LT_RED: 54542336 :C_LT_GREEN: 54526016 :C_LT_BLUE: 50348096 :C_LT_CYAN: 50331712 :C_LT_MAGENTA: 50348032 :C_LT_YELLOW: 54525952 :C_LT_GREY: 54542400 :C_GREY10: 51910680 :C_GREY25: 54542400 :C_GREY50: 41975936 :C_WHITE: 50331648 :C_TRANSPARENT: 0 :REF_BOTTOM_LEFT: 0 :REF_BOTTOM_CENTER: 1 :REF_BOTTOM_RIGHT: 2 :REF_CENTER_LEFT: 3 :REF_CENTER: 4 :REF_CENTER_RIGHT: 5 :REF_TOP_LEFT: 6 :REF_TOP_CENTER: 7 :REF_TOP_RIGHT: 8 
:GROUP_ALL: 0 :GROUP_MARKED: 1 :GROUP_VISIBLE: 2 :GROUP_AGG: 3 :GROUP_CSYMB: 4 :GROUP_VOXD: 5 :LOCATE_FIT: `geosoft.gxapi.MVIEW_RELOCATE_FIT` :LOCATE_FIT_KEEP_ASPECT: `geosoft.gxapi.MVIEW_RELOCATE_ASPECT` :LOCATE_CENTER: `geosoft.gxapi.MVIEW_RELOCATE_ASPECT_CENTER` :COLOR_BAR_RIGHT: 0 :COLOR_BAR_LEFT: 1 :COLOR_BAR_BOTTOM: 2 :COLOR_BAR_TOP: 3 :COLOR_BAR_ANNOTATE_RIGHT: 1 :COLOR_BAR_ANNOTATE_LEFT: -1 :COLOR_BAR_ANNOTATE_TOP: 1 :COLOR_BAR_ANNOTATE_BOTTOM: -1 :CYLINDER_OPEN: 0 :CYLINDER_CLOSE_START: 1 :CYLINDER_CLOSE_END: 2 :CYLINDER_CLOSE_ALL: 3 :POINT_STYLE_DOT: 0 :POINT_STYLE_SPHERE: 1 :LINE3D_STYLE_LINE: 0 :LINE3D_STYLE_TUBE: 1 :LINE3D_STYLE_TUBE_JOINED: 2 :SURFACE_FLAT: `geosoft.gxapi.MVIEW_DRAWOBJ3D_MODE_FLAT` :SURFACE_SMOOTH: `geosoft.gxapi.MVIEW_DRAWOBJ3D_MODE_SMOOTH` .. note:: Regression tests provide usage examples: `group drawing tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_group.py>`_ .. seealso:: :mod:`geosoft.gxpy.view`, :mod:`geosoft.gxpy.map` :class:`geosoft.gxapi.GXMVIEW`, :class:`geosoft.gxapi.GXMVU` """ from functools import wraps import threading import os import numpy as np import geosoft import geosoft.gxapi as gxapi from . import gx from . import vv as gxvv from . import geometry as gxgm from . import coordinate_system as gxcs from . import utility as gxu from . import view as gxv from . import agg as gxagg from . import metadata as gxmeta from . import vox_display as gxvoxd from . import spatialdata as gxspd __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) MAX_TRANSPARENT = 4 class GroupException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.group`. .. 
versionadded:: 9.2 """ pass GROUP_NAME_SIZE = gxv.VIEW_NAME_SIZE NEW = gxapi.MVIEW_GROUP_NEW APPEND = gxapi.MVIEW_GROUP_APPEND READ_ONLY = max(NEW, APPEND) + 1 REPLACE = READ_ONLY + 1 SMOOTH_NONE = gxapi.MVIEW_SMOOTH_NEAREST SMOOTH_CUBIC = gxapi.MVIEW_SMOOTH_CUBIC SMOOTH_AKIMA = gxapi.MVIEW_SMOOTH_AKIMA TILE_RECTANGULAR = gxapi.MVIEW_TILE_RECTANGULAR TILE_DIAGONAL = gxapi.MVIEW_TILE_DIAGONAL TILE_TRIANGULAR = gxapi.MVIEW_TILE_TRIANGULAR TILE_RANDOM = gxapi.MVIEW_TILE_RANDOM UNIT_VIEW = 0 UNIT_MAP = 2 UNIT_VIEW_UNWARPED = 3 GRATICULE_DOT = 0 GRATICULE_LINE = 1 GRATICULE_CROSS = 2 LINE_STYLE_SOLID = 1 LINE_STYLE_LONG = 2 LINE_STYLE_DOTTED = 3 LINE_STYLE_SHORT = 4 LINE_STYLE_LONG_SHORT_LONG = 5 LINE_STYLE_LONG_DOT_LONG = 6 SYMBOL_NONE = 0 SYMBOL_DOT = 1 SYMBOL_PLUS = 2 SYMBOL_X = 3 SYMBOL_BOX = 4 SYMBOL_TRIANGLE = 5 SYMBOL_INVERTED_TRIANGLE = 6 SYMBOL_HEXAGON = 7 SYMBOL_SMALL_BOX = 8 SYMBOL_SMALL_DIAMOND = 9 SYMBOL_CIRCLE = 20 SYMBOL_3D_SPHERE = 0 SYMBOL_3D_CUBE = 1 SYMBOL_3D_CYLINDER = 2 SYMBOL_3D_CONE = 3 _weight_factor = (1.0 / 48.0, 1.0 / 24.0, 1.0 / 16.0, 1.0 / 12.0, 0.145, 1.0 / 4.0) FONT_WEIGHT_ULTRALIGHT = 1 FONT_WEIGHT_LIGHT = 2 FONT_WEIGHT_MEDIUM = 3 FONT_WEIGHT_BOLD = 4 FONT_WEIGHT_XBOLD = 5 FONT_WEIGHT_XXBOLD = 6 CMODEL_RGB = 0 CMODEL_CMY = 1 CMODEL_HSV = 2 C_BLACK = 67108863 C_RED = 33554687 C_GREEN = 33619712 C_BLUE = 50266112 C_CYAN = 50331903 C_MAGENTA = 50396928 C_YELLOW = 67043328 C_GREY = 41975936 C_LT_RED = 54542336 C_LT_GREEN = 54526016 C_LT_BLUE = 50348096 C_LT_CYAN = 50331712 C_LT_MAGENTA = 50348032 C_LT_YELLOW = 54525952 C_LT_GREY = 54542400 C_GREY10 = 51910680 C_GREY25 = 54542400 C_GREY50 = 41975936 C_WHITE = 50331648 C_TRANSPARENT = 0 REF_BOTTOM_LEFT = 0 REF_BOTTOM_CENTER = 1 REF_BOTTOM_RIGHT = 2 REF_CENTER_LEFT = 3 REF_CENTER = 4 REF_CENTER_RIGHT = 5 REF_TOP_LEFT = 6 REF_TOP_CENTER = 7 REF_TOP_RIGHT = 8 GROUP_ALL = 0 GROUP_MARKED = 1 GROUP_VISIBLE = 2 GROUP_AGG = 3 GROUP_CSYMB = 4 GROUP_VOXD = 5 LOCATE_FIT = gxapi.MVIEW_RELOCATE_FIT 
LOCATE_FIT_KEEP_ASPECT = gxapi.MVIEW_RELOCATE_ASPECT
LOCATE_CENTER = gxapi.MVIEW_RELOCATE_ASPECT_CENTER

COLOR_BAR_RIGHT = 0
COLOR_BAR_LEFT = 1
COLOR_BAR_BOTTOM = 2
COLOR_BAR_TOP = 3
COLOR_BAR_ANNOTATE_RIGHT = 1
COLOR_BAR_ANNOTATE_LEFT = -1
COLOR_BAR_ANNOTATE_TOP = 1
COLOR_BAR_ANNOTATE_BOTTOM = -1

CYLINDER_OPEN = 0
CYLINDER_CLOSE_START = 1
CYLINDER_CLOSE_END = 2
CYLINDER_CLOSE_ALL = 3

POINT_STYLE_DOT = 0
POINT_STYLE_SPHERE = 1
LINE3D_STYLE_LINE = 0
LINE3D_STYLE_TUBE = 1
LINE3D_STYLE_TUBE_JOINED = 2

SURFACE_FLAT = gxapi.MVIEW_DRAWOBJ3D_MODE_FLAT
SURFACE_SMOOTH = gxapi.MVIEW_DRAWOBJ3D_MODE_SMOOTH

# metadata attribute path that carries a group's unit of measure
_uom_attr = '/geosoft/data/unit_of_measure'


def face_normals_np(faces, verticies):
    """
    Return face normals from triangular faces, assuming right-hand winding
    of the vertex indexes for each face.

    :param faces:     triangle indexes into `verticies`, shaped (-1, 3)
    :param verticies: vertex locations (x, y, z), shaped (-1, 3)
    :return:          face normals shaped (-1, 3)

    The normal of a zero-area face will be np.nan.

    .. versionadded:: 9.3.1
    """
    tris = verticies[faces]
    # cross product of two triangle edges gives the (unnormalized) face normal
    n = np.cross(tris[::, 1] - tris[::, 0], tris[::, 2] - tris[::, 1])
    return gxu.vector_normalize(n)


def vertex_normals_np(faces, verticies, normal_area=True):
    """
    Return vertex normals from triangular faces, assuming right-hand winding
    of the vertex indexes for each face.

    :param faces:       triangle indexes into `verticies`, shaped (-1, 3)
    :param verticies:   vertex locations (x, y, z), shaped (-1, 3)
    :param normal_area: True to weight normals by the area of the connected faces.
    :return:            vertex normals shaped (-1, 3)

    .. versionadded:: 9.3.1
    """
    n = face_normals_np(faces, verticies)
    if not normal_area:
        n = gxu.vector_normalize(n)
    normals = np.zeros(verticies.shape, dtype=np.float64)
    # fix: `normals[faces[:, i]] += n` is buffered fancy indexing and applies
    # only one contribution per repeated vertex index; np.add.at performs the
    # required unbuffered accumulation over all faces sharing a vertex.
    np.add.at(normals, faces[:, 0], n)
    np.add.at(normals, faces[:, 1], n)
    np.add.at(normals, faces[:, 2], n)
    return gxu.vector_normalize(normals)


def vertex_normals_vv(faces, verticies, normal_area=True):
    """
    Return vertex normals from triangular faces, assuming right-hand winding
    of the vertex indexes for each face.

    :param faces:       (i1, i2, i3) `geosoft.gxpy.vv.GXvv` triangle indexes into verticies
    :param verticies:   (vx, vy, vz) `geosoft.gxpy.vv.GXvv` verticies
    :param normal_area: True to weight normals by the area of the connected faces.
    :return:            (nx, ny, nz) `geosoft.gxpy.vv.GXvv` normals

    .. versionadded:: 9.3.1
    """
    faces = gxvv.np_from_vvset(faces)
    verticies = gxvv.np_from_vvset(verticies)
    n = vertex_normals_np(faces, verticies, normal_area=normal_area)
    return gxvv.GXvv(n[:, 0]), gxvv.GXvv(n[:, 1]), gxvv.GXvv(n[:, 2])


def color_from_string(cstr):
    """
    Return a Geosoft color number from a color string.

    :param cstr: color string (see below)
    :returns:    color

    Color strings may be "R", "G", "B", "C", "M", "Y", "H", "S", "V", or "K",
    or a combination of these characters, each followed by up to three digits
    specifying a number between 0 and 255. An empty string produces C_ANY_NONE.
    Stay within one color model: RGB, CMY, HSV or K.
    For example "R", "R127G22", "H255S127V32". Characters are not case sensitive.

    .. versionadded:: 9.3
    """
    return gxapi.GXMVIEW.color(str(cstr))
versionadded:: 9.2 """ if not isinstance(area, gxgm.Point2): area = gxgm.Point2(area) centroid = area.centroid half_dim = gxgm.Point(area.dimension) * 0.5 xoff = yoff = 0.0 if reference in (0, 1, 2): yoff = -half_dim.y elif reference in (6, 7, 8): yoff = half_dim.y if reference in (0, 3, 6): xoff = -half_dim.x elif reference in (2, 5, 8): xoff = half_dim.x return centroid + gxgm.Point((xoff, yoff)) class Group: """ Geosoft group class. :parameters: :view: gxpy.View :name: group name, default is "_". :plane: plane number, or plane name if drawing to a 3D view. Default is plane number 0. :view_lock: True to lock the view for a single-stream drawing group. Default is False. :unit_of_measure: unit of measurement for data in this group, default is '' :group_3d: True for a 3D drawing group, default assumes a 2D drawing group to a plane. :mode: `APPEND` (default), `NEW` or `READ_ONLY` :Properties: :view: the :class:`geosoft.gxpy.view.View` instance that contains this group :name: the name of the group :unit_of_measure: the unit of measurement (uom) for this data in this group :name_uom: uom decorated group name as it appears in a view :extent: extent of the group in view units :extent_map_cm: extent of the group in map cm :drawing_coordinate_system: the coordinate system of drawing coordinates. Setting to None will reset drawing coordinates to the view cs. If `drawing_coordinate_system` is set to some other cs the drawing coordinates will be transformed into the view cs. .. versionadded:: 9.2 .. versionchanged:: 9.3 added support for `unit_of_measure` .. versionchanged:: 9.3.1 added mode=REPLACE and changed mode=NEW to always create a new unique group. 
""" def __enter__(self): return self def __exit__(self, xtype, xvalue, xtraceback): self.__del__() def __del__(self): if hasattr(self, '_close'): self._close() def _close(self): if hasattr(self, '_open'): if self._open: try: self._drawing_coordinate_system = None self._pen = None self._text_def = None # write metadata if self._new_meta: bf = gxapi.GXBF.create("", gxapi.BF_READWRITE_NEW) try: self._meta.gxmeta.serial(bf) bf.seek(0, gxapi.BF_SEEK_START) self.view.gxview.write_group_storage(self.number, "Geosoft_META", bf) finally: del bf finally: self._view.lock = False self._view = None self._open = False self._meta = None self._new_meta = False def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): if self.view.is_3d and not self.group_3d: return '{}/{}/{}'.format(self.name, self.view.current_3d_drawing_plane, self.view.name) return '{}/{}'.format(self.name, self.view.name) def __init__(self, view, name='_', plane=None, view_lock=False, mode=APPEND, unit_of_measure='', group_3d=False): if (len(name) == 0) or (name == view.name): name = name + '_' _lock = threading.Lock() _lock.acquire() try: if view.lock: raise GroupException(_t('This view is locked by group {}.'.format(view.lock))) if view_lock: view.lock = name finally: _lock.release() self.group_3d = False if view.is_3d: self.group_3d = group_3d if not group_3d: # setup a 2D drawing plane for this 2D group if plane is None: if view.current_3d_drawing_plane: plane = view.current_3d_drawing_plane else: plane = 'Plane' view.current_3d_drawing_plane = plane self._view = view self._name = name self._mode = mode self._new_meta = False self._meta = None if mode == REPLACE: if self.view.gxview.exist_group(name): self.view.delete_group(name) elif mode == NEW: # if the group exists, find a new unique group name if self.view.gxview.exist_group(name): self._name = gxu.unique_name(name, self.view.gxview.exist_group, separator='_') elif self.view.gxview.exist_group(self.name): 
group_number = self.view.gxview.find_group(self.name) if self.view.gxview.group_storage_exists(group_number, "Geosoft_META"): bf = self.view.gxview.read_group_storage(group_number, "Geosoft_META") if bf.size(): try: self._meta = gxmeta.Metadata(gxapi.GXMETA.create_s(bf)) finally: del bf if unit_of_measure: self.unit_of_measure = unit_of_measure self._view.gxview.start_group(self.name, mode) self._open = True def close(self): """ Close the group, unlocks the view""" self._close() @property def guid(self): """ The group GUID. .. versionadded:: 9.3 """ sr = gxapi.str_ref() self.view.gxview.get_group_guid(self.number, sr) return sr.value @property def view(self): """view that contains this group.""" return self._view @property def name(self): """group name""" return self._name @property def drawing_plane(self): """ drawing plane of this group, None for a group in a 2D view.""" if self.view.is_3d: return self.view.current_3d_drawing_plane else: return None @property def unit_of_measure(self): """ Unit of measure for scalar data contained in this group. This is only relevant for groups that contain scalar data, such as a Colour_symbols_group. For the spatial unit_of_measure use :attr:`drawing_coordinate_system.unit_of_measure` Can be set. 
..versionadded:: 9.3 """ gxm = self.gx_metadata if gxm.has_attribute(_uom_attr): return gxm.get_attribute(_uom_attr) return '' @unit_of_measure.setter def unit_of_measure(self, uom): gxm = self.gx_metadata gxm.set_attribute(_uom_attr, str(uom)) self.gx_metadata = gxm @property def number(self): """group number in the view""" return self.view.gxview.find_group(self.name) def _extent(self, unit=UNIT_VIEW): xmin = gxapi.float_ref() ymin = gxapi.float_ref() xmax = gxapi.float_ref() ymax = gxapi.float_ref() self.view.gxview.get_group_extent(self.name, xmin, ymin, xmax, ymax, unit) return xmin.value, ymin.value, xmax.value, ymax.value @property def extent(self): """group extent as (xmin, ymin, xmax, ymax) in view units""" return self._extent(UNIT_VIEW) @property def visible(self): """True if group is visible, can be set.""" return self.name in self.view.group_list_visible @visible.setter def visible(self, visibility): if self.visible != visibility: marked = self.view.group_list_marked self.view.gxview.mark_all_groups(0) self.view.gxview.mark_group(self.name, 1) if visibility is True: self.view.gxview.hide_marked_groups(0) else: self.view.gxview.hide_marked_groups(1) self.view.gxview.mark_all_groups(0) for g in marked: self.view.gxview.mark_group(g, 1) def extent_map_cm(self, extent=None): """ Return an extent in map cm. :param extent: an extent in view units as a tuple (xmin, ymin, xmax, ymax), Default is the group extent. .. versionadded:: 9.2 """ if extent is None: extent = self.extent xmin, ymin = self.view.view_to_map_cm(extent[0], extent[1]) xmax, ymax = self.view.view_to_map_cm(extent[2], extent[3]) return xmin, ymin, xmax, ymax def locate(self, location, reference=REF_CENTER): """ Locate the group relative to a point. :param location: location (x, y) or a `geosoft.gxpy.geometry.Point` :param reference: reference point relative to the clip limits of the view to which reference location. 
The points are: :: 6 7 8 top left, center, right 3 4 5 center left, center, right 0 1 2 bottom left, center, right .. versionadded:: 9.2 """ area = gxgm.Point2(self.extent) area -= area.centroid area -= edge_reference(area, reference) area += location self.view.gxview.relocate_group(self.name, area.p0.x, area.p0.y, area.p1.x, area.p1.y, gxapi.MVIEW_RELOCATE_ASPECT_CENTER) @property def gx_metadata(self): """ The group metadata as a Geosoft `geosoft.gxpy.metadata.Metadata` instance. This metadata may contain standard Geosoft metadata, such as unit_of_measure for data contained in the group, and you can add your own metadata spexific to your application. See `geosoft.gxpy.metadata.Metadata` for information about working with metadata. Can be set, in which case the metadata is replaced by the new metadata. Normally you will get the current metadata, add to or modify, then set it back. .. versionadded:: 9.3 """ if self._meta: return self._meta else: return gxmeta.Metadata() @gx_metadata.setter def gx_metadata(self, meta): self._new_meta = True self._meta = meta def _draw(func): @wraps(func) def wrapper(self, *args, **kwargs): if self._mode == READ_ONLY: raise _t('This view is read-only.') if not self._pen: self._init_pen() if 'pen' in kwargs: cur_pen = self.pen try: self.pen = kwargs.pop('pen') func(self, *args, **kwargs) finally: self.pen = cur_pen else: func(self, *args, **kwargs) return wrapper def _make_point(p): if isinstance(p, gxgm.Point): return p else: return gxgm.Point(p) def _make_point2(p2): if isinstance(p2, gxgm.Point2): return p2 else: return gxgm.Point2(p2) def _make_ppoint(p): if isinstance(p, gxgm.PPoint): return p else: return gxgm.PPoint(p) class Draw(Group): """ Create (start) a drawing group for 2D drawing elements. On a 3D view, 2D drawing elements are placed on the default drawing plane. Drawing groups will lock the view such that only one drawing group can be instantiated at a time. 
Use `with Draw() as group:` to ensure correct unlocking when complete. Inherits from the `Group` base class. See `Group` arguments. """ def __init__(self, *args, **kwargs): kwargs['view_lock'] = True super().__init__(*args, **kwargs) self._pen = None self._text_def = None self._drawing_coordinate_system = None if self._mode != READ_ONLY: self._init_pen() self._text_def = Text_def(factor=self.view.units_per_map_cm) def _set_dot_symbol(self): # this is a hack because we cannot draw a box or a zero-length line, so # instead we draw a filled box self.view.gxview.symb_number(4) self.view.gxview.symb_color(0) self.view.gxview.symb_fill_color(self.pen.line_color.int_value) self.view.gxview.symb_size(self.pen.line_thick) @property def group_opacity(self): """ Group opacity setting. Can be set :return: opacity 0. to 1. (opaque) .. versionadded 9.3.1 """ fref = gxapi.float_ref() self.view.gxview.get_group_transparency(self.name, fref) return fref.value @group_opacity.setter def group_opacity(self, op): self.view.gxview.set_group_transparency(self.name, min(max(float(op), 0.), 1.)) @property def drawing_coordinate_system(self): """ The coordinate of incoming spatial data, which are converted to the coordinate system of the view. This is normally the same as the view coordinate system, but it can be set to a different coordinate system to have automatic reprojection occur during drawing. 
""" if self._drawing_coordinate_system is None: return self.view.coordinate_system return self._drawing_coordinate_system @drawing_coordinate_system.setter def drawing_coordinate_system(self, cs): if cs is None: self.view.gxview.set_user_ipj(self.view.coordinate_system.gxipj) self._drawing_coordinate_system = None else: self._drawing_coordinate_system = gxcs.Coordinate_system(cs) self.view.gxview.set_user_ipj(self._drawing_coordinate_system.gxipj) @property def pen(self): """the current drawing pen as a :class:`Pen` instance""" return self._pen @pen.setter def pen(self, pen): if self._mode == READ_ONLY: raise _t('This view is read-only.') if type(pen) is str: pen = Pen.from_mapplot_string(pen) if self._pen.line_color != pen.line_color: self.view.gxview.line_color(pen.line_color.int_value) if self._pen.line_thick != pen.line_thick: self.view.gxview.line_thick(pen.line_thick) if self._pen.line_smooth != pen.line_smooth: self.view.gxview.line_smooth(pen.line_smooth) if (self._pen.line_style != pen.line_style) or (self._pen.line_pitch != pen.line_pitch): self.view.gxview.line_style(pen.line_style, pen.line_pitch) if self._pen.fill_color != pen.fill_color: self.view.gxview.fill_color(pen.fill_color.int_value) if self._pen.pat_number != pen.pat_number: self.view.gxview.pat_number(pen.pat_number) if self._pen.pat_angle != pen.pat_angle: self.view.gxview.pat_angle(pen.pat_angle) if self._pen.pat_density != pen.pat_density: self.view.gxview.pat_density(pen.pat_density) if self._pen.pat_size != pen.pat_size: self.view.gxview.pat_size(pen.pat_size) if self._pen.pat_style != pen.pat_style: self.view.gxview.pat_style(pen.pat_style) if self._pen.pat_thick != pen.pat_thick: self.view.gxview.pat_thick(pen.pat_thick) self._pen = pen def _init_pen(self): scm = self.view.units_per_map_cm pen = Pen(line_thick=0.02 * scm, line_pitch=0.5 * scm, pat_size=0.25 * scm, pat_thick=0.02 * scm) self.view.gxview.line_color(pen.line_color.int_value) self.view.gxview.line_thick(pen.line_thick) 
self.view.gxview.line_smooth(pen.line_smooth) self.view.gxview.line_style(pen.line_style, pen.line_pitch) self.view.gxview.fill_color(pen.fill_color.int_value) self.view.gxview.pat_number(pen.pat_number) self.view.gxview.pat_angle(pen.pat_angle) self.view.gxview.pat_density(pen.pat_density) self.view.gxview.pat_size(pen.pat_size) self.view.gxview.pat_style(pen.pat_style) self.view.gxview.pat_thick(pen.pat_thick) self._pen = pen def new_pen(self, **kwargs): """ Returns a pen that inherits default from the current view pen. Arguments are the same as the `Pen` constructor. This using this ensures that default sizing of view unit-based dimensions (such as `line_thick`) are not lost when new pens are created. :param kwargs: see :class:`Pen` :returns: :class:`Pen` instance .. versionadded:: 9.2 """ return Pen(default=self.pen, **kwargs) @property def text_def(self): """the current text definition as a :class:`Text_def` instance, can be set.""" return self._text_def @text_def.setter def text_def(self, text_def): if self._mode == READ_ONLY: raise _t('This view is read-only.') if self._text_def != text_def: self._text_def = text_def self.view.gxview.text_font(text_def.font, text_def.gfn, text_def.weight, text_def.italics) self.view.gxview.text_size(text_def.height) self.view.gxview.text_color(text_def.color.int_value) def text_extent(self, str, text_def=None): """ Return the extent of a text string in view units relative to the current text `text_def` setting, or the specified `text_def` setting. :param str: text string :param text_def: `text_def` instance, None for the current setting :return: `geosoft.geometry.Point2` instance .. 
versionadded:: 9.4 """ x0 = gxapi.float_ref() y0 = gxapi.float_ref() x1 = gxapi.float_ref() y1 = gxapi.float_ref() if text_def: cur_text = self._text_def self.text_def = text_def else: cur_text = None self.view.gxview.measure_text(str, x0, y0, x1, y1) if cur_text: self.text_def = cur_text return gxgm.Point2(((x0.value, y0.value), (x1.value, y1.value)), coordinate_system=self.view.coordinate_system) @_draw def point(self, p): """ Draw a point. :param p: point location as `geosoft.gxpy.geometry.Point` .. versionadded:: 9.3 """ # just draw a box. TODO: MVIEW needs a way to draw a dot, and/or address issue #44 self._set_dot_symbol() self.view.gxview.symbol(p.x, p.y) @_draw def polypoint(self, pp): """ Draw many points. :param pp: point location as `geosoft.gxpy.geometry.PPoint`, or a pair of VVs (vvx, vvy), or something that `gxpy.geometry.PPoint` can construct into a PP. .. versionadded:: 9.3 """ self._set_dot_symbol() if not((len(pp) == 2) and isinstance(pp[0], gxvv.GXvv)): pp = _make_ppoint(pp) pp = (gxvv.GXvv(pp.x), gxvv.GXvv(pp.y)) self.view.gxview.symbols(pp[0].gxvv, pp[1].gxvv) @_draw def line(self, p2): """ Draw a line on the current plane :param p2: :class:`geometry.Point2`, or (p1, p2) .. versionadded:: 9.2 """ p2 = _make_point2(p2) self.view.gxview.line(p2.p0.x, p2.p0.y, p2.p1.x, p2.p1.y) @_draw def polyline(self, pp, close=False): """ Draw a polyline the current plane :param pp: `geosoft.gxpy.geometry.PPoint` instance or something that can be constructed, or a pair of `geosoft.gxpy.vv.GXvv` (xvv, yvv) :param close: if True, draw a polygon, default is a polyline .. note:: Smooth-line polygons must have at least 6 points for the closure to appear continuous. .. 
versionadded:: 9.2 """ if not((len(pp) == 2) and isinstance(pp[0], gxvv.GXvv)): pp = _make_ppoint(pp) pp = (gxvv.GXvv(pp.x), gxvv.GXvv(pp.y)) if close: self.view.gxview.poly_line(gxapi.MVIEW_DRAW_POLYGON, pp[0].gxvv, pp[1].gxvv) else: self.view.gxview.poly_line(gxapi.MVIEW_DRAW_POLYLINE, pp[0].gxvv, pp[1].gxvv) @_draw def polygon(self, pp): """ Draw a polygon on the current plane. :param pp: :class:`geosoft.gxpy.geometry.PPoint` .. note:: Smooth-line polygons must have at least 6 points for the closure to appear continuous. .. versionadded:: 9.2 """ self.polyline(pp, True) @_draw def rectangle(self, p2): """ Draw a 2D rectangle on the current plane :param p2: geometry.Point2, or (p1, p2), or (x0, y0, x2, y2) .. versionadded:: 9.2 """ p2 = _make_point2(p2) self.view.gxview.rectangle(p2.p0.x, p2.p0.y, p2.p1.x, p2.p1.y) @_draw def graticule(self, dx=None, dy=None, ddx=None, ddy=None, style=GRATICULE_LINE): """ Draw a graticule reference on a view. :param style: `GRATICULE_LINE`, `GRATICULE_CROSS` or `GRATICULE_DOT` :param dx: vertical line separation :param dy: horizontal line separation :param ddx: horizontal cross size for `GRATICULE_CROSS` :param ddy: vertical cross size for `GRATICULE_CROSS` .. versionadded:: 9.2 """ ext = self.extent if dx is None: dx = (ext[2] - ext[0]) * 0.2 ddx = dx * 0.25 if dy is None: dy = (ext[3] - ext[1]) * 0.2 ddy = dy * 0.25 if ddy is None: ddy = dy * 0.25 if ddx is None: ddx = dx * 0.25 self.view.gxview.grid(dx, dy, ddx, ddy, style) def text(self, text, location=(0, 0), reference=REF_BOTTOM_LEFT, angle=0., text_def=None): """ Draw text in the view. :param text: text string. Use line-feed characters for multi-line text. :param location: (x, y) or a `gxpy.geomerty.Point` location :param reference: Text justification point relative text outline box. 
The points are: :: 6 7 8 top left, center, right 3 4 5 middle left, center, right 0 1 2 bottom left, center, right :param angle: baseline angle in degrees clockwise :param text_def: text definition, if not set the current definition is used .. versionadded:: 9.2 """ if text_def: cur_text = self._text_def self.text_def = text_def else: cur_text = None self.view.gxview.text_ref(reference) self.view.gxview.text_angle(angle) if not isinstance(location, gxgm.Point): location = gxgm.Point(location) self.view.gxview.text(text, location.x, location.y) if cur_text: self.text_def = cur_text def contour(self, grid_file_name, parameters=None): """ Draw contours for a grid file. :param grid_file_name: Grid file name :param parameters: contour parameters, None for default contouring. Parameters can be provided as a list of strings that correspond the contouring control file (starting at control file line 2) as defined on the Geosoft Desktop help topic 'CONTCON'. The first 'MDF' line, which is used to specify the map scale and drawing location, is not required as the scale and location is fixed by the view. Following are the control file parameters names as they would appear in a text control file: .. code:: border, lowtic, smooth, suppop, nth, gtitle / 'general': {} pdef, ptick, pxval, pframe, psidel / 'special': {} hlb, htl, hcr, htk, hxv, hsl / 'text': {} ominl,cminl,skipa,skipb,skipc,skipd,skipe,skipf / 'line': {} xval, digxv, minxv, markxv / 'hilo': {} levopt, conbeg, conend, lbup, lbmult, logopt / 'levels': {} cint,lintyp,catt,label,dense,digits,conlo,conhi / 'contours': [{}, {}, ...] ... ... up to 32 contour levels ... 
Example parameters as text strings: ====================================== ================================================================= `parameter=` **Outcome** `None` default contour based on the grid data range `('','','','','','','10')` multiples of 10 `('','','','','','','10','50','250')` multiples of 10, 50 and 100, default attributes `('','','','','','0','0,,,0')` single contour (levopt=0) at value 0 (cint=0), no label (label=0) `('','','','','','0','0,,a=rt500,0')` red 500 micron thick contour at value 0, no label ====================================== ================================================================= Parameters may also be defined in a dictionary using explicit parameter names as shown in the text control file template above. Each line of parameters is defined by the key name to the right on the `/`, and the 'contours' entry is a list, and the values are dictionaries of the parameters to be defines. Parameters that are not defined will have the documented default behaviour. Example parameters as a dictionary: ================================================= =========================================== `parameter=` **Outcome** ------------------------------------------------- ------------------------------------------- `{'contours':[{'cint':10}]}` multiples of 10 `{'contours':[{'cint':10},{cint':50}]}` multiples of 10 and 50, default attributes `{'levels':{'levopt':0},[{'cint':10,'label':0}]}` single contour at data value = 0, no label ================================================= =========================================== .. versionadded:: 9.2 .. 
versionadded:: 9.4 added parameter controls """ def parms(set_str, keys): pstr = '' items = keys.split(',') if len(set_str): for k in items: pstr = pstr + str(set_str.get(k.strip(), '')) + ',' pstr = pstr[:-1] + ' /' + keys return pstr scale, ufac, x0, y0 = self.view.mdf()[1] control_file = gx.gx().temp_file('.con') with open(control_file, 'w+') as f: f.write('{},{},{},{} /scale, ufac, x0, y0\n'.format(scale, ufac, x0, y0)) if parameters is None: f.write(',,-1/\n') elif isinstance(parameters, dict): f.write('{}\n'.format(parms(parameters.get('general', {}), 'border, lowtic, smooth, suppop, nth, gtitle'))) f.write('{}\n'.format(parms(parameters.get('special', {}), 'pdef, ptick, pxval, pframe, psidel'))) f.write('{}\n'.format(parms(parameters.get('text', {}), 'hlb, htl, hcr, htk, hxv, hsl'))) f.write('{}\n'.format(parms(parameters.get('line', {}), 'ominl, cminl, skipa, skipb, skipc, skipd, skipe, skipf'))) f.write('{}\n'.format(parms(parameters.get('hilo', {}), 'xval, digxv, minxv, markxv'))) f.write('{}\n'.format(parms(parameters.get('levels', {}), 'levopt, conbeg, conend, lbup, lbmult, logopt'))) contours = parameters.get('contours', []) if len(contours) == 0: raise GroupException(_t('No contour levels specified.')) for con in contours: f.write('{}\n'.format(parms(con, 'cint, lintyp, catt, label, dense, digits, conlo, conhi'))) else: for pline in parameters: f.write(pline + '\n') geosoft.gxapi.GXMVU.contour(self.view.gxview, control_file, grid_file_name) gxu.delete_file(control_file) class Draw_3d(Draw): """ Create a 3D drawing group within a 3D view. 3D drawing groups accept 3D drawing objects that can be created using methods of this class. 2D objects can also be drawn to a 3D group and will be placed on the default drawing plane within the 3D view. :param render_backfaces: True to turn backface rendering on. .. 
versionadded:: 9.2 """ def __init__(self, view, *args, render_backfaces=False, **kwargs): if not isinstance(view, gxv.View_3d): raise GroupException(_t('View is not 3D')) kwargs['group_3d'] = True super().__init__(view, *args, **kwargs) if render_backfaces: self.render_backfaces = True @property def render_backfaces(self): """ True if backface rendering is on, default is off (False). Backface rendering controls the rendering of parts of solid objects that would normally be hidden from view. If drawing solid objects that have an open face, such as cylinders with an open end, backface rendering will be be turned on. Once on it cannot be turned off for a view. .. versionadded:: 9.2 """ return bool(self.view.gxview.get_3d_group_flags(self.number) & 0b1) @render_backfaces.setter def render_backfaces(self, setting): if not setting and self.render_backfaces: raise GroupException(_t('Once backface rendering is on it cannot be turned off.')) if not self.render_backfaces: f3d = (self.view.gxview.get_3d_group_flags(self.number) & 0b11111110) | 0b1 self.view.gxview.set_3d_group_flags(self.number, f3d) @_draw def sphere(self, p, radius): """ Draw a sphere. :param p: location as geometry.Point, or (x, y, z) :param radius: sphere radius .. versionadded:: 9.2 """ # solids use the fill color as the object color fci = self.pen.fill_color.int_value self.view.gxview.fill_color(self.pen.line_color.int_value) try: p = _make_point(p) self.view.gxview.sphere_3d(p.x, p.y, p.z, radius) finally: self.view.gxview.fill_color(fci) self.view.add_extent(gxgm.Point2((p - radius, p + radius))) @_draw def box_3d(self, p2, wireframe=False): """ Draw a 3D box :param p2: box corners as geometry.Point2, or (p0, p1), or (x0, y0, z0, x1, y1, z1) :param wireframe: True to draw edges only .. 
versionadded:: 9.2 """ # solids use the fill color as the object color fci = self.pen.fill_color.int_value self.view.gxview.fill_color(self.pen.line_color.int_value) pp = _make_point2(p2) try: if wireframe: sq = gxgm.PPoint(((pp.p0.x, pp.p0.y, pp.p0.z), (pp.p0.x, pp.p1.y, pp.p0.z), (pp.p1.x, pp.p1.y, pp.p0.z), (pp.p1.x, pp.p0.y, pp.p0.z), (pp.p0.x, pp.p0.y, pp.p0.z))) self.polyline_3d(sq, style=LINE3D_STYLE_TUBE_JOINED) sq += (0, 0, pp.p1.z - pp.p0.z) self.polyline_3d(sq, style=LINE3D_STYLE_TUBE_JOINED) self.cylinder_3d(gxgm.Point2(((pp.p0.x, pp.p0.y, pp.p0.z), (pp.p0.x, pp.p0.y, pp.p1.z))), radius=self.pen.line_thick * 0.5) self.cylinder_3d(gxgm.Point2(((pp.p0.x, pp.p1.y, pp.p0.z), (pp.p0.x, pp.p1.y, pp.p1.z))), radius=self.pen.line_thick * 0.5) self.cylinder_3d(gxgm.Point2(((pp.p1.x, pp.p1.y, pp.p0.z), (pp.p1.x, pp.p1.y, pp.p1.z))), radius=self.pen.line_thick * 0.5) self.cylinder_3d(gxgm.Point2(((pp.p1.x, pp.p0.y, pp.p0.z), (pp.p1.x, pp.p0.y, pp.p1.z))), radius=self.pen.line_thick * 0.5) else: self.view.gxview.box_3d(pp.p0.x, pp.p0.y, pp.p0.z, pp.p1.x, pp.p1.y, pp.p1.z) finally: self.view.gxview.fill_color(fci) self.view.add_extent(pp.extent) @_draw def cylinder_3d(self, p2, radius, r2=None, close=CYLINDER_CLOSE_ALL): """ Draw a cylinder. :param p2: end points as geometry.Point2, or (p0, p1), or (x0, y0, z0, x1, y1, z1) :param radius: cylinder radius. :param r2: end radius if different from the start :param close: one of: :: CYLINDER_OPEN CYLINDER_CLOSE_START CYLINDER_CLOSE_END CYLINDER_CLOSE_ALL .. 
versionadded:: 9.2 """ # solids use the fill color as the object color fci = self.pen.fill_color.int_value self.view.gxview.fill_color(self.pen.line_color.int_value) if close != CYLINDER_CLOSE_ALL: self.render_backfaces = True try: p2 = _make_point2(p2) if r2 is None: r2 = radius self.view.gxview.cylinder_3d(p2.p0.x, p2.p0.y, p2.p0.z, p2.p1.x, p2.p1.y, p2.p1.z, radius, r2, close) finally: self.view.gxview.fill_color(fci) r = max(radius, r2) ext = p2.extent self.view.add_extent(gxgm.Point2((ext.p0 - r, ext.p1 + r))) @_draw def cone_3d(self, p2, radius): """ Draw a cone. :param p2: end points as geometry.Point2, or (p0, p1), or (x0, y0, z0, x1, y1, z1). :param radius: cone base radius, base is as the the first point of p2. .. versionadded:: 9.2 """ self.cylinder_3d(p2, radius, r2=0.) def _poly_3d(self, points, ptype, smooth=gxapi.MVIEW_DRAWOBJ3D_MODE_FLAT): vvx, vvy, vvz = points.make_xyz_vv() null_vv = gxapi.GXVV.null() self.view.gxview.draw_object_3d(ptype, smooth, vvx.length, 0, vvx.gxvv, vvy.gxvv, vvz.gxvv, null_vv, null_vv, null_vv, null_vv, null_vv, null_vv) @_draw def polypoint_3d(self, points, style=POINT_STYLE_DOT): """ Draw multiple points. :param points: points to draw, :class:`geosoft.gxpy.geometry.PPoint` instance, or array-like [x,y,z] :param style: POINT_STYLE_DOT or POINT_STYLE_SPHERE. Dots are fast and intended for point clouds. The current pen thickness is used as the sphere sizes. .. versionadded:: 9.2 """ points = _make_ppoint(points) radius = self.pen.line_thick * 0.5 if style == POINT_STYLE_DOT: self._poly_3d(points, gxapi.MVIEW_DRAWOBJ3D_ENTITY_POINTS) else: for i in range(points.length): self.sphere(points[i], radius=radius) ext = points.extent self.view.add_extent(gxgm.Point2((ext.p0 - radius, ext.p1 + radius))) @_draw def polyline_3d(self, points, style=LINE3D_STYLE_LINE): """ Draw a polyline. 
:param points: verticies of the polyline, :class:`geosoft.gxpy.geometry.PPoint` instance, or array-like [x,y,z] :param style: LINE3D_STYLE_LINE, LINE3D_STYLE_TUBE or LINE3D_STYLE_TUBE_JOINED. Lines are single-pixel-wide. Tubes have width defined by the pen line thickness. Joined tubes have a joints and rounded ends. .. versionadded:: 9.2 """ points = _make_ppoint(points) if points.length < 2: raise GroupException(_t('Need at least two points.')) radius = self.pen.line_thick * 0.5 if style == LINE3D_STYLE_LINE: vvx, vvy, vvz = points.make_xyz_vv() self.view.gxview.poly_line_3d(vvx.gxvv, vvy.gxvv, vvz.gxvv) else: self.pen = Pen(fill_color=self.pen.line_color, default=self.pen) for i in range(points.length-1): self.cylinder_3d(gxgm.Point2((points[i], points[i+1])), radius=radius) if style == LINE3D_STYLE_TUBE_JOINED: for i in range(points.length): self.sphere(points[i], radius=radius) ext = points.extent self.view.add_extent(gxgm.Point2((ext.p0 - radius, ext.p1 + radius))) def polydata_3d(self, data, render_info_func=None, passback=None): """ Create 3D objects rendered using data attributes. :param data: iterable that yields items passed to your `render_info_func` callback :param render_info_func: a callback that given `(item, passback)` returns the rendering `(symbol_type, geometry, color_integer, attribute)`: ================== ======== =============== ========= Symbol Geometry Color Attribute ================== ======== =============== ========= SYMBOL_3D_SPHERE Point Color.int_value radius SYMBOL_3D_CUBE Point2 Color.int_value None SYMBOL_3D_CYLINDER Point2 Color.int_value radius SYMBOL_3D_CONE Point2 Color.int_value radius ================== ======== =============== ========= :param passback: something passed back to your render_info_func function, default None. **Example** .. 
        .. code::

            import geosoft.gxpy.geometry as gxgm
            import geosoft.gxpy.view as gxv
            import geosoft.gxpy.group as gxg

            def render_spheres(xyz, cmap_radius):
                color, radius = cmap_radius
                return gxg.SYMBOL_3D_SPHERE, xyz, color.int_value, radius

            data = gxgm.PPoint(((5, 5, 5), (7, 5, 5), (7, 7, 7)))
            with gxv.View_3d.new('example_polydata') as v:
                with gxg.Draw_3d(v, 'red_spheres') as g:
                    g.polydata_3d(data, render_spheres, (gxg.Color('r'), 0.25))

        .. versionadded:: 9.2
        """
        # last fill color set in the view, to avoid redundant color changes
        cint = None
        for item in data:
            render = render_info_func(item, passback)
            if render:
                symbol, geometry, color, attribute = render
                if color != cint:
                    self.view.gxview.fill_color(color)
                    cint = color
                if symbol == SYMBOL_3D_SPHERE:
                    self.view.gxview.sphere_3d(geometry[0], geometry[1], geometry[2], attribute)
                    if not isinstance(geometry, gxgm.Geometry):
                        # promote plain (x, y, z) to a Point for the extent logic below
                        geometry = gxgm.Point(geometry)
                elif symbol == SYMBOL_3D_CUBE:
                    self.view.gxview.box_3d(geometry.p0.x, geometry.p0.y, geometry.p0.z,
                                            geometry.p1.x, geometry.p1.y, geometry.p1.z)
                elif symbol == SYMBOL_3D_CYLINDER:
                    self.view.gxview.cylinder_3d(geometry.p0.x, geometry.p0.y, geometry.p0.z,
                                                 geometry.p1.x, geometry.p1.y, geometry.p1.z,
                                                 attribute, attribute,
                                                 CYLINDER_CLOSE_ALL)
                elif symbol == SYMBOL_3D_CONE:
                    self.view.gxview.cylinder_3d(geometry.p0.x, geometry.p0.y, geometry.p0.z,
                                                 geometry.p1.x, geometry.p1.y, geometry.p1.z,
                                                 attribute, 0,
                                                 CYLINDER_CLOSE_ALL)
                else:
                    raise GroupException(_t('Symbol type not implemented'))
                if attribute:
                    # pad the extent by the symbol radius
                    e = gxgm.Point2(geometry).extent
                    self.view.add_extent((e.p0 - attribute, e.p1 + attribute))
                else:
                    self.view.add_extent(geometry.extent)

    def _surface(self, faces, verticies, coordinate_system=None):
        """
        TODO: awaiting resolution of #73

        Draw a surface defined by faces and verticies

        :param faces:       triangle faces as indexes into verticies, numpy array (n_faces, 3)
        :param verticies:   verticies, numpy array (n_verticies, 3)
        :param coordinate_system:   `geosoft.gxpy.Coordinate_system` instance if not in the drawing CS.
        versionadded:: 9.3.1
        """
        n_faces = len(faces)
        n_verticies = len(verticies)
        if np.nanmax(faces) > n_verticies or np.nanmin(faces) < 0:
            raise GroupException(_t('Faces refer to verticies out of range of verticies.'))

        # TODO validate buffering and rendering performance once #73 is resolved.
        n_buff = 1000          # faces are streamed to the API in batches of this size
        n_faces_written = 0

        # normals
        normals = vertex_normals_np(faces, verticies)

        # coordinate_system
        if isinstance(coordinate_system, gxcs.Coordinate_system):
            gxipj = coordinate_system.gxipj
        else:
            gxipj = self.drawing_coordinate_system.gxipj

        # TODO: implement variable colour once issue #73 is addressed
        # color: default to grey when the fill is transparent (0)
        color = self.pen.fill_color.int_value
        if color == 0:
            color = C_GREY

        self.render_backfaces = True
        while n_faces_written < n_faces:
            n_write = min(n_buff, n_faces - n_faces_written)
            n_last = n_faces_written + n_write
            faces_buff = faces[n_faces_written: n_last]
            # expand the vertex and normal arrays per-face for this batch
            verticies_buff = verticies[faces_buff].reshape(-1, 3)
            vx, vy, vz = gxvv.vvset_from_np(verticies_buff)
            vf1, vf2, vf3 = gxvv.vvset_from_np(faces_buff)
            nx, ny, nz = gxvv.vvset_from_np(normals[faces_buff].reshape(-1, 3))
            self.view.gxview.draw_surface_3d_ex(self.name,
                                                vx.gxvv, vy.gxvv, vz.gxvv,
                                                nx.gxvv, ny.gxvv, nz.gxvv,
                                                gxapi.GXVV.null(),
                                                color,
                                                vf1.gxvv, vf2.gxvv, vf3.gxvv,
                                                gxipj)
            n_faces_written += n_write


def surface_group_from_file(v3d, file_name, group_name=None, overwrite=False):
    """
    Create a 3D surface group from a surface dataset file.

    :param v3d:         `geosoft.gxpy.view.View_3d` instance
    :param file_name:   surface dataset file name (extension .geosoft_surface).
                        See `geosoft.gxpy.surface.SurfaceDataset`.
    :param group_name:  group name, default is the base file name.
    :param overwrite:   True to overwrite existing group

    ..
versionadded:: 9.3.1 """ if group_name is None: group_name = os.path.basename(file_name) group_name = os.path.splitext(group_name)[0] if v3d.has_group(group_name) and not overwrite: raise GroupException(_t('Cannot overwrite exing group "{}"').format(group_name)) v3d.gxview.draw_surface_3d_from_file(group_name, file_name) ext = gxspd.extent_from_metadata_file(file_name) v3d.add_extent(ext) def contour(view, group_name, grid_file_name, parameters=None): """ Create a contour group from a grid file. A default contour interval is determined from the grid. :param view: `geosoft.gxpy.view.View` instance :param group_name: name for the contour group :param grid_file_name: Grid file name .. versionadded:: 9.3 """ with Draw(view, group_name) as g: g.contour(grid_file_name, parameters=parameters) def legend_color_bar(view, group_name, cmap, cmap2=None, bar_location=COLOR_BAR_RIGHT, location=None, decimals=None, annotation_height=0.2, annotation_offset=None, annotation_side=COLOR_BAR_ANNOTATE_RIGHT, box_size=None, bar_width=None, max_bar_size=None, minimum_gap=0, post_end_values=False, annotate_vertical=False, division_line=1, interval_1=None, interval_2=None, title=None): """ Draw a color bar legend from :class:Color_map coloring definitions. :param view: :class:`gxpy.view.View` instance in which to place the bar :param group_name: name for the color_bar group, overwrites group if it exists. :param cmap: :class:`Color_map` instance :param cmap2: optional orthogonal blended :class:`Color_map` instance. If making a shaded-color legend, provide the shaded color map here. :param bar_location: one of: :: COLOR_BAR_RIGHT = 0 COLOR_BAR_LEFT = 1 COLOR_BAR_BOTTOM = 2 COLOR_BAR_TOP = 3 :param location: offset or (x, y) offset from `bar_location` reference point, in cm. The default is determined to center the bar off the location side specified. 
    :param decimals:            annotation decimal places
    :param annotation_height:   annotation number height (cm)
    :param annotation_offset:   offset of annotations from the bar (cm)
    :param annotation_side:     side of the bar for annotations

        ::

            COLOR_BAR_ANNOTATE_RIGHT = 1
            COLOR_BAR_ANNOTATE_LEFT = -1
            COLOR_BAR_ANNOTATE_TOP = 1
            COLOR_BAR_ANNOTATE_BOTTOM = -1

    :param box_size:        box size, height for vertical bars, width for horizontal bars
    :param bar_width:       width of the color boxes, horizontal for vertical bars, vertical
                            for horizontal bars
    :param max_bar_size:    maximum bar size, default is the size of the view edge
    :param minimum_gap:     minimum gap between annotations.  Annotations are dropped if necessary.
    :param post_end_values: post the maximum and minimum values
    :param annotate_vertical:   True to orient labels vertically
    :param division_line:   0, no division lines, 1 - line, 2 - tick
    :param interval_1:      Major annotation increment, default annotates everything
    :param interval_2:      secondary smaller annotations, reduced to 1/10, 1/5, 1/4 or 1/2
                            of interval_1.  Default chooses something reasonable.
    :param title:           bar title, use new-lines for sub-titles.  Default uses the title
                            and unit_of_measure from `cmap`.

    .. versionadded:: 9.2
    """

    # ensure group name is unique in the view
    while group_name in view.group_list:
        group_name += '_'

    # default decimals: derive a decimal count from the data range of the color map
    # NOTE(review): the nesting of this range scan under `if decimals is None` is
    # inferred from the logic (it must not modify a caller-supplied `decimals`)
    if decimals is None:
        decimals = 1
        minz = maxz = cmap.color_map[0][0]
        for c in cmap.color_map:
            z = c[0]
            if z:
                if z < minz:
                    minz = z
                elif z > maxz:
                    maxz = z
        delta = maxz - minz
        # one extra decimal for every factor of 10 the range is below 100
        while delta > 0 and delta < 100:
            delta *= 10.
            decimals += 1

    itr = cmap.gxitr
    with Draw(view, group_name) as g:

        v_area = gxgm.Point2(view.extent_clip)
        v_width = v_area.dimension[0]
        v_height = v_area.dimension[1]

        # orientation: 0 = vertical bar (left/right), 1 = horizontal bar (top/bottom)
        if (bar_location == COLOR_BAR_LEFT) or (bar_location == COLOR_BAR_RIGHT):
            bar_orient = 0
            default_bar_size = v_height * 0.8
            if max_bar_size is None:
                max_bar_size = v_height
        else:
            bar_orient = 1
            default_bar_size = v_width * 0.8
            if max_bar_size is None:
                max_bar_size = v_width * 0.8

        # bar cell sizing
        def_box_size = default_bar_size / itr.get_size()
        if box_size is None:
            box_size = min(0.4 * view.units_per_map_cm, def_box_size)
        else:
            box_size *= view.units_per_map_cm
        if bar_width is None:
            if bar_location in (COLOR_BAR_LEFT, COLOR_BAR_RIGHT):
                bar_width = max(0.4 * view.units_per_map_cm, box_size * 2.0)
            else:
                bar_width = max(0.4 * view.units_per_map_cm, box_size)
        else:
            bar_width *= view.units_per_map_cm
        if max_bar_size is not None:
            # shrink boxes so the whole bar fits inside the maximum size
            box_size = min(box_size, max_bar_size / itr.get_size())

        # convert annotation dimensions from cm to view units
        annotation_height *= view.units_per_map_cm
        if annotation_offset is None:
            annotation_offset = annotation_height * 0.5
        else:
            annotation_offset *= view.units_per_map_cm
        # the sign of annotation_side selects which side of the bar is annotated
        annotation_offset *= annotation_side
        minimum_gap *= view.units_per_map_cm

        # parameters handed to GXMVU.color_bar_reg as a REG
        cdict = {
            "BAR_ORIENTATION": bar_orient,
            "DECIMALS": decimals,
            'ANNOFF': annotation_offset,
            'BOX_SIZE': box_size,
            'BAR_WIDTH': bar_width,
            'MINIMUM_GAP': minimum_gap,
            "X": v_area.centroid.x,
            "Y": v_area.centroid.y,
            "POST_MAXMIN": 1 if post_end_values else 0,
            "LABEL_ORIENTATION": 0 if annotate_vertical else 1,
            "DIVISION_STYLE": division_line,
        }

        if interval_1:
            # snap interval_2 to a tidy fraction of interval_1
            if interval_2 is None:
                interval_2 = gxapi.rDUMMY
            if interval_2 <= interval_1 / 10.:
                interval_2 = interval_1 / 10.
            elif interval_2 <= interval_1 / 5.:
                interval_2 = interval_1 / 5.
            elif interval_2 <= interval_1 / 4.:
                interval_2 = interval_1 / 4.
            elif interval_2 <= interval_1 / 2.:
                interval_2 = interval_1 / 2.
            else:
                interval_2 = gxapi.rDUMMY
            cdict["FIXED_INTERVAL"] = interval_1
            cdict["FIXED_MINOR_INTERVAL"] = interval_2

        g.text_def = Text_def(height=annotation_height)
        if cmap2 is None:
            itr2 = gxapi.GXITR.null()
        else:
            itr2 = cmap2.gxitr
        gxapi.GXMVU.color_bar_reg(view.gxview, itr, itr2, gxu.reg_from_dict(cdict, 100, json_encode=False))

        # default title comes from the color map, with units on a second line
        if title is None:
            if cmap.unit_of_measure:
                title = '{}\n({})'.format(cmap.title, cmap.unit_of_measure)
            else:
                title = cmap.title

        if title:
            title_height = annotation_height * 1.5
            g.text_def = Text_def(height=title_height, weight=FONT_WEIGHT_BOLD)
            p = gxgm.Point(edge_reference(gxgm.Point2(g.extent), REF_BOTTOM_CENTER))
            p -= (0, title_height * 0.5)
            # first line is the bold title, remaining lines are a lighter sub-title
            if '\n' in title:
                tline = title[:title.index('\n')]
                title = title[title.index('\n') + 1:]
            else:
                tline = title
                title = ''
            g.text(tline, p, reference=REF_TOP_CENTER)
            if title:
                g.text_def = Text_def(height=title_height * 0.8, weight=FONT_WEIGHT_LIGHT)
                p -= (0, title_height * 1.5)
                g.text(title, p, reference=REF_TOP_CENTER)

        # locate the bar
        default_offset = 1.5 * view.units_per_map_cm
        if location and (not hasattr(location, '__iter__')):
            # a scalar location is an offset distance from the chosen edge
            default_offset = location * view.units_per_map_cm
            location = None
        if location is not None:
            location = location[0] * view.units_per_map_cm, location[1] * view.units_per_map_cm

        area = gxgm.Point2(view.extent_clip)
        if bar_location == COLOR_BAR_LEFT:
            if location is None:
                location = (-default_offset, 0)
            xy = edge_reference(area, REF_CENTER_LEFT)
            reference = REF_CENTER_RIGHT
        elif bar_location == COLOR_BAR_BOTTOM:
            if location is None:
                location = (0, -default_offset)
            xy = edge_reference(area, REF_BOTTOM_CENTER)
            reference = REF_TOP_CENTER
        elif bar_location == COLOR_BAR_TOP:
            if location is None:
                location = (0, default_offset)
            xy = edge_reference(area, REF_TOP_CENTER)
            reference = REF_BOTTOM_CENTER
        else:  # BAR_RIGHT
            if location is None:
                location = (default_offset, 0)
            xy = edge_reference(area, REF_CENTER_RIGHT)
            reference = REF_CENTER_LEFT
        location = xy + location

        g.locate(location,
                 reference)


class Color:
    """
    Colours, which are stored as a 32-bit color integer.

    :param color:   string descriptor (eg. 'R255G0B125'), color letter R, G, B, C, M, Y, H, S or V.;
                    tuple (r, g, b), (c, m, y) or (h, s, v), each item defined in the range 0 to 255;
                    32-bit color number, which can be an item selected from the following list:

        ::

            C_BLACK
            C_RED
            C_GREEN
            C_BLUE
            C_CYAN
            C_MAGENTA
            C_YELLOW
            C_GREY
            C_LT_RED
            C_LT_GREEN
            C_LT_BLUE
            C_LT_CYAN
            C_LT_MAGENTA
            C_LT_YELLOW
            C_LT_GREY
            C_GREY10
            C_GREY25
            C_GREY50
            C_WHITE
            C_TRANSPARENT

    :param model:   model of the tuple:

        ::

            CMODEL_RGB (default)
            CMODEL_CMY
            CMODEL_HSV

    .. versionadded:: 9.2
    """

    def __init__(self, color, model=CMODEL_RGB):

        if isinstance(color, Color):
            self._color = color.int_value
        elif isinstance(color, int):
            self.int_value = color
        elif isinstance(color, str):
            self._color = gxapi.GXMVIEW.color(color)
        else:
            if model == CMODEL_CMY:
                self.cmy = color
            elif model == CMODEL_HSV:
                # clamp each HSV component to 0..255 before conversion
                hue = max(0, min(255, color[0]))
                sat = max(0, min(255, color[1]))
                val = max(0, min(255, color[2]))
                self._color = gxapi.GXMVIEW.color_hsv(hue, sat, val)
            else:
                self.rgb = color

    def __eq__(self, other):
        return self.int_value == other.int_value

    def __ne__(self, other):
        return not self.__eq__(other)

    @property
    def int_value(self):
        """ color as a 32-bit color integer, can be set"""
        return self._color

    @int_value.setter
    def int_value(self, color):
        if color < 0:
            raise GroupException(_t('Invalid color integer {}, must be >= 0').format(color))
        self._color = int(color)

    @property
    def rgb(self):
        """color as an (red, green, blue) tuple, can be set"""
        if self.int_value == 0:
            # 0 is C_TRANSPARENT, which has no color components
            return None
        r = gxapi.int_ref()
        g = gxapi.int_ref()
        b = gxapi.int_ref()
        gxapi.GXMVIEW.color2_rgb(self._color, r, g, b)
        return r.value, g.value, b.value

    @rgb.setter
    def rgb(self, rgb):
        # clamp each component to 0..255
        r = max(min(255, rgb[0]), 0)
        g = max(min(255, rgb[1]), 0)
        b = max(min(255, rgb[2]), 0)
        self._color = gxapi.GXMVIEW.color_rgb(r, g, b)

    @property
    def cmy(self):
        """color as an (cyan, magenta, yellow) tuple, can be set"""
if self.int_value == 0: return None red, green, blue = self.rgb return 255 - red, 255 - green, 255 - blue @cmy.setter def cmy(self, cmy): self.rgb = (255 - cmy[0], 255 - cmy[1], 255 - cmy[2]) def adjust_brightness(self, brightness): """ Return a :class:`Color` instance adjusted for brightness. .. versionadded:: 9.2 """ if brightness == 0.: return self c, m, y = self.rgb if brightness > 0.0: w = round(brightness * 255) c = max(c - w, 0) m = max(m - w, 0) y = max(y - w, 0) return Color((c, m, y), model=CMODEL_CMY) else: k = round(-brightness * 255) c = max(c + k, 255) m = max(m + k, 255) y = max(y + k, 255) return Color((c, m, y), model=CMODEL_CMY) def font_weight_from_line_thickness(line_thick, height): """ Returns font weight for a text height and line thickness. :param line_thick: line thickness in same units as the text height :param height: text height :returns: one of: :: FONT_WEIGHT_ULTRALIGHT FONT_WEIGHT_LIGHT FONT_WEIGHT_MEDIUM FONT_WEIGHT_BOLD FONT_WEIGHT_XBOLD FONT_WEIGHT_XXBOLD .. versionadded:: 9.2 """ if height <= 0.: return FONT_WEIGHT_ULTRALIGHT ratio = line_thick / height fw = 1 for f in _weight_factor: if ratio <= f: return fw fw += 1 return FONT_WEIGHT_MEDIUM def thickness_from_font_weight(weight, height): """ Returns the line thickness appropriate for a text weight. :param weight: one of: :: FONT_WEIGHT_ULTRALIGHT FONT_WEIGHT_LIGHT FONT_WEIGHT_MEDIUM FONT_WEIGHT_BOLD FONT_WEIGHT_XBOLD FONT_WEIGHT_XXBOLD :param height: font height .. versionadded:: 9.2 """ return height * _weight_factor[weight - 1] class Text_def: """ Text definition: :param font: font name. TrueType fonts are assumed unless the name ends with '.gfn', which is a Geosoft gfn font. :param weight: one of: :: FONT_WEIGHT_ULTRALIGHT FONT_WEIGHT_LIGHT FONT_WEIGHT_MEDIUM FONT_WEIGHT_BOLD FONT_WEIGHT_XBOLD FONT_WEIGHT_XXBOLD :param line_thick: line thickness from which to determine a weight, which is calculated from the ratio of line thickness to height. 
    :param italics:     True for italics fonts
    :param height:      text height, default 0.25
    :param factor:      default spatial properties are multiplied by this factor.  This is useful
                        for creating text scaled to the units of a view.  The default text
                        properties are scaled to cm.

    :Properties:

        :height:            font height in view units
        :font:              font name
        :weight:            font weight, one of FONT_WEIGHT
        :line_thick:        font line thickness for gfn stroke fonts
        :italics:           True for italics
        :slant:             Slant angle for stroke fonts, 0 if normal, 15 for italics
        :mapplot_string:    mapplot compatible text definition string

    .. versionadded:: 9.2
    """

    def __init__(self, **kwargs):

        # placeholder attributes; real values are set below or copied from `default`
        self._color = None
        self._font = None
        self._height = None
        self._gfn = None
        self._weight = None
        self._italics = None

        if 'default' in kwargs:
            # start from a copy of the supplied default Text_def
            def_pen = kwargs.pop('default')
            self.__dict__ = def_pen.__dict__.copy()
        else:
            self.color = Color(C_BLACK)
            self.height = 0.25
            self.font = 'DEFAULT'
            self.gfn = True
            self.weight = None
            self.italics = False

        factor = kwargs.pop('factor', 1.)
        if factor != 1.0:
            # scale default spatial properties to the caller's units
            self.height *= factor

        line_thick = None
        for k in kwargs:
            if k == 'color':
                self.color = kwargs[k]
            elif k == 'line_thick':
                # deferred: weight is derived from line_thick after height is final
                line_thick = kwargs[k]
            elif k == 'font':
                self.font = kwargs[k]
            elif k in self.__dict__:
                self.__dict__[k] = kwargs[k]
            else:
                raise GroupException(_t('Invalid text definition parameter ({})'.format(k)))

        if self.weight is None:
            if line_thick is None:
                self.weight = FONT_WEIGHT_MEDIUM
            else:
                self.weight = font_weight_from_line_thickness(line_thick, self.height)

    def __eq__(self, other):
        if hasattr(other, '__dict__'):
            return self.__dict__ == other.__dict__
        return False

    def __ne__(self, other):
        return self.__dict__ != other.__dict__

    @property
    def color(self):
        """text color as a :class:`Color` instance, can be set"""
        return self._color

    @color.setter
    def color(self, color):
        if isinstance(color, Color):
            self._color = color
        else:
            self._color = Color(color)

    @property
    def font(self):
        """text font name, can be set."""
        return self._font

    @font.setter
    def font(self, font):
        if font:
            if '.gfn' in font.lower():
                # Geosoft stroke font: remember the bare name and flag it as gfn
                self.gfn = True
                self._font = font.lower().replace('.gfn', '')
            else:
                self.gfn = False
                self._font = font.replace('(TT)', '')
        else:
            self._font = 'DEFAULT'
            self.gfn = True

    @property
    def line_thick(self):
        """text line thickness determined from the font weight, can be set."""
        return thickness_from_font_weight(self.weight, self.height)

    @line_thick.setter
    def line_thick(self, line_thick):
        self.weight = font_weight_from_line_thickness(line_thick, self.height)

    @property
    def slant(self):
        """text slant, 15 for italics, 0 for not italics, can be set.

        If set, any slant greater than 5 will result in a 15 degree slant to create italics."""
        if self.italics:
            return 15
        else:
            return 0

    @slant.setter
    def slant(self, slant):
        if slant > 5:
            self.italics = True
        else:
            self.italics = False

    @property
    def mapplot_string(self):
        """
        Mapplot text definition string, assumes scaling in cm.
""" if 'default' in self._font.lower(): font = 'DEFAULT' elif not self.gfn: font = self._font.strip() + '(TT)' else: font = self._font return '{},,,{},"{}"'.format(self.height, self.slant, font) class Pen: """ Geosoft Pen class. The default dimensioned properties (`line_thick`, `line_pitch`, `pat_size` and `pat_thick`) assume the view units are cm, and this is usually only the case for the base view. For views in other units either explicitly define the dimention in view units, or pass `factor` set the the view :attr:`geosoft.gxpy.view.View.units_per_map_cm`. :param line_color: line :class:`Color` instance, default is black :param fill_color: fill :class:`Color` instance, default is transparent :param line_thick: line thickness, default is 0.01 :param line_style: line pattern style :: LINE_STYLE_SOLID (default) LINE_STYLE_LONG LINE_STYLE_DOTTED LINE_STYLE_SHORT LINE_STYLE_LONG_SHORT_LONG LINE_STYLE_LONG_DOT_LONG :param line_pitch: line style pitch, default is 0.5 :param line_smooth: smooth line: :: SMOOTH_NONE (default) SMOOTH_AKIMA SMOOTH_CUBIC :param pat_number: pattern number for filled patterns (refer to `etc/default.pat`) default 0, flood fill :param pat_angle: pattern angle, default 0 :param pat_density: pattern density, default 1 :param pat_size: pattern size, default 1.0 :param pat_style: pattern style: :: TILE_RECTANGULAR (default) TILE_DIAGONAL TILE_TRIANGULAR TILE_RANDOM :param pat_thick: pattern line thickness, default 0.01 :param default: default :class:`Pen` instance, if specified defaults are established from this :param factor: default spatial properties are multiplied by this factor. This is useful for creating pens scaled to the units of a view. The default pen properties are scaled to cm. Typically you will pass :attr:`geosoft.gxpy.view.View.units_per_map_cm`. .. 
versionadded: 9.2 """ def __init__(self, **kwargs): self._line_color = None self._line_thick = None self._line_style = None self._line_pitch = None self._line_smooth = None self._fill_color = None self._pat_number = None self._pat_angle = None self._pat_density = None self._pat_size = None self._pat_style = None self.__pat_thick = None if 'default' in kwargs: def_pen = kwargs.pop('default') self.__dict__ = def_pen.__dict__.copy() else: self.line_color = Color(C_BLACK) self.line_thick = 0.01 self.line_style = LINE_STYLE_SOLID self.line_pitch = 0.5 self.line_smooth = SMOOTH_NONE self.fill_color = Color(C_TRANSPARENT) self.pat_number = 0 self.pat_angle = 0 self.pat_density = 1 self.pat_size = 1 self.pat_style = TILE_RECTANGULAR self.pat_thick = self.line_thick factor = kwargs.pop('factor', 1.) if factor != 1.0: self.line_thick *= factor self.line_pitch *= factor self.pat_size *= factor self.pat_thick *= factor for k in kwargs: if k == 'line_color': self.line_color = kwargs[k] elif k == 'fill_color': self.fill_color = kwargs[k] elif k in self.__dict__: self.__dict__[k] = kwargs[k] else: raise GroupException(_t('Invalid pen parameter ({})'.format(k))) @classmethod def from_mapplot_string(cls, cstr): """ Create a :class:`Pen` instance from a mapplot-style string descriptor using either a krgbKRGB or kcmyKCMY color model. Lower case letters indicate line color, uppercase indicates fill color, 'k', 'K' for black. Each letter may be followed by an intensity between 0 and 255. If an intensity is not specified 255 is assumed. Line thickness can be defined by 't' followed by a thickness in 1000'th of the view unit, which for the default 'base' view would be microns. 
:param cstr: mapplot-style color definition Examples: =========== ============================================== 'r' red line 'R' red fill 'rG64' red line, light-green fill 'c64' light cyan line, equivalent to 'R191G255B255' 'c64K96' light cyan line, light-grey fill 'bt500' blue line, 0.5 units thick =========== ============================================== .. versionadded:: 9.2 """ def color_model(colstr): s = colstr.lower() for c in 'cmy': if c in s: return 'cmyk' return 'rgbk' def get_part(colstr, c, default=255): if c not in colstr: return 0 start = colstr.index(c) end = start + 1 for c in colstr[end:]: if not (c in '0123456789'): break end += 1 if end == start + 1: return default return int(colstr[start + 1:end]) def add_k(c, k): return max(c[0] - k, 0), max(c[1] - k, 0), max(c[2] - k, 0) def has_color(colstr, cc): for c in cc: if c in colstr: return True return False def color(colstr, cc): if has_color(colstr, cc): k = get_part(colstr, cc[3]) if has_color(colstr, cc[:3]): if model[0] == 'c' or model[0] == 'C': return add_k((255 - get_part(colstr, cc[0]), 255 - get_part(colstr, cc[1]), 255 - get_part(colstr, cc[2])), k) else: return add_k((get_part(colstr, cc[0]), get_part(colstr, cc[1]), get_part(colstr, cc[2])), k) else: return add_k((255, 255, 255), k) else: return C_TRANSPARENT model = color_model(cstr) line_color = color(cstr, model) fill_color = color(cstr, model.upper()) line_thick = max(1, get_part(cstr, 't', 1)) * 0.001 return cls(line_color=line_color, fill_color=fill_color, line_thick=line_thick) def __eq__(self, other): for k, v in self.__dict__.items(): if other.__dict__[k] != v: return False return True @property def line_color(self): """pen line color as a :class:`color` instance, can be set.""" return self._line_color @line_color.setter def line_color(self, color): if isinstance(color, Color): self._line_color = color else: self._line_color = Color(color) @property def fill_color(self): return self._fill_color @fill_color.setter def 
fill_color(self, color): """pen fill color as a :class:`color` instance, can be set.""" if isinstance(color, Color): self._fill_color = color else: self._fill_color = Color(color) @property def mapplot_string(self): """line/fill colour and thickness string suing mapplor format, eg. 'kR125B64t1000'""" s = '' if self._line_color.int_value != C_TRANSPARENT: if self._line_color.int_value == C_BLACK: s += 'k' else: c = self._line_color.rgb s += 'r{}g{}b{}'.format(c[0], c[1], c[2]) if self._fill_color.int_value != C_TRANSPARENT: if self._line_color.int_value == C_BLACK: s += 'K' else: c = self._fill_color.rgb s += 'R{}G{}B{}'.format(c[0], c[1], c[2]) return s + 't{}'.format(int(self.line_thick * 1000.)) class Color_symbols_group(Group): """ Data represented as colored symbols based on a :class:`Color_map`. :Constructors: ============ ======================================= :func:`new` create a new symbol group in a view :func:`open` open an existing symbol group in a view ============ ======================================= """ def __exit__(self, exc_type, exc_val, exc_tb): self.__del__() def __del__(self): if hasattr(self, '_gxcsymb'): self._gxcsymb = None if hasattr(self, '_close'): self._close() def __init__(self, view, group_name, **kwargs): self._gxcsymb = None super().__init__(view, group_name, **kwargs) @classmethod def new(cls, view, name, data, color_map, symbol_def=None, symbol=SYMBOL_CIRCLE, mode=REPLACE, **kwargs): """ Create a new color symbols group with color mapping. If the group exists a new unique name is constructed. :param view: the view in which to place the group :param name: group name :param data: 2d numpy data array [[x, y, value], ...] or an iterable that yields `((x, y), value)`, or `((x, y, z), value, ...)`. Only the first `value` is used, an in the case of an iterable that yields (x, y, z) the z is ignored. :param color_map: symbol fill color :class:`Color_map`. Symbols are filled with the color lookup using `data`. 
:param symbol_def: :class:`Text_def` defines the symbol font to use, normally `symbols.gfn` is expected, and if used the symbols defined by the `SYMBOL` manifest are valid. For other fonts you will get the symbol requested. The default is `Text_def(font='symbols.gfn', color='k', weight=FONT_WEIGHT_ULTRALIGHT)` :param symbol: the symbol to plot, normally one of `SYMBOL`. :param mode: REPLACE (default) or NEW, which creates a new unique name if group exists :return: :class:`Color_symbols_group` instance .. versionadded:: 9.2 .. versionchanged:: 9.4 added support for passing data as a 2d numpy array """ def valid(xyd): if xyd[0][0] is None or xyd[0][1] is None or xyd[1] is None: return False return True cs = cls(view, name, mode=mode, **kwargs) cs._gxcsymb = gxapi.GXCSYMB.create(color_map.save_file()) if symbol_def is None: symbol_def = Text_def(font='geosoft.gfn', height=(0.25 * view.units_per_map_cm), weight=FONT_WEIGHT_ULTRALIGHT, color=C_BLACK) cs._gxcsymb.set_font(symbol_def.font, symbol_def.gfn, symbol_def.weight, symbol_def.italics) cs._gxcsymb.set_static_col(symbol_def.color.int_value, 0) cs._gxcsymb.set_scale(symbol_def.height) cs._gxcsymb.set_number(symbol) if isinstance(data, np.ndarray): if data.ndim != 2 or data.shape[1] < 3: raise GroupException(_t('data array must have shape (-1, 3)')) cs._gxcsymb.add_data(gxvv.GXvv(data[:, 0]).gxvv, gxvv.GXvv(data[:, 1]).gxvv, gxvv.GXvv(data[:, 2]).gxvv) else: xy = gxgm.PPoint([xy[0] for xy in data if valid(xy)]) cs._gxcsymb.add_data(gxvv.GXvv(xy.x).gxvv, gxvv.GXvv(xy.y).gxvv, gxvv.GXvv([d[1] for d in data if valid(d)]).gxvv) view.gxview.col_symbol(cs.name, cs._gxcsymb) if cs.unit_of_measure: color_map.unit_of_measure = cs.unit_of_measure return cs @classmethod def open(cls, view, group_name): """ Open an existing color symbols group. :param view: view that contains the group :param group_name: name of the group, which must be a color symbols group :return: :class:`Color_symbols_group` instance .. 
        versionadded:: 9.2
        """
        cs = cls(view, group_name, mode=READ_ONLY)
        group_number = view.gxview.find_group(group_name)
        cs._gxcsymb = view.gxview.get_col_symbol(group_number)
        return cs

    def color_map(self):
        """
        Return the :class:`geosoft.gxpy.group.Color_map` of a color symbol group.

        .. versionadded:: 9.3
        """
        itr = gxapi.GXITR.create()
        self._gxcsymb.get_itr(itr)
        cmap = geosoft.gxpy.group.Color_map(itr)
        cmap.title = self.name
        cmap.unit_of_measure = self.unit_of_measure
        return cmap


class Aggregate_group(Group):
    """
    Aggregate group in a view

    :Constructors:

        ======== ================================
        `open()` open an existing aggregate group
        `new()`  create a new aggregate group
        ======== ================================

    :Properties:

        :name:  aggregate group name
        :agg:   :class:`gxpy.agg.Aggregate_image` instance

    .. versionadded:: 9.2
    """

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.__del__()

    def __del__(self):
        if hasattr(self, 'agg'):
            self.agg = None
        if hasattr(self, '_close'):
            self._close()

    def __init__(self, view, group_name, mode):
        self.agg = None
        super().__init__(view, group_name, mode=mode)

    @classmethod
    def new(cls, view, agg, name=None, mode=REPLACE, clip=True):
        """
        Create a new aggregate group in a view.

        :param view:    `geosoft.gxpy.view.View` or `geosoft.gxpy.view.View_3d` instance
        :param agg:     `geosoft.gxpy.agg.Aggregate` instance.
        :param name:    group name, default is the aggregate name
        :param mode:    REPLACE (default) or NEW, which creates a unique name if the group exists
        :param clip:    True to clip the aggregate to the view clip limits

        .. versionadded:: 9.2

        .. versionchanged:: 9.3.1 added clip mode
        """
        if name is None:
            name = agg.name
        agg_group = cls(view, name, mode=mode)
        agg_group.agg = agg
        # set the view clip mode while the aggregate is placed, then restore it
        view.clip = clip
        view.gxview.aggregate(agg.gxagg, agg_group.name)
        view.clip = False
        return agg_group

    @classmethod
    def open(cls, view, group_name):
        """
        Open an existing aggregate group in a view.
        :param view:        `geosoft.gxpy.view.View` or `geosoft.gxpy.view.View_3d` instance
        :param group_name:  group name (or number)

        .. versionadded:: 9.2
        """
        agg_group = cls(view, group_name, mode=READ_ONLY)
        if isinstance(group_name, int):
            group_number = group_name
        else:
            group_number = view.gxview.find_group(agg_group.name)
        agg_group.agg = gxagg.Aggregate_image.open(view.gxview.get_aggregate(group_number))
        return agg_group


class VoxDisplayGroup(Group):
    """
    Vox display group in a view.  Use class methods `new()` and `open()` to create
    instances of `VoxDisplayGroup`.

    :Constructors:

        ======== ==================================
        `open()` open an existing vox_display group
        `new()`  create a new vox_display group
        ======== ==================================

    .. versionadded:: 9.3.1
    """

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.__del__()

    def __del__(self):
        if hasattr(self, '_voxd'):
            self._voxd = None
        if hasattr(self, '_close'):
            self._close()

    def __init__(self, view3d, group_name, mode=REPLACE):
        self._voxd = None
        # vox displays only make sense in a 3D view
        if not view3d.is_3d:
            raise GroupException(_t('View must be 3d'))
        super().__init__(view3d, group_name, mode=mode)

    @classmethod
    def new(cls, view3d, voxd, name=None, mode=REPLACE):
        """
        Add a VoxDisplay as a new group in the view

        :param view3d:  `geosoft.gxpy.view.View_3d` instance
        :param voxd:    `geosoft.gxpy.vox_display.VoxDisplay` instance
        :param name:    group name, default is the voxd name
        :param mode:    REPLACE (default) or NEW, which creates a unique name if the group exists

        .. versionadded:: 9.3.1
        """
        if name is None:
            name = voxd.name
        voxd_group = cls(view3d, name, mode=mode)
        ext = voxd.vox.extent
        if voxd.is_vector:
            # vector voxels are drawn as cones scaled by the cone specs
            scale, height_base_ratio, max_base_size_ratio, max_cones = voxd.vector_cone_specs
            if max_cones is None:
                max_cones = gxapi.iDUMMY
            minimum_value = voxd.shell_limits[0]
            if minimum_value is None:
                minimum_value = 0.
view3d.gxview.draw_vector_voxel_vectors(voxd.vox.gxvox, name, voxd.color_map.gxitr, scale, height_base_ratio, max_base_size_ratio, minimum_value, max_cones) # add to extent to make room for vectors cell2 = min(min(voxd.vox.cells_x), min(voxd.vox.cells_y), min(voxd.vox.cells_z)) * 4. ext = gxgm.Point2((ext.p0 - cell2, ext.p1 + cell2)) else: view3d.gxview.voxd(voxd.gxvoxd, voxd_group.name) view3d.add_extent(ext) voxd_group._voxd = voxd voxd_group.unit_of_measure = voxd.unit_of_measure return voxd_group @classmethod def open(cls, view, group_name): """ Open an existing `VoxDisplayGroup` in a 3d view. :param view: the 3d view :param group_name: the name of the group to open, must be a `gxapi.GXVOXD` or `gxapi.GXVECTOR3D`. .. versionadded: 9.3.1 """ voxd_group = cls(view, group_name, mode=READ_ONLY) if view.gxview.is_group(group_name, gxapi.MVIEW_IS_VOXD): voxd_group._voxd = gxvoxd.VoxDisplay.gxapi_gxvoxd(voxd_group.view.gxview.get_voxd(voxd_group.number)) elif view.gxview.is_group(group_name, gxapi.MVIEW_IS_VECTOR3D): voxd_group._voxd = gxvoxd.VoxDisplay.gxapi_gxvoxd(voxd_group.view.gxview.get_vector_3d(voxd_group.number), name=group_name + ".geosoft_vectorvoxel") else: raise GroupException('Group "{}" is not a GXVOXD or a GXVECTOR3D'.format(group_name)) return voxd_group @property def voxd(self): """ The `geosoft.gxpy.vox_display.VoxDisplay` for this vox group. .. versionadded:: 9.3.1 """ return self._voxd class Color_map: """ Color map for establishing data color mapping for things like aggregates and color symbols. :param cmap: the name of a Geosoft color map file (`.tbl, .zon, .itr, .agg`) from which to establish the initial colors. If the file does not have zone values, which is the case for a `.tbl` file, the Color_map will be uninitialized and you can use one of the `set` methods to establish zone values. You can also provide an `int`, which will create an uninitialized map of the the specified length, or a :class:`geosoft.gxapi.GXITR` instance. 
If not specified the Geosoft default color table is used. :param title: Color map title which is displayed in the color map legend. :param unit_of_measure: Unit of measure to be displayed in a color map legend. .. versionadded:: 9.2 .. versionchanged:: 9.3 changed `units` to `unit_of_measure` for consistency across gxpy """ def __init__(self, cmap=None, title=None, unit_of_measure=None): if cmap is None: sr = gxapi.str_ref() if gxapi.GXSYS.global_('MONTAJ.DEFAULT_COLOUR', sr) == 0: cmap = sr.value if not cmap: cmap = 'colour' if isinstance(cmap, str): if cmap == 'color': cmap = 'colour' base, ext = os.path.splitext(cmap) if not ext: cmap = cmap + '.tbl' self.file_name = cmap self.gxitr = gxapi.GXITR.create_file(cmap) elif isinstance(cmap, int): self.gxitr = gxapi.GXITR.create() self.gxitr.set_size(cmap) for i in range(cmap): self.__setitem__(i, (gxapi.rMAX, C_BLACK)) self.file_name = None elif isinstance(cmap, gxapi.GXITR): self.gxitr = cmap else: raise ValueError('Cannot make a color map from: {}'.format(cmap)) self._next = 0 self._title = title self._units = unit_of_measure def __iter__(self): return self def __next__(self): if self._next >= self.length: self._next = 0 raise StopIteration else: self._next += 1 return self.__getitem__(self._next - 1) def __getitem__(self, item): if item < 0 or item >= self.length: raise IndexError ir = gxapi.int_ref() self.gxitr.get_zone_color(item, ir) color = Color(ir.value) if item < self.length - 1: v = self.gxitr.get_zone_value(item) else: v = None return v, color def __setitem__(self, item, setting): if item < 0 or item >= self.length: raise IndexError if not isinstance(setting[1], int): setting = (setting[0], setting[1].int_value) self.gxitr.set_zone_color(item, setting[1]) if item < self.length - 1: self.gxitr.set_zone_value(item, setting[0]) def __eq__(self, other): if self.length != other.length: return False for i in range(self.length): if self[i] != other[i]: return False return True @property def title(self): """ 
Title, usually the name of the data from which the color bar was made or is intended. None if no title .. versionadded:: 9.2 """ return self._title @title.setter def title(self, title): if title: self._title = str(title) else: self._title = None @property def unit_of_measure(self): """ Data unit of measure for the data from which the color bar was made or is intended. None if the unit of measure is unknown. .. versionadded:: 9.2 """ return self._units @unit_of_measure.setter def unit_of_measure(self, units): if units: self._units = str(units) else: self._units = None @property def data_limits(self): """ Data limits of color map The limit values are for information only. Applications will assume that these values represent the largest and smallest values in a population represented by the ITR. If they are dummy, they have not been set. :returns: min/max tuple .. versionadded:: 9.4 """ min = gxapi.float_ref() max = gxapi.float_ref() self.gxitr.get_data_limits(min, max) return (min.value, max.value) @data_limits.setter def data_limits(self, limits): self.gxitr.get_data_limits(limits[0], limits[1]) @property def length(self): """ Number of color zones in the map. """ return self.gxitr.get_size() @property def brightness(self): """ Brightness is a value between -1 (black) and +1 (white), The default is 0. :returns: brightness, -1 to +1 .. versionadded:: 9.2 """ return self.gxitr.get_brightness() @property def color_map(self): """list of zone limts, colours in the color map""" return [vc for vc in self] @property def color_map_rgb(self): """list of zone limits and (red, green, blue) colours""" return [(vc[0], vc[1].rgb) for vc in self] @brightness.setter def brightness(self, value): """Map brightness between -1 (black ) and +1 (white. 
Can be set.""" self.gxitr.change_brightness(value) @property def model_type(self): """Geosoft colour model used in the Geosoft :class:`geosoft.gxapi.GXITR`""" return self.gxitr.get_zone_model_type() @property def initialized(self): """ Returns True if the color_map has been initialized to have zone boundaries. .. versionadded:: 9.2 """ return self.length > 0 and self[0][0] != gxapi.rMAX def set_sequential(self, start=0, increment=1): """ Set color map zones based on a start and increment between each color zone. :param start: minimum zone boundary, values <= this value will have the first color :param increment: increment between each color. .. versionadded:: 9.2 """ if increment <= 0: raise ValueError(_t('increment must be > 0.')) for i in range(self.length - 1): self.gxitr.set_zone_value(i, start + i * increment) def set_linear(self, minimum, maximum, inner_limits=True, contour_interval=None): """ Set the map boundaries based on a linear distribution between minimum and maximum. :param minimum: minimum :param maximum: maximum :param inner_limits: True if the range specifies the inner limits of the color mappings, in which case values less than or equal to the minimum are mapped to the first color and colors greater than the maximum are mapped to the last color. If False, the minimum and maximum are at the outer-edges of the color map. :param contour_interval: align color edges on this interval, which is useful for matching colors contour map, for example. The color map will be reduced in size by thinning of unneeded colors if necessary. .. 
versionadded:: 9.2 """ if inner_limits: if self.length < 3: raise GroupException(_t("Colour map must have length >= 3 for inner edge linear range.")) delta = (maximum - minimum) / (self.length - 2) minimum -= delta maximum += delta self.gxitr.linear(minimum, maximum, gxapi.rDUMMY if contour_interval is None else contour_interval) def set_logarithmic(self, minimum, maximum, contour_interval=None): """ Set the color boundaries based on a logarithmic distribution between minimum and maximum. :param minimum: minimum, must be > 0 :param maximum: maximum :param contour_interval: align color edges on this interval, 10 for powers of 10. unneeded colors if necessary. .. versionadded:: 9.2 """ self.gxitr.log_linear(minimum, maximum, gxapi.rDUMMY if contour_interval is None else contour_interval) def set_normal(self, standard_deviation, mean, expansion=1.0, contour_interval=None): """ Set the color boundaries using a normal distribution around a mean. :param standard_deviation: the standard deviation of the normal distribution. :param mean: maximum :param expansion: expand by this factor around the mean :param contour_interval: align color edges on this interval, 10 for powers of 10. unneeded colors if necessary. .. versionadded:: 9.2 """ self.gxitr.normal(standard_deviation, mean, expansion, gxapi.rDUMMY if contour_interval is None else contour_interval) def color_of_value(self, value): """ Return the gxg.Color of a value. The mapping is determined with exclusive minima, inclusive maxima for each color level. Values <= level [0] are assigned the [0] color, and values greater than the the [n-2] level are assigned the [n-1] color. :param value: data value :returns: :class:`Color` instance .. versionadded:: 9.2 """ return Color(self.gxitr.color_value(value)) def save_file(self, file_name=None): """ Save to a Geosoft file, `.tbl`, `.itr` or `.zon`. 
If the file_name does not have an extension and the color_map has not been initialized a `.tbl` file is created (colors only), otherwise a `.itr` is created, which contains both zone boundaries and colors. :param file_name: file name, if None a temporary file is created This is useful for gxapi methods that require a colour map to be loaded from a file. Say you cave a Color_map instance named `cmap` and you want to create a GXCSYMB instance, which requires a colur map file: .. code:: cs = gxapi.GXCSYMB.create(cmap.save_file()) .. versionadded:: 9.2 """ if file_name is None: file_name = gx.gx().temp_file() fn, ext = os.path.splitext(file_name) if not ext: if self.initialized: file_name = fn + '.itr' else: file_name = fn + '.tbl' self.gxitr.save_file(file_name) return file_name <file_sep>/examples/extra_tests/test_stress_size.py import numpy as np import gc import unittest import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.gx as gx import geosoft.gxpy.va as gxva import geosoft.gxpy.vv as gxvv import geosoft.gxpy.gdb as gxdb class Test(unittest.TestCase): @classmethod def setUpClass(cls): cls.gx = gx.GXpy(log=print) def start(self): self._func = self.id().split('.')[-1] gx.gx().log('\n' + self._func) def test_vv(self): self.start() max = gxapi.iMAX // 16 npdata = np.empty(max) with gxvv.GXvv(npdata) as vv: self.assertTrue(vv.length, max) del npdata gc.collect() npdata = np.empty(gxapi.iMAX + 1) self.assertRaises(gxvv.VVException, gxvv.GXvv, npdata) del npdata gc.collect() def test_va(self): self.start() max = gxapi.iMAX // 16 print('max', max) npdata = np.empty(max * 2).reshape((max, 2)) with gxva.GXva(npdata) as va: self.assertTrue(va.length, gxapi.iMAX) del npdata gc.collect() npdata = np.empty((gxapi.iMAX + 1) * 2).reshape(((gxapi.iMAX + 1), 2)) self.assertRaises(gxva.VAException, gxva.GXva, npdata) del npdata gc.collect() def test_gdb(self): self.start() name = None pagesize = 4096 try: max_index = 65534 * pagesize // 8 print('maximum index', 
max_index) with gxdb.Geosoft_gdb.new('new', overwrite=True, comp=gxdb.COMP_NONE, page_size=pagesize) as gdb: name = gdb.file_name line = gdb.new_line('test') npd = np.zeros(max_index) npd_size = np.size(npd) gdb.write_line(line, npd, ['xx']) del npd npd2, ch, fid = gdb.read_line(line) self.assertEqual(len(ch), 1) self.assertEqual(np.size(npd2), npd_size) del npd2 finally: gxdb.delete_files(name) ############################################################################################## if __name__ == '__main__': unittest.main()<file_sep>/geosoft/gxapi/GXMESH.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMESH(gxapi_cy.WrapMESH): """ GXMESH class. High Performance Surface API. """ def __init__(self, handle=0): super(GXMESH, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMESH <geosoft.gxapi.GXMESH>` :returns: A null `GXMESH <geosoft.gxapi.GXMESH>` :rtype: GXMESH """ return GXMESH() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, name): """ Creates a new Mesh :param name: Mesh Name :type name: str :returns: `GXMESH <geosoft.gxapi.GXMESH>` handle, terminates if creation fails :rtype: GXMESH .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMESH._create(GXContext._get_tls_geo(), name.encode()) return GXMESH(ret_val) @classmethod def open(cls, fileName, lstMeshNames): """ Opens an existing Mesh :param fileName: File Name :param lstMeshNames: `GXLST <geosoft.gxapi.GXLST>` to fill with Mesh Names :type fileName: str :type lstMeshNames: GXLST :returns: `GXMESH <geosoft.gxapi.GXMESH>` handle, terminates if creation fails :rtype: GXMESH .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMESH._open(GXContext._get_tls_geo(), fileName.encode(), lstMeshNames) return GXMESH(ret_val) def insert_patch(self, mesh_name): """ Inserts a new surface patch to the mesh specified by a unique ID :param mesh_name: Mesh Name :type mesh_name: str :returns: Patch ID of the inserted patch :rtype: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._insert_patch(mesh_name.encode()) return ret_val def delete_patch(self, mesh_name, patch_id): """ Deletes a patch specified by Patch ID from a mesh :param mesh_name: Mesh Name :param patch_id: Patch ID :type mesh_name: str :type patch_id: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_patch(mesh_name.encode(), patch_id) def patch_exists(self, mesh_name, patch_id): """ Checks if a patch specified by a patch ID exists in a mesh :param mesh_name: Mesh Name :param patch_id: Patch ID :type mesh_name: str :type patch_id: int :returns: TRUE if patch exists :rtype: bool .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._patch_exists(mesh_name.encode(), patch_id) return ret_val def num_patches(self, mesh_name): """ Returns the number of patches added to the mesh :param mesh_name: Mesh Name :type mesh_name: str :returns: The number of patches added to the mesh :rtype: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_patches(mesh_name.encode()) return ret_val def add_vertex(self, mesh_name, patch_id, x, y, z): """ Adds a vertex to a patch in a mesh :param mesh_name: Mesh Name :param patch_id: Patch ID :param x: x coordinate of the vertex :param y: y coordinate of the vertex :param z: z coordinate of the vertex :type mesh_name: str :type patch_id: int :type x: float :type y: float :type z: float :returns: Returns the vertex index of the added vertex :rtype: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._add_vertex(mesh_name.encode(), patch_id, x, y, z) return ret_val def num_vertices(self, mesh_name, patch_id): """ Number of vertices in a patch in mesh :param mesh_name: Mesh Name :param patch_id: Patch ID :type mesh_name: str :type patch_id: int :returns: Returns the number of vertices in a patch :rtype: int .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_vertices(mesh_name.encode(), patch_id) return ret_val def add_face(self, mesh_name, patch_id, v0, v1, v2): """ Adds a face to a patch in a mesh :param mesh_name: Mesh Name :param patch_id: Patch ID :param v0: Vertex index 0 for the face :param v1: Vertex index 1 for the face :param v2: Vertex index 2 for the face :type mesh_name: str :type patch_id: int :type v0: int :type v1: int :type v2: int :returns: Returns the face index of the added face :rtype: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._add_face(mesh_name.encode(), patch_id, v0, v1, v2) return ret_val def num_faces(self, mesh_name, patch_id): """ Number of faces in a patch in mesh :param mesh_name: Mesh Name :param patch_id: Patch ID :type mesh_name: str :type patch_id: int :returns: Returns the number of faces in a patch :rtype: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_faces(mesh_name.encode(), patch_id) return ret_val def get_vertex_point(self, mesh_name, patch_id, vertex_index, x_coordinate, y_coordinate, z_coordinate): """ Number of faces in a patch in mesh :param mesh_name: Mesh Name :param patch_id: Patch ID :param vertex_index: Vertex Index :param x_coordinate: X coordinate :param y_coordinate: Y coordinate :param z_coordinate: Z coordinate :type mesh_name: str :type patch_id: int :type vertex_index: int :type x_coordinate: float_ref :type y_coordinate: float_ref :type z_coordinate: float_ref :returns: Returns the number of faces in a patch :rtype: int .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, x_coordinate.value, y_coordinate.value, z_coordinate.value = self._get_vertex_point(mesh_name.encode(), patch_id, vertex_index, x_coordinate.value, y_coordinate.value, z_coordinate.value) return ret_val def get_vertices(self, mesh_name, patch_id, vert_v_vx, vert_v_vy, vert_v_vz): """ Returns all the vertices in a patch :param mesh_name: Mesh Name :param patch_id: Patch ID :param vert_v_vx: Vertices X :param vert_v_vy: Vertices Y :param vert_v_vz: Vertices Z :type mesh_name: str :type patch_id: int :type vert_v_vx: GXVV :type vert_v_vy: GXVV :type vert_v_vz: GXVV .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_vertices(mesh_name.encode(), patch_id, vert_v_vx, vert_v_vy, vert_v_vz) def get_faces(self, mesh_name, patch_id, face_v_1, face_v_2, face_v_3): """ Returns all the faces comprising of vertex indices in a patch :param mesh_name: Mesh Name :param patch_id: Patch ID :param face_v_1: Face vertex 1 :param face_v_2: Face vertex 2 :param face_v_3: Face vertex 3 :type mesh_name: str :type patch_id: int :type face_v_1: GXVV :type face_v_2: GXVV :type face_v_3: GXVV .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_faces(mesh_name.encode(), patch_id, face_v_1, face_v_2, face_v_3) def insert_attributes(self, mesh_name, attribute_name, data_type, attribute_type): """ Inserts an attribute set to a mesh :param mesh_name: Mesh Name :param attribute_name: Attribute Name :param data_type: :ref:`ATTRIBUTE_DATA_TYPE` :param attribute_type: :ref:`ATTRIBUTE_TYPE` :type mesh_name: str :type attribute_name: str :type data_type: int :type attribute_type: int .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._insert_attributes(mesh_name.encode(), attribute_name.encode(), data_type, attribute_type) def set_attribute_values(self, mesh_name, attribute_name, data_type, attribute_type, patch_id, vv): """ Inserts an attribute set to a mesh :param mesh_name: Mesh Name :param attribute_name: Attribute Name :param data_type: :ref:`ATTRIBUTE_DATA_TYPE` :param attribute_type: :ref:`ATTRIBUTE_TYPE` :param patch_id: Patch ID :param vv: Attributes VV `GXVV <geosoft.gxapi.GXVV>` :type mesh_name: str :type attribute_name: str :type data_type: int :type attribute_type: int :type patch_id: int :type vv: GXVV .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attribute_values(mesh_name.encode(), attribute_name.encode(), data_type, attribute_type, patch_id, vv) def get_attribute_values(self, mesh_name, attribute_name, data_type, attribute_type, patch_id, vv): """ Inserts an attribute set to a mesh :param mesh_name: Mesh Name :param attribute_name: Attribute Name :param data_type: :ref:`ATTRIBUTE_DATA_TYPE` :param attribute_type: :ref:`ATTRIBUTE_TYPE` :param patch_id: Patch ID :param vv: Attributes VV `GXVV <geosoft.gxapi.GXVV>` :type mesh_name: str :type attribute_name: str :type data_type: int :type attribute_type: int :type patch_id: int :type vv: GXVV .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_attribute_values(mesh_name.encode(), attribute_name.encode(), data_type, attribute_type, patch_id, vv) @classmethod def import_grid_to_mesh(cls, grid_file_name, geosurface_filename, surface_name): """ Imports a Grid to a Surface. 
Creates a new Geosurface file for the surface :param grid_file_name: Grid File Name :param geosurface_filename: Surface File Name :param surface_name: Surface Item Name within the file :type grid_file_name: str :type geosurface_filename: str :type surface_name: str :returns: `GXMESH <geosoft.gxapi.GXMESH>` handle, terminates if creation fails :rtype: GXMESH .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMESH._import_grid_to_mesh(GXContext._get_tls_geo(), grid_file_name.encode(), geosurface_filename.encode(), surface_name.encode()) return GXMESH(ret_val) def save(self): """ Saves Mesh to the Project Cache and Geosurface file .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._save() ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXEXT.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXEXT(gxapi_cy.WrapEXT): """ GXEXT class. External (plug-in) image methods. 
""" def __init__(self, handle=0): super(GXEXT, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXEXT <geosoft.gxapi.GXEXT>` :returns: A null `GXEXT <geosoft.gxapi.GXEXT>` :rtype: GXEXT """ return GXEXT() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def get_info(cls, img, xmin, ymin, xmax, ymax, ipj): """ Retrieves information about an external image format. :param img: Image Name :param xmin: X Min :param ymin: Y Min :param xmax: X Max :param ymax: Y Max :param ipj: Projection Information :type img: str :type xmin: float_ref :type ymin: float_ref :type xmax: float_ref :type ymax: float_ref :type ipj: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ xmin.value, ymin.value, xmax.value, ymax.value = gxapi_cy.WrapEXT._get_info(GXContext._get_tls_geo(), img.encode(), xmin.value, ymin.value, xmax.value, ymax.value, ipj) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/test_dataframe.py import unittest import os import geosoft import geosoft.gxpy as gxpy import geosoft.gxpy.gx as gx import geosoft.gxpy.system as gsys import geosoft.gxpy.dataframe as gxdf from base import GXPYTest class Test(GXPYTest): def test_df(self): self.start() self.assertEqual(gxdf.__version__, geosoft.__version__) df = gxdf.Data_frame(initial='maptmpl') #self.gxp.log(df) self.assertTrue(len(df) > 0) self.assertTrue('LAYOUT' in df.columns) self.assertTrue('UP_ANG' in df.columns) self.assertTrue('Unlimited figure' 
in df.index) self.assertEqual(df.loc['portrait C', 'MARGIN_LEFT'], '3') df = gxdf.Data_frame(initial='datumtrf', records="*Cape to WGS 84 (3*)") self.assertEqual(len(df), 1) self.assertEqual(df.loc['*Cape to WGS 84 (3*)', 'DX'], '-138') df = gxdf.Data_frame(initial='datumtrf', records=("*Cape to WGS 84 (3*)","Tete to WGS 84 (6)")) self.assertEqual(len(df), 2) self.assertEqual(df.loc['*Cape to WGS 84 (3*)', 'DX'], '-138') self.assertEqual(df.loc['Tete to WGS 84 (6)', 'CODE'], '6901') df = gxdf.Data_frame(initial='datumtrf', columns="DX") self.assertEqual(len(df.columns), 1) self.assertEqual(df.loc['*Cape to WGS 84 (3*)', 'DX'], '-138') df = gxdf.Data_frame(initial='datumtrf', columns=["DX", "DZ"]) self.assertEqual(len(df.columns), 2) self.assertEqual(df.loc['*Cape to WGS 84 (3*)', 'DX'], '-138') self.assertEqual(df.loc['*Cape to WGS 84 (3*)', 'DZ'], '-289') df = gxdf.Data_frame(initial='datumtrf', columns=("DX", "DZ", "CODE"), records=("*Cape to WGS 84 (3*)","Tete to WGS 84 (6)")) self.assertEqual(len(df), 2) self.assertEqual(len(df.columns), 3) self.assertEqual(df.loc['*Cape to WGS 84 (3*)', 'DX'], '-138') self.assertEqual(df.loc['*Cape to WGS 84 (3*)', 'DZ'], '-289') self.assertEqual(df.loc['Tete to WGS 84 (6)', 'CODE'], '6901') def test_raises(self): self.start() self.assertRaises(gxdf.DfException, gxdf.Data_frame, initial='bogus') self.assertRaises(gxdf.DfException, gxdf.Data_frame, initial='datumtrf', columns="NOT_THERE") self.assertRaises(gxdf.DfException, gxdf.Data_frame, initial='datumtrf', records="NOT_THERE") self.assertRaises(gxdf.DfException, gxdf.Data_frame, initial='datumtrf', records="") def test_dict(self): self.start() m = gxdf.table_record('media', 'Unlimited') self.assertEqual(m['SIZE_X'], '300') self.assertEqual(gxdf.table_record('maptmpl', 'portrait A4')['MEDIA'], 'A4') m = gxdf.table_column('media','FULLSIZE_Y') self.assertEqual(m['letter'], '21.59') self.assertRaises(gxdf.DfException, gxdf.table_record, 'bogus', 'bogus') 
        self.assertRaises(gxdf.DfException, gxdf.table_record, 'maptmpl', 'bogus')
        self.assertRaises(gxdf.DfException, gxdf.table_column, 'maptmpl', 'bogus')

    def test_doc_sample(self):
        # Exercises the documented CSV-table workflow: write a small Geosoft-style
        # CSV table, load it as a Data_frame, and verify lookups by label and position.
        self.start()

        def testraise(index, column):
            # helper so assertRaises can trigger a KeyError on a bad column name
            df.loc[index, column]

        with open(self.gx.temp_file()+'.csv', 'w') as f:
            rcname = f.name
            # '/' prefix marks a comment line; '__' prefix hides a column from display
            f.write('/ standard Geosoft rock codes\n')
            f.write('code,label,__DESCRIPTION,PATTERN,PAT_SIZE,PAT_DENSITY,PAT_THICKNESS,COLOR\n')
            f.write('bau,BAU,BAUXITE,100,,,,RG49B181\n')
            f.write('bif,BIF,"BANDED IRON FM",202,,,,R\n')
            f.write('cal,CAL,CALCRETE,315,,,,B\n')
            f.write('cbt,CBT,CARBONATITE,305,,,,R128G128B192\n')

        df = gxpy.dataframe.Data_frame(rcname)
        self.assertEqual(len(df), 4)
        self.assertEqual(df.loc['bif', 'DESCRIPTION'], "BANDED IRON FM")
        self.assertEqual(df.loc['bif'][1], "BANDED IRON FM")
        self.assertEqual(df.iloc[1,0], "BIF")
        self.assertEqual(df.loc['cal', 'PATTERN'], "315")
        # column names are case-sensitive: 'pattern' is not 'PATTERN'
        self.assertRaises(KeyError, testraise, 'cal', 'pattern')

if __name__ == '__main__':
    unittest.main()
<file_sep>/geosoft/gxpy/project.py
"""
Geosoft desktop project interface, which provides access to an active and open Geosoft desktop project.

:Classes:

    ======================== =====================
    :class:`Geosoft_project` the geosoft project
    ======================== =====================

:Constants:
    :DOC_TYPE_DATABASE:
    :DOC_TYPE_GRID:
    :DOC_TYPE_MAP:
    :DOC_TYPE_3DV:
    :DOC_TYPE_VOXEL:
    :DOC_TYPE_VOXI:
    :DOC_TYPE_GMS3D:
    :DOC_TYPE_GMS2D:
    :DOC_TYPE_ALL:

.. seealso:: :mod:`geosoft.gxapi.GXPROJ`, :mod:`geosoft.gxapi.GXEDB`, :mod:`geosoft.gxapi.GXEMAP`

.. note::

    Test example:
    `geosoft project tests <https://github.com/GeosoftInc/gxpy/blob/master/examples/om-extensions/test_project.py>`_
"""
import os

import geosoft
import geosoft.gxapi as gxapi
from .utility import dict_from_lst
from . import vv as gxvv

__version__ = geosoft.__version__


def _t(s):
    # translation hook placeholder: currently the identity function
    return s


# Document-type name constants understood by GXPROJ document functions.
DOC_TYPE_DATABASE = "Database"
DOC_TYPE_GRID = "Grid"
DOC_TYPE_MAP = "Map"
DOC_TYPE_3DV = "3DView"
DOC_TYPE_VOXEL = "Voxel"
DOC_TYPE_VOXI = "VoxelInversion"
DOC_TYPE_GMS3D = "GMS3D"
DOC_TYPE_GMS2D = "GMS2D"
DOC_TYPE_ALL = "All"


class ProjectException(geosoft.GXRuntimeError):
    """
    Exceptions from :mod:`geosoft.gxpy.project`.

    .. versionadded:: 9.1
    """
    pass


def running_script():
    """
    :returns: 1 if running from a script, 0 if running interactively
              (returned as a bool: ``not gxapi.GXSYS.interactive()``).

    .. versionadded:: 9.1
    """
    return not gxapi.GXSYS.interactive()


class Geosoft_project:
    """
    Use this class to interact with an open Geosoft project. This singleton class is available only from an
    extension running from an open Geosoft project.
    """

    def _list_open_docs(self, dtype):
        # Documents of type `dtype` currently open in the project workspace.
        # 'U2048' gives wide string elements for long document path names.
        with gxvv.GXvv(None, 'U2048') as docvv:
            gxapi.GXPROJ.list_loaded_documents(docvv.gxvv, dtype)
            return docvv.list()

    def _list_project_docs(self, dtype):
        # All documents of type `dtype` in the project, open or not.
        with gxvv.GXvv(None, 'U2048') as docvv:
            gxapi.GXPROJ.list_documents(docvv.gxvv, dtype)
            return docvv.list()

    def _current_doc(self, dtype):
        # Name of the document of type `dtype` with current (most recent) focus.
        s = gxapi.str_ref()
        gxapi.GXPROJ.current_document_of_type(s, dtype)
        return s.value

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # no cleanup required; exceptions are not suppressed
        pass

    def __repr__(self):
        return "{}({})".format(self.__class__, self.__dict__)

    def __str__(self):
        return self.name

    def __init__(self):
        # project_file: normalized path of the open project; name: file name without extension
        s = gxapi.str_ref()
        gxapi.GXPROJ.get_name(s)
        self.project_file = os.path.normpath(s.value)
        self.name = os.path.basename(self.project_file).split('.')[0]

    @property
    def gid(self):
        """ Geosoft ID of the user"""
        # NOTE(review): relies on geosoft.gxpy being reachable as an attribute of
        # the geosoft package — confirm against the package layout.
        return(geosoft.gxpy.gx.gx().gid)

    @property
    def project_databases(self):
        """list of databases in the project"""
        return self._list_project_docs(DOC_TYPE_DATABASE)

    @property
    def project_grids(self):
        """list of grids in the project"""
        return self._list_project_docs(DOC_TYPE_GRID)

    @property
    def project_maps(self):
        """list of maps in the project"""
        return self._list_project_docs(DOC_TYPE_MAP)

    @property
    def project_3dv(self):
        """list of geosoft_3dv (3D views) in the project"""
        return self._list_project_docs(DOC_TYPE_3DV)

    @property
    def project_voxels(self):
        """list of voxels/voxettes in the project"""
        return self._list_project_docs(DOC_TYPE_VOXEL)

    @property
    def project_voxi_models(self):
        """list of VOXI models in the project"""
        return self._list_project_docs(DOC_TYPE_VOXI)

    @property
    def project_gmsys_3d(self):
        """list of GM-SYS 3D models in the project"""
        return self._list_project_docs(DOC_TYPE_GMS3D)

    @property
    def project_gmsys_2d(self):
        """list of GM-SYS 2D models in the project"""
        return self._list_project_docs(DOC_TYPE_GMS2D)

    @property
    def open_databases(self):
        """list of databases open as a database document"""
        return self._list_open_docs(DOC_TYPE_DATABASE)

    @property
    def open_grids(self):
        """list of grids open as a grid document"""
        return self._list_open_docs(DOC_TYPE_GRID)

    @property
    def open_maps(self):
        """list of maps open as a map document"""
        return self._list_open_docs(DOC_TYPE_MAP)

    @property
    def open_3dv(self):
        """list of geosoft_3dv (3d views) open in a 3D viewer"""
        return self._list_open_docs(DOC_TYPE_3DV)

    @property
    def open_voxels(self):
        """list of voxels/voxets open as a document"""
        return self._list_open_docs(DOC_TYPE_VOXEL)

    @property
    def open_voxi_models(self):
        """list of VOXI models open as a document"""
        return self._list_open_docs(DOC_TYPE_VOXI)

    @property
    def open_gmsys_3d(self):
        """list of GM-SYS 3D models open as a document"""
        return self._list_open_docs(DOC_TYPE_GMS3D)

    @property
    def open_gmsys_2d(self):
        """list of GM-SYS 2D models open as a document"""
        return self._list_open_docs(DOC_TYPE_GMS2D)

    @property
    def current_database(self):
        """the open database that has current (or most recent) focus"""
        return self._current_doc(DOC_TYPE_DATABASE)

    @property
    def current_grid(self):
        """the open grid that has current (or most recent) focus"""
        return self._current_doc(DOC_TYPE_GRID)

    @property
    def current_map(self):
        """the open map that has current (or most
recent) focus"""
        return self._current_doc(DOC_TYPE_MAP)

    @property
    def current_3dv(self):
        """the open geosoft_3dv that has current (or most recent) focus"""
        return self._current_doc(DOC_TYPE_3DV)

    @property
    def current_voxel(self):
        """the open voxel that has current (or most recent) focus"""
        return self._current_doc(DOC_TYPE_VOXEL)

    @property
    def current_voxi(self):
        """the open VOXI model that has current (or most recent) focus"""
        return self._current_doc(DOC_TYPE_VOXI)

    @property
    def current_gmsys_3d(self):
        """the open GM-SYS 3D model that has current (or most recent) focus"""
        return self._current_doc(DOC_TYPE_GMS3D)

    @property
    def current_gmsys_2d(self):
        """the open GM-SYS 2D model that has current (or most recent) focus"""
        return self._current_doc(DOC_TYPE_GMS2D)

    @property
    def menus(self):
        """
        Oasis montaj menu information: (default_menus, loaded_menus, user_menus)
        """
        def_menus = gxapi.GXLST.create(512)
        loaded_menus = gxapi.GXLST.create(512)
        user_menus = gxapi.GXLST.create(512)
        gxapi.GXSYS.get_loaded_menus(def_menus, loaded_menus, user_menus)
        # the GXLST keys are the menu names; values are not needed here
        return list(dict_from_lst(def_menus).keys()), \
               list(dict_from_lst(loaded_menus).keys()), \
               list(dict_from_lst(user_menus).keys())

    def current_db_state(self):
        """
        Return the state of the current database.

        :returns: dict of the current database state, {} if there is no current database.

            =================== ========================================================
            'disp_chan_list'    list of displayed channels
            'selection'         current selection as (line, channel, start_fid, end_fid)
            =================== ========================================================

        .. versionadded:: 9.2
        """
        sdb = {}
        if self.current_database:
            glst = gxapi.GXLST.create(4096)
            edb = gxapi.GXEDB.current_no_activate()
            n = edb.disp_chan_lst(glst)
            if n > 0:
                sdb['disp_chan_list'] = list(dict_from_lst(glst).keys())
            else:
                sdb['disp_chan_list'] = []
            s = gxapi.str_ref()
            sch = gxapi.str_ref()
            sln = gxapi.str_ref()
            sfd = gxapi.str_ref()
            edb.get_current_selection(s, sch, sln, sfd)
            # normalize the desktop's '[All]'/'[None]' markers to '*'/'' wildcards
            if sch.value == '[All]':
                sch.value = '*'
            if sln.value == '[All]':
                sln.value = '*'
            if sfd.value == '[All]':
                fd = ('*', '*')
            elif sfd.value == "[None]":
                fd = ('', '')
            else:
                # fiducial range arrives as "<start> to <end>"
                fd = sfd.value.split(' to ')
                fd = (fd[0], fd[1])
            sdb['selection'] = (sln.value, sch.value, fd[0], fd[1])
        return sdb

    def current_map_state(self):
        """
        Return the state of the current map.

        :returns: dict of the current map state, {} if no current map.

            =============== =========================================================
            'current_view'  name of the current view
            'display_area'  (min_x, min_y, max_x, max_y) in units of the current view
            '3d_view_name'  if a 3D view, name of the view
            'point'         (x, y) of the current selection point
            'cursor'        (x, y) of the current cursor location
            =============== =========================================================

        .. versionadded:: 9.2
        """
        smap = {}
        if self.current_map:
            fx = gxapi.float_ref()
            fy = gxapi.float_ref()
            fx2 = gxapi.float_ref()
            fy2 = gxapi.float_ref()
            s = gxapi.str_ref()
            smap = {}
            emap = gxapi.GXEMAP.current_no_activate()
            emap.get_current_view(s)
            smap['current_view'] = s.value
            emap.get_display_area(fx, fy, fx2, fy2)
            smap['display_area'] = (fx.value, fy.value, fx2.value, fy2.value)
            if emap.is_3d_view():
                # NOTE(review): docstring advertises key '3d_view_name' but the code
                # stores '3d_view' — confirm which key callers rely on.
                emap.get_3d_view_name(s)
                smap['3d_view'] = s.value
            else:
                # 2D: point/cursor only meaningful outside a 3D view
                emap.get_cur_point(fx, fy)
                smap["point"] = (fx.value, fy.value, None)
                emap.get_cursor(fx, fy)
                smap["cursor"] = (fx.value, fy.value, None)
        return smap


def user_message(title, message):
    """
    Display a message to the user

    :param title:   message box title
    :param message: message

    ..
versionadded:: 9.2
    """
    gxapi.GXSYS.display_message(title, message)


def _user_input_gx(kind):
    """Resolve and run the user_input GX"""
    gxapi.GXSYS.set_string("USER_INPUT", "TYPE", str(kind))
    # user_input.gx ships beside this module in the user_input sub-folder
    dir = os.path.split(__file__)[0]
    user_input = os.path.join(os.path.join(dir, 'user_input'), 'user_input.gx')
    ret = gxapi.GXSYS.run_gx(user_input)
    if ret == -1:
        # -1 means the user cancelled the dialog; raise GXCancel via the API
        gxapi.GXSYS.cancel_()
    return ret


def pause(title='Pause...', cancel=False):
    """
    Display a pause dialog, wait for user to press continue or cancel

    :param title:   The pause dialog title, default is "Pause..."
    :param cancel:  If True, show a cancel button
    :raises:        :exc:`geosoft.gxapi.GXCancel` if the user cancels the dialog

    .. versionadded:: 9.2
    """
    # filter the USER_INPUT parm group so dialog parameters are not persisted
    gxapi.GXSYS.filter_parm_group("USER_INPUT", 1)
    try:
        gxapi.GXSYS.set_string("USER_INPUT", "TITLE", str(title))
        # GX TYPE 9 = pause without cancel, 10 = pause with cancel button
        if not cancel:
            _user_input_gx(9)
        else:
            _user_input_gx(10)
    finally:
        gxapi.GXSYS.filter_parm_group("USER_INPUT", 0)


def get_user_input(title="Input required...", prompt='?', kind='string', default='', items='', filemask=''):
    """
    Display a dialog prompt on the Geosoft Desktop and wait for user input.
    This method depends on
    `user_input.gx <https://github.com/GeosoftInc/gxpy/tree/master/geosoft/gxpy/user_input>`_
    and can only be used from an extension running inside a Geosoft Desktop application.

    :param title:       dialog box title.  A description can be added as a second-line using a line-break.
                        example: "Your title/nDescriptive help"
    :param prompt:      prompt string to present to the user.
    :param kind:        kind of response required: 'string', 'int', 'float', 'file', 'colour' or 'list'
    :param items:       comma-separated string or list/tuple of items for kind='list'
    :param default:     default value.  For multifile can be a string ('|' delimiter) or list/tuple.
    :param filemask:    File type masks, Comma delimited, or a list/tuple.
                        Examples: '.dat', '\*.dat,\*.grd'.
                        For multiple files if a specific type '\*\*,\*.grd'
    :returns:           user response string, or a list of files if multi-file requested
    :raise:             `GXCancel` if the user cancels the dialog

    .. versionadded:: 9.1
    """
    # filter the USER_INPUT parm group so dialog parameters are not persisted
    gxapi.GXSYS.filter_parm_group("USER_INPUT", 1)
    try:
        # what kind of dialog
        if kind == 'color':  # accept the US spelling as an alias
            kind = 'colour'
        kind_list = {'string': 0, 'float': 1, 'int': 2, 'list': 3, 'colour': 4, 'file': 5, 'newfile': 6, 'oldfile': 7}
        kind = kind_list[kind]

        gxapi.GXSYS.set_string("USER_INPUT", "TITLE", str(title))
        gxapi.GXSYS.set_string("USER_INPUT", "PROMPT", str(prompt))

        # clean up filemask: normalize list/tuple to a ';'-separated string
        if not isinstance(filemask, str):
            if len(filemask) > 0:
                filemask = ';'.join(filemask)
            else:
                filemask = ''
        filemask = filemask.replace(',', ';')
        if filemask == '**':
            # bare multi-file marker implies all file types
            filemask = '**;*.*'
        gxapi.GXSYS.set_string("USER_INPUT", "FILEMASK", filemask)

        # make a list out of the items.
        if len(items) > 0:
            if type(items) is dict:
                items = [(k) for k in items.keys()]
            elif isinstance(items, str):
                items = items.split(',')
            gxapi.GXSYS.set_string("USER_INPUT", "LIST", ",".join(items))

        # make sure default is in the list
        if default not in items:
            if len(items) > 0:
                default = items[0]

        # resolve default string: multi-file defaults use '|' as the delimiter
        if kind == kind_list['file']:
            if isinstance(default, str):
                default = default.replace(',', '|').replace(';', '|')
            else:
                if len(default) > 0:
                    default = '|'.join(default)
                else:
                    default = ''
        gxapi.GXSYS.set_string("USER_INPUT", "RESPONSE", str(default))

        # show the dialog
        ret = _user_input_gx(kind)

        if ret == 0:
            strr = gxapi.str_ref()
            gxapi.GXSYS.gt_string("USER_INPUT", "RESPONSE", strr)
            if kind == kind_list['int']:
                return int(strr.value)
            if kind == kind_list['float']:
                return float(strr.value)
            if kind == kind_list['file'] and filemask[:2] == '**':
                # multi-file response comes back '|'-delimited
                return strr.value.split('|')
            return strr.value

        raise ProjectException(_t('GX Error ({})').format(ret))

    finally:
        gxapi.GXSYS.filter_parm_group("USER_INPUT", 0)


def add_document(doc, type=None, display=True):
    """
    Add a document to the project.
    The document file can be any supported geosoft document type.

    :param doc:     file name for the document to open
    :param type:    one of DOC_TYPE, default will decode the type from the file extension:

                    =================
                    DOC_TYPE_DATABASE
                    DOC_TYPE_GRID
                    DOC_TYPE_MAP
                    DOC_TYPE_3DV
                    DOC_TYPE_VOXEL
                    DOC_TYPE_VOXI
                    DOC_TYPE_GMS3D
                    DOC_TYPE_GMS2D
                    DOC_TYPE_ALL
                    =================

    :param display: False to prevent opening of the document, though the document
                    will be added as a document in the project explorer.

    .. versionadded:: 9.3
    """
    if not type:
        # infer the document type from the file extension
        ext = os.path.splitext(doc)[1].lower()
        # '(' in the extension indicates a grid decoration/qualifier, e.g. "name.grd(GRD)"
        if ext == '.grd' or ('(' in ext):
            type = DOC_TYPE_GRID
        elif ext == '.gdb':
            type = DOC_TYPE_DATABASE
        elif ext == '.map':
            type = DOC_TYPE_MAP
        elif ext == '.geosoft_voxel':
            type = DOC_TYPE_VOXEL
        elif ext == '.geosoft_voxi':
            type = DOC_TYPE_VOXI
        elif ext == '.geosoft_3dv':
            type = DOC_TYPE_3DV
        elif ext == '.geosoft_gmsys2d':
            type = DOC_TYPE_GMS2D
        elif ext == '.geosoft_gmsys3d':
            type = DOC_TYPE_GMS3D
        else:
            raise ProjectException('Cannot determine document type for file extension {}'.format(ext))
    gxapi.GXPROJ.add_document(doc, type, display)


def remove_document(doc):
    """
    Remove a document from the project. The document is identified by the document name,
    which is either a complete file path name, with qualifiers, or the name of the
    document in the project storage.

    :param doc: document name (file and qualifiers if the document source is a file).

    .. versionadded:: 9.3
    """
    gxapi.GXPROJ.remove_document(doc)
<file_sep>/docs/GXPROJ.rst
.. _GXPROJ:

GXPROJ class
==================================

.. autoclass:: geosoft.gxapi.GXPROJ
   :members:

.. _COMMAND_ENV:

COMMAND_ENV constants
-----------------------------------------------------------------------

Command environments

.. autodata:: geosoft.gxapi.COMMAND_ENV_NORMAL
   :annotation:
.. autoattribute:: geosoft.gxapi.COMMAND_ENV_NORMAL

.. autodata:: geosoft.gxapi.COMMAND_ENV_IN3DVIEWER
   :annotation:
.. autoattribute:: geosoft.gxapi.COMMAND_ENV_IN3DVIEWER

.. _TOOL_TYPE:

TOOL_TYPE constants
-----------------------------------------------------------------------

Tool type defines

.. autodata:: geosoft.gxapi.TOOL_TYPE_DEFAULT
   :annotation:
.. autoattribute:: geosoft.gxapi.TOOL_TYPE_DEFAULT

.. autodata:: geosoft.gxapi.TOOL_TYPE_AUXILIARY
   :annotation:
.. autoattribute:: geosoft.gxapi.TOOL_TYPE_AUXILIARY

.. autodata:: geosoft.gxapi.TOOL_TYPE_ALL
   :annotation:
.. autoattribute:: geosoft.gxapi.TOOL_TYPE_ALL

.. _PROJ_DISPLAY:

PROJ_DISPLAY constants
-----------------------------------------------------------------------

How to display an object

.. autodata:: geosoft.gxapi.PROJ_DISPLAY_NO
   :annotation:
.. autoattribute:: geosoft.gxapi.PROJ_DISPLAY_NO

.. autodata:: geosoft.gxapi.PROJ_DISPLAY_YES
   :annotation:
.. autoattribute:: geosoft.gxapi.PROJ_DISPLAY_YES

.. autodata:: geosoft.gxapi.PROJ_DISPLAY_ALWAYS
   :annotation:
.. autoattribute:: geosoft.gxapi.PROJ_DISPLAY_ALWAYS
<file_sep>/geosoft/gxapi/GXEMAPTEMPLATE.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
from .GXMAPTEMPLATE import GXMAPTEMPLATE


### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXEMAPTEMPLATE(gxapi_cy.WrapEMAPTEMPLATE):
    """
    GXEMAPTEMPLATE class.

    The `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` class provides access to a map template
    as displayed within Oasis montaj, but does not change data within the template itself.
    It performs functions such as setting the currently displayed area,
    or drawing "tracking" lines or boxes on the template (which are not part of the template itself).
**Note:** To obtain access to the map template itself, it is recommended practice to begin with an `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` object, and use the Lock function to lock the underlying template to prevent external changes. The returned `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` object may then be safely used to make changes to the template itself. VIRTUAL `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` SUPPORT These methods are only available when running in an external application. They allow the GX to open a map template and then create a Virtual `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` from that map template. The GX can then call MakeCurrent and set the current `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` so that code that follows sees this map template as the current `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`. Supported methods on Virtual EMAPTEMPLATEs are: `current <geosoft.gxapi.GXEMAPTEMPLATE.current>` `current_no_activate <geosoft.gxapi.GXEMAPTEMPLATE.current_no_activate>` `make_current <geosoft.gxapi.GXEMAPTEMPLATE.make_current>` `have_current <geosoft.gxapi.GXEMAPTEMPLATE.have_current>` `current_if_exists <geosoft.gxapi.GXEMAPTEMPLATE.current_if_exists>` `lock <geosoft.gxapi.GXEMAPTEMPLATE.lock>` `un_lock <geosoft.gxapi.GXEMAPTEMPLATE.un_lock>` `get_name <geosoft.gxapi.GXEMAPTEMPLATE.get_name>` `loaded <geosoft.gxapi.GXEMAPTEMPLATE.loaded>` `load <geosoft.gxapi.GXEMAPTEMPLATE.load>` `load_no_activate <geosoft.gxapi.GXEMAPTEMPLATE.load_no_activate>` `un_load_verify <geosoft.gxapi.GXEMAPTEMPLATE.un_load_verify>` `un_load <geosoft.gxapi.GXEMAPTEMPLATE.un_load>` `create_virtual <geosoft.gxapi.GXEMAPTEMPLATE.create_virtual>` """ def __init__(self, handle=0): super(GXEMAPTEMPLATE, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` :returns: A null `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` :rtype: GXEMAPTEMPLATE 
""" return GXEMAPTEMPLATE() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Drag-and-drop methods def drag_drop_enabled(self): """ Checks if drag-and-drop is enabled for the map :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._drag_drop_enabled() return ret_val def set_drag_drop_enabled(self, enable): """ Set whether drag-and-drop is enabled for the map. :param enable: Enables/disables drag-and-drop :type enable: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_drag_drop_enabled(enable) # General @classmethod def current(cls): """ This method returns the Current Edited map template. :returns: `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` Object :rtype: GXEMAPTEMPLATE .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._current(GXContext._get_tls_geo()) return GXEMAPTEMPLATE(ret_val) @classmethod def current_no_activate(cls): """ This method returns the Current Edited map template. :returns: `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` Object :rtype: GXEMAPTEMPLATE .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** This function acts just like `current <geosoft.gxapi.GXEMAPTEMPLATE.current>` except that the document is not activated (brought to foreground) and no guarantee is given about which document is currently active. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._current_no_activate(GXContext._get_tls_geo()) return GXEMAPTEMPLATE(ret_val) @classmethod def current_if_exists(cls): """ This method returns the Current Edited map. :returns: `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` Object to current edited map. If there is no current map, the user is not prompted for a map, and 0 is returned. :rtype: GXEMAPTEMPLATE .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._current_if_exists(GXContext._get_tls_geo()) return GXEMAPTEMPLATE(ret_val) @classmethod def get_map_templates_lst(cls, lst, path): """ Load the file names of open maps into a `GXLST <geosoft.gxapi.GXLST>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` to load :param path: :ref:`EMAPTEMPLATE_PATH` :type lst: GXLST :type path: int :returns: The number of documents loaded into the `GXLST <geosoft.gxapi.GXLST>`. The `GXLST <geosoft.gxapi.GXLST>` is cleared first. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._get_map_templates_lst(GXContext._get_tls_geo(), lst, path) return ret_val def get_name(self, name): """ Get the name of the map object of this `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>`. :param name: Name returned :type name: str_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ name.value = self._get_name(name.value.encode()) @classmethod def have_current(cls): """ This method returns whether a current map is loaded :returns: 0 - no current map. 1 - current map :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._have_current(GXContext._get_tls_geo()) return ret_val @classmethod def i_get_specified_map_name(cls, field, value, name): """ Find a loaded map that has a setting in its reg. :param field: `GXREG <geosoft.gxapi.GXREG>` field name :param value: `GXREG <geosoft.gxapi.GXREG>` field value to find :param name: buffer for map name :type field: str :type value: str :type name: str_ref :returns: 0 - Ok 1 - No Map Found :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, name.value = gxapi_cy.WrapEMAPTEMPLATE._i_get_specified_map_name(GXContext._get_tls_geo(), field.encode(), value.encode(), name.value.encode()) return ret_val def is_locked(self): """ Is this MapTemplate locked :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._is_locked() return ret_val @classmethod def loaded(cls, name): """ Returns 1 if a map is loaded . 
:param name: map name :type name: str :returns: 1 if map is loaded, 0 otherwise. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._loaded(GXContext._get_tls_geo(), name.encode()) return ret_val def get_window_position(self, left, top, right, bottom, state, is_floating): """ Get the map window's position and dock state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`EMAPTEMPLATE_WINDOW_STATE` :param is_floating: Docked or floating :ref:`EMAPTEMPLATE_WINDOW_POSITION` :type left: int_ref :type top: int_ref :type right: int_ref :type bottom: int_ref :type state: int_ref :type is_floating: int_ref .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ left.value, top.value, right.value, bottom.value, state.value, is_floating.value = self._get_window_position(left.value, top.value, right.value, bottom.value, state.value, is_floating.value) def set_window_position(self, left, top, right, bottom, state, is_floating): """ Get the map window's position and dock state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`EMAPTEMPLATE_WINDOW_STATE` :param is_floating: Docked or floating :ref:`EMAPTEMPLATE_WINDOW_POSITION` :type left: int :type top: int :type right: int :type bottom: int :type state: int :type is_floating: int .. 
versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_window_position(left, top, right, bottom, state, is_floating) def read_only(self): """ Checks if a map is currently opened in a read-only mode. :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._read_only() return ret_val @classmethod def load(cls, name): """ Loads maps into the editor. :param name: list of maps (';' or '|' delimited) to load. :type name: str :returns: `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` Object to edited map. :rtype: GXEMAPTEMPLATE .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The last map in the list will be the current map. Maps may already be loaded. Only the first file in the list may have a directory path. All other files in the list are assumed to be in the same directory as the first file. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._load(GXContext._get_tls_geo(), name.encode()) return GXEMAPTEMPLATE(ret_val) @classmethod def load_no_activate(cls, name): """ Loads documents into the workspace :param name: List of documents (';' or '|' delimited) to load. :type name: str :returns: Handle to current edited document, which will be the last database in the list if multiple files were provided. :rtype: GXEMAPTEMPLATE .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function acts just like `load <geosoft.gxapi.GXEMAPTEMPLATE.load>` except that the document(s) is not activated (brought to foreground) and no guarantee is given about which document is currently active. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._load_no_activate(GXContext._get_tls_geo(), name.encode()) return GXEMAPTEMPLATE(ret_val) def lock(self): """ This method locks the Edited map. :returns: `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` Object to map associated with edited map. :rtype: GXMAPTEMPLATE .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._lock() return GXMAPTEMPLATE(ret_val) def make_current(self): """ Makes this `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` object the current active object to the user. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._make_current() @classmethod def un_load(cls, name): """ Unloads a map template. :param name: Name of the map to unload :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the map template is not loaded, nothing happens. Same as `un_load_verify <geosoft.gxapi.GXEMAPTEMPLATE.un_load_verify>` with FALSE to prompt save. 
""" gxapi_cy.WrapEMAPTEMPLATE._un_load(GXContext._get_tls_geo(), name.encode()) @classmethod def un_load_all(cls): """ Unloads all opened maps .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEMAPTEMPLATE._un_load_all(GXContext._get_tls_geo()) @classmethod def un_load_verify(cls, name, prompt): """ Unloads an edited map, optional prompt to save. :param name: Name of map to unload :param prompt: prompt :type name: str :type prompt: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the map is not loaded, nothing happens. If "FALSE", map is saved without a prompt. """ gxapi_cy.WrapEMAPTEMPLATE._un_load_verify(GXContext._get_tls_geo(), name.encode(), prompt) def un_lock(self): """ This method unlocks the Edited map. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._un_lock() # Input def get_box(self, state, min_x, min_y, max_x, max_y): """ Returns the coordinates of a user selected box. :param state: user prompt string :param min_x: X minimum in current view user units. :param min_y: Y minimum in current view user units. :param max_x: X maximum in current view user units. :param max_y: Y maximum in current view user units. :type state: str :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current template units (See GetUnits and SetUnits in `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`) """ ret_val, min_x.value, min_y.value, max_x.value, max_y.value = self._get_box(state.encode(), min_x.value, min_y.value, max_x.value, max_y.value) return ret_val def get_line(self, str_val, min_x, min_y, max_x, max_y): """ Returns the end points of a line. :param str_val: user prompt string :param min_x: X1 in view user units :param min_y: Y1 :param max_x: X2 :param max_y: Y2 :type str_val: str :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :returns: 0 if line returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current template units (See GetUnits and SetUnits in `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`) """ ret_val, min_x.value, min_y.value, max_x.value, max_y.value = self._get_line(str_val.encode(), min_x.value, min_y.value, max_x.value, max_y.value) return ret_val def get_point(self, str_val, x, y): """ Returns the coordinates of a user selected point. :param str_val: user prompt string :param x: X coordinate in current view user units. :param y: Y :type str_val: str :type x: float_ref :type y: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current template units (See GetUnits and SetUnits in `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`) """ ret_val, x.value, y.value = self._get_point(str_val.encode(), x.value, y.value) return ret_val def get_rect(self, str_val, min_x, min_y, max_x, max_y): """ Returns the coordinates of a user selected box starting at a corner. :param str_val: user prompt string :param min_x: X minimum in current view user units. (defines corner) :param min_y: Y :param max_x: X maximum :param max_y: Y :type str_val: str :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current template units (See GetUnits and SetUnits in `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`) """ ret_val, min_x.value, min_y.value, max_x.value, max_y.value = self._get_rect(str_val.encode(), min_x.value, min_y.value, max_x.value, max_y.value) return ret_val def track_point(self, flags, x, y): """ Get point without prompt or cursor change with tracking :param flags: :ref:`EMAPTEMPLATE_TRACK` :param x: X coordinate in current view user units. :param y: Y :type flags: int :type x: float_ref :type y: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, x.value, y.value = self._track_point(flags, x.value, y.value) return ret_val # Selection Methods def get_item_selection(self, item): """ Gets info about the current selected item :param item: returned item name :type item: str_ref :returns: Returns ``True`` if the item is a view :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If nothing is selected the string will be empty and the function will return ``False`` """ ret_val, item.value = self._get_item_selection(item.value.encode()) return ret_val def set_item_selection(self, item): """ Sets the current selected item :param item: item name :type item: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** An empty string will unselect everything. """ self._set_item_selection(item.encode()) # View Window def get_display_area(self, min_x, min_y, max_x, max_y): """ Get the area you are currently looking at. :param min_x: X Min returned :param min_y: Y Min returned :param max_x: X Max returned :param max_y: Y Max returned :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** The coordinates are based on the current template units (See GetUnits and SetUnits in `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`) """ min_x.value, min_y.value, max_x.value, max_y.value = self._get_display_area(min_x.value, min_y.value, max_x.value, max_y.value) def get_template_layout_props(self, snap_to_grid, snap_dist, view_grid, view_rulers, view_units, grid_red, grid_green, grid_blue): """ Get the base layout view properties. :param snap_to_grid: Snap to grid :param snap_dist: Snapping distance (always in mm) :param view_grid: View Grid :param view_rulers: View Rulers :param view_units: :ref:`LAYOUT_VIEW_UNITS` View Units :param grid_red: Grid Red Component (0-255) :param grid_green: Grid Green Component (0-255) :param grid_blue: Grid Blue Component (0-255) :type snap_to_grid: bool_ref :type snap_dist: float_ref :type view_grid: int_ref :type view_rulers: int_ref :type view_units: int_ref :type grid_red: int_ref :type grid_green: int_ref :type grid_blue: int_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This affects the display units and other related properties for the base view of a map. """ snap_to_grid.value, snap_dist.value, view_grid.value, view_rulers.value, view_units.value, grid_red.value, grid_green.value, grid_blue.value = self._get_template_layout_props(snap_to_grid.value, snap_dist.value, view_grid.value, view_rulers.value, view_units.value, grid_red.value, grid_green.value, grid_blue.value) def get_window_state(self): """ Retrieve the current state of the map window :returns: :ref:`EMAPTEMPLATE_WINDOW_STATE` :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_window_state() return ret_val def set_display_area(self, min_x, min_y, max_x, max_y): """ Set the area you wish to see. :param min_x: X Min :param min_y: Y Min :param max_x: X Max :param max_y: Y Max :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are based on the current template units (See GetUnits and SetUnits in `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`) """ self._set_display_area(min_x, min_y, max_x, max_y) def set_template_layout_props(self, snap_to_grid, snap_dist, view_grid, view_rulers, view_units, grid_red, grid_green, grid_blue): """ Set the base layout view properties. :param snap_to_grid: Snap to grid :param snap_dist: Snapping distance (always in mm) :param view_grid: View Grid :param view_rulers: View Rulers :param view_units: :ref:`LAYOUT_VIEW_UNITS` View Units :param grid_red: Grid Red Component (0-255) :param grid_green: Grid Green Component (0-255) :param grid_blue: Grid Blue Component (0-255) :type snap_to_grid: bool :type snap_dist: float :type view_grid: int :type view_rulers: int :type view_units: int :type grid_red: int :type grid_green: int :type grid_blue: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This affects the display units and other related properties for the base view of a map. 
""" self._set_template_layout_props(snap_to_grid, snap_dist, view_grid, view_rulers, view_units, grid_red, grid_green, grid_blue) def set_window_state(self, state): """ Changes the state of the map window :param state: :ref:`EMAPTEMPLATE_WINDOW_STATE` :type state: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_window_state(state) # Virtual @classmethod def create_virtual(cls, name): """ Makes this `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` object the current active object to the user. :param name: Name of map to create a virtual EMAMTEMPLATE from :type name: str :returns: `GXEMAPTEMPLATE <geosoft.gxapi.GXEMAPTEMPLATE>` Object :rtype: GXEMAPTEMPLATE .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAPTEMPLATE._create_virtual(GXContext._get_tls_geo(), name.encode()) return GXEMAPTEMPLATE(ret_val) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXEMAP.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXE3DV import GXE3DV from .GXMAP import GXMAP ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXEMAP(gxapi_cy.WrapEMAP): """ GXEMAP class. The `GXEMAP <geosoft.gxapi.GXEMAP>` class provides access to a map as displayed within Oasis montaj, but (usually) does not change data within the map itself. It performs functions such as setting the currently displayed area, or drawing "tracking" lines or boxes on the map (which are not part of the map itself). **Note:** To obtain access to the map itself, it is recommended practice to begin with an `GXEMAP <geosoft.gxapi.GXEMAP>` object, and use the `lock <geosoft.gxapi.GXEMAP.lock>` function to lock the underlying map to prevent external changes. The returned `GXMAP <geosoft.gxapi.GXMAP>` object (see `GXMAP <geosoft.gxapi.GXMAP>`) may then be safely used to make changes to the map itself. `GXMAP <geosoft.gxapi.GXMAP>` Redraw Rules: 1. Redraws only occur at the end of the proccess (GX or SCRIPT) not during. You can safely call other GX's and the map will not redraw. If you need the map to redraw immediately use `redraw <geosoft.gxapi.GXEMAP.redraw>` instead. 2. If the final GX calls `GXSYS.cancel_ <geosoft.gxapi.GXSYS.cancel_>`, the map redraw is not done. If you need to force a redraw when the user hits cancel use the `redraw <geosoft.gxapi.GXEMAP.redraw>` function. 3. You can set the redraw flag to `EMAP_REDRAW_YES <geosoft.gxapi.EMAP_REDRAW_YES>` or `EMAP_REDRAW_NO <geosoft.gxapi.EMAP_REDRAW_NO>` at any time using `set_redraw_flag <geosoft.gxapi.GXEMAP.set_redraw_flag>`. This flag will only be looked at, when the last call to `un_lock <geosoft.gxapi.GXEMAP.un_lock>` occurs and is ignored on a `GXSYS.cancel_ <geosoft.gxapi.GXSYS.cancel_>`. 4. 
`redraw <geosoft.gxapi.GXEMAP.redraw>` only works if the current map is not locked. It will do nothing if the map is locked. Issue an `un_lock <geosoft.gxapi.GXEMAP.un_lock>` before using this function. VIRTUAL `GXEMAP <geosoft.gxapi.GXEMAP>` SUPPORT These methods are only available when running in an external application. They allow the GX to open a `GXMAP <geosoft.gxapi.GXMAP>` and then create a Virtual `GXEMAP <geosoft.gxapi.GXEMAP>` from that map. The GX can then call `make_current <geosoft.gxapi.GXEMAP.make_current>` and set the current `GXEMAP <geosoft.gxapi.GXEMAP>` so that code that follows sees this map as the current `GXMAP <geosoft.gxapi.GXMAP>`. Supported methods on Virtual EMAPS are: | `current <geosoft.gxapi.GXEMAP.current>` | `current_no_activate <geosoft.gxapi.GXEMAP.current_no_activate>` | `make_current <geosoft.gxapi.GXEMAP.make_current>` | `have_current <geosoft.gxapi.GXEMAP.have_current>` | `current_if_exists <geosoft.gxapi.GXEMAP.current_if_exists>` | `GXMAP.current <geosoft.gxapi.GXMAP.current>` | `lock <geosoft.gxapi.GXEMAP.lock>` | `un_lock <geosoft.gxapi.GXEMAP.un_lock>` | `is_locked <geosoft.gxapi.GXEMAP.is_locked>` | `get_name <geosoft.gxapi.GXEMAP.get_name>` | `set_redraw_flag <geosoft.gxapi.GXEMAP.set_redraw_flag>` | `redraw <geosoft.gxapi.GXEMAP.redraw>` | `loaded <geosoft.gxapi.GXEMAP.loaded>` | `load <geosoft.gxapi.GXEMAP.load>` | `load_no_activate <geosoft.gxapi.GXEMAP.load_no_activate>` | `un_load_verify <geosoft.gxapi.GXEMAP.un_load_verify>` | `un_load <geosoft.gxapi.GXEMAP.un_load>` | `create_virtual <geosoft.gxapi.GXEMAP.create_virtual>` """ def __init__(self, handle=0): super(GXEMAP, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXEMAP <geosoft.gxapi.GXEMAP>` :returns: A null `GXEMAP <geosoft.gxapi.GXEMAP>` :rtype: GXEMAP """ return GXEMAP() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, 
False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Drag-and-drop methods def drop_map_clip_data(self, hglobal): """ Drop Map clipboard data on this `GXEMAP <geosoft.gxapi.GXEMAP>` :param hglobal: Handle to Global Clipboard data :type hglobal: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._drop_map_clip_data(hglobal) def drag_drop_enabled(self): """ Checks if drag-and-drop enabled for the map :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._drag_drop_enabled() return ret_val def set_drag_drop_enabled(self, enable): """ Set whether drag-and-drop is enabled for the map. :param enable: Enables/disables drag-and-drop :type enable: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_drag_drop_enabled(enable) # Drawing def copy_to_clip(self): """ Copy entire map to clipboard. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Four objects are placed on the clipboard: 1. Georefernce Text 2. Bitmap of current window screen resolution 3. EMF of current window screen resolution 4. Entire map as a Geosoft View (go to view mode and hit paste). The coordinates are placed in the current view coordinates. 
""" self._copy_to_clip() def draw_line(self, min_x, min_y, max_x, max_y): """ Draws a line on the current map. :param min_x: X1 :param min_y: Y1 :param max_x: X2 :param max_y: Y2 :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Locations are in the current view user units. The line is temporary and will disappear on the next screen refresh. This function is for you to provide interactive screen feedback to your user. """ self._draw_line(min_x, min_y, max_x, max_y) def draw_rect(self, min_x, min_y, max_x, max_y): """ Draws a rect on the current map. :param min_x: X1 :param min_y: Y1 :param max_x: X2 :param max_y: Y2 :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Locations are in the current view user units. The line is temporary and will disappear on the next screen refresh. This function is for you to provide interactive screen feedback to your user. """ self._draw_rect(min_x, min_y, max_x, max_y) def draw_rect_3d(self, x, y, z, pix): """ Plot a square symbol on a section view. :param x: X - True X location :param y: Y - True Y location :param z: Z - True Z location :param pix: Size in pixels ("radius") :type x: float :type y: float :type z: float :type pix: int .. versionadded:: 9.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** Plot a square symbol on a section view, but input 3D user coordinates The line is temporary and will disappear on the next screen refresh. This function is for you to provide interactive screen feedback to your user. """ self._draw_rect_3d(x, y, z, pix) def draw_ply(self, polygon): """ Draws a polygon on the current map. :param polygon: `GXPLY <geosoft.gxapi.GXPLY>` Object :type polygon: GXPLY .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Locations are in the current view user units. The polygon is temporary and will disappear on the next screen refresh. This function is for you to provide interactive screen feedback to your user. """ self._draw_ply(polygon) def get_display_area(self, min_x, min_y, max_x, max_y): """ Get the area you are currently looking at. :param min_x: X Min returned :param min_y: Y Min returned :param max_x: X Max returned :param max_y: Y Max returned :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Coordinates are based on the current view units. For 3D views this will return the full map extents. """ min_x.value, min_y.value, max_x.value, max_y.value = self._get_display_area(min_x.value, min_y.value, max_x.value, max_y.value) def get_display_area_raw(self, min_x, min_y, max_x, max_y): """ Get the area you are currently looking at in raw map units :param min_x: X Min returned :param min_y: Y Min returned :param max_x: X Max returned :param max_y: Y Max returned :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Coordinates are in millimeters. For 3D views this will return the full map extents. """ min_x.value, min_y.value, max_x.value, max_y.value = self._get_display_area_raw(min_x.value, min_y.value, max_x.value, max_y.value) def get_map_layout_props(self, snap_to_grid, snap_dist, view_grid, view_rulers, view_units, grid_red, grid_green, grid_blue): """ Get the base layout view properties. :param snap_to_grid: Snap to grid :param snap_dist: Snapping distance (always in mm) :param view_grid: View Grid :param view_rulers: View Rulers :param view_units: :ref:`LAYOUT_VIEW_UNITS` View Units :param grid_red: Grid Red Component (0-255) :param grid_green: Grid Green Component (0-255) :param grid_blue: Grid Blue Component (0-255) :type snap_to_grid: bool_ref :type snap_dist: float_ref :type view_grid: int_ref :type view_rulers: int_ref :type view_units: int_ref :type grid_red: int_ref :type grid_green: int_ref :type grid_blue: int_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This affects the display units and other related properties for the base view of a map. """ snap_to_grid.value, snap_dist.value, view_grid.value, view_rulers.value, view_units.value, grid_red.value, grid_green.value, grid_blue.value = self._get_map_layout_props(snap_to_grid.value, snap_dist.value, view_grid.value, view_rulers.value, view_units.value, grid_red.value, grid_green.value, grid_blue.value) def get_map_snap(self, snap): """ Get current snapping distance in MM :param snap: Snap value in MM (returned) :type snap: float_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ snap.value = self._get_map_snap(snap.value) def get_window_state(self): """ Retrieve the current state of the map window :returns: :ref:`EMAP_WINDOW_STATE` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_window_state() return ret_val def set_display_area(self, min_x, min_y, max_x, max_y): """ Set the area you wish to see. :param min_x: X Min :param min_y: Y Min :param max_x: X Max :param max_y: Y Max :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Coordinates are based on the current view user units. The map is immediatly redrawn. """ self._set_display_area(min_x, min_y, max_x, max_y) def set_map_layout_props(self, snap_to_grid, snap_dist, view_grid, view_rulers, view_units, grid_red, grid_green, grid_blue): """ Set the base layout view properties. :param snap_to_grid: Snap to grid :param snap_dist: Snapping distance (always in mm) :param view_grid: View Grid :param view_rulers: View Rulers :param view_units: :ref:`LAYOUT_VIEW_UNITS` View Units :param grid_red: Grid Red Component (0-255) :param grid_green: Grid Green Component (0-255) :param grid_blue: Grid Blue Component (0-255) :type snap_to_grid: bool :type snap_dist: float :type view_grid: int :type view_rulers: int :type view_units: int :type grid_red: int :type grid_green: int :type grid_blue: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This affects the display units and other related properties for the base view of a map. """ self._set_map_layout_props(snap_to_grid, snap_dist, view_grid, view_rulers, view_units, grid_red, grid_green, grid_blue) def set_map_snap(self, snap): """ Set current snapping distance in MM :param snap: Snap value in MM :type snap: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_map_snap(snap) def set_window_state(self, state): """ Changes the state of the map window :param state: :ref:`EMAP_WINDOW_STATE` :type state: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_window_state(state) # General def packed_files(self): """ The number of packed files in the map. :returns: The number of packed files in map. :rtype: int .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._packed_files() return ret_val def activate_group(self, view_group): """ Activates a group and associated tools. :param view_group: "View/Group" :type view_group: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** Activating a group basically enters the edit mode associated with the type of group. E.g. a vector group will enable the edit toolbar for that gorup and an `GXAGG <geosoft.gxapi.GXAGG>` will bring up the image color tool. Be sure to pass a combined name containing both the view name and the group separated by a "/" or "\\". """ self._activate_group(view_group.encode()) def activate_view(self, view): """ Activates a view and associated tools. :param view: "View" :type view: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._activate_view(view.encode()) @classmethod def current(cls): """ This method returns the Current Edited map. :returns: `GXEMAP <geosoft.gxapi.GXEMAP>` Object :rtype: GXEMAP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAP._current(GXContext._get_tls_geo()) return GXEMAP(ret_val) @classmethod def current_no_activate(cls): """ This method returns the Current Edited map. :returns: `GXEMAP <geosoft.gxapi.GXEMAP>` Object :rtype: GXEMAP .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function acts just like `current <geosoft.gxapi.GXEMAP.current>` except that the document is not activated (brought to foreground) and no guarantee is given about which document is currently active. 
""" ret_val = gxapi_cy.WrapEMAP._current_no_activate(GXContext._get_tls_geo()) return GXEMAP(ret_val) @classmethod def current_if_exists(cls): """ This method returns the Current Edited map. :returns: `GXEMAP <geosoft.gxapi.GXEMAP>` Object to current edited map. If there is no current map, the user is not prompted for a map, and 0 is returned. :rtype: GXEMAP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAP._current_if_exists(GXContext._get_tls_geo()) return GXEMAP(ret_val) def destroy_view(self, unload_flag): """ Removes the view from the workspace. :param unload_flag: :ref:`EMAP_REMOVE` :type unload_flag: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Can only be run in interactive mode. After this call the `GXEMAP <geosoft.gxapi.GXEMAP>` object will become invalid. If this is the last view on the document and the document has been modified the map will be unloaded and optionally saved depending on the :ref:`EMAP_REMOVE` parameter. """ self._destroy_view(unload_flag) def font_lst(self, lst, which): """ List all Windows and geosoft fonts. :param lst: List Object :param which: :ref:`EMAP_FONT` :type lst: GXLST :type which: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** To get TT and GFN fonts, call twice with the same list and `EMAP_FONT_TT <geosoft.gxapi.EMAP_FONT_TT>`, then `EMAP_FONT_GFN <geosoft.gxapi.EMAP_FONT_GFN>`, or vice-versa to change order of listing. 
""" self._font_lst(lst, which) def change_current_view(self, view): """ Change the current working view. :param view: View name :type view: str :returns: 0 if view set, 1 if view does not exist. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function operates on the current map. Unlike `set_current_view <geosoft.gxapi.GXEMAP.set_current_view>` this function's action survive the GX finishing. """ ret_val = self._change_current_view(view.encode()) return ret_val def create_group_snapshot(self, lst): """ Loads an `GXLST <geosoft.gxapi.GXLST>` with the current view/group names existing in a map. Typically used to track group changes that are about to occur. :param lst: `GXLST <geosoft.gxapi.GXLST>` object to fill :type lst: GXLST :returns: 0 if `GXLST <geosoft.gxapi.GXLST>` filled properly 1 if not :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._create_group_snapshot(lst) return ret_val def get_3d_view_name(self, name): """ Get the name of a 3D view if the current view is 3D. :param name: Name returned :type name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ name.value = self._get_3d_view_name(name.value.encode()) def get_current_group(self, group): """ Get the current group name. :param group: Returned group name :type group: str_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function operates on the current map. """ group.value = self._get_current_group(group.value.encode()) def get_current_view(self, view): """ Get the current view name. :param view: Returned view name :type view: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function operates on the current map. """ view.value = self._get_current_view(view.value.encode()) @classmethod def get_maps_lst(cls, lst, path): """ Load the file names of open maps into a `GXLST <geosoft.gxapi.GXLST>`. :param lst: `GXLST <geosoft.gxapi.GXLST>` to load :param path: :ref:`EMAP_PATH` :type lst: GXLST :type path: int :returns: The number of documents loaded into the `GXLST <geosoft.gxapi.GXLST>`. The `GXLST <geosoft.gxapi.GXLST>` is cleared first. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAP._get_maps_lst(GXContext._get_tls_geo(), lst, path) return ret_val def get_name(self, name): """ Get the name of the map object of this `GXEMAP <geosoft.gxapi.GXEMAP>`. :param name: Name returned :type name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" name.value = self._get_name(name.value.encode()) @classmethod def have_current(cls): """ This method returns whether a current map is loaded :returns: 0 - no current map. 1 - current map :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAP._have_current(GXContext._get_tls_geo()) return ret_val @classmethod def i_get_specified_map_name(cls, field, value, name): """ Find a loaded map that has a setting in its reg. :param field: `GXREG <geosoft.gxapi.GXREG>` field name :param value: `GXREG <geosoft.gxapi.GXREG>` field value to find :param name: Buffer for map name :type field: str :type value: str :type name: str_ref :returns: 0 - Ok 1 - No Map Found :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, name.value = gxapi_cy.WrapEMAP._i_get_specified_map_name(GXContext._get_tls_geo(), field.encode(), value.encode(), name.value.encode()) return ret_val def is_grid(self): """ Is the map a grid map? :returns: 1 - Yes, 0 - No :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._is_grid() return ret_val @classmethod def reload_grid(cls, name): """ Reloads a grid document. :param name: Source file name :type name: str .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** Use this method to reload (if loaded) a grid document if the file on disk changed. """ gxapi_cy.WrapEMAP._reload_grid(GXContext._get_tls_geo(), name.encode()) def is_3d_view(self): """ Is the current view a 3D view. :returns: 1 - Yes, 0 - No :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._is_3d_view() return ret_val def get_e_3dv(self): """ Get an `GXE3DV <geosoft.gxapi.GXE3DV>` from the `GXEMAP <geosoft.gxapi.GXEMAP>` :returns: `GXE3DV <geosoft.gxapi.GXE3DV>` object :rtype: GXE3DV .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_e_3dv() return GXE3DV(ret_val) def is_locked(self): """ Checks if map is locked :rtype: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._is_locked() return ret_val @classmethod def loaded(cls, name): """ Returns 1 if a map is loaded . :param name: Map name :type name: str :returns: 1 if map is loaded, 0 otherwise. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapEMAP._loaded(GXContext._get_tls_geo(), name.encode()) return ret_val def read_only(self): """ Checks if a map is currently opened in a read-only mode. :rtype: bool .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._read_only() return ret_val def get_window_position(self, left, top, right, bottom, state, is_floating): """ Get the map window's position and dock state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`EMAP_WINDOW_STATE` :param is_floating: Docked or floating :ref:`EMAP_WINDOW_POSITION` :type left: int_ref :type top: int_ref :type right: int_ref :type bottom: int_ref :type state: int_ref :type is_floating: int_ref .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ left.value, top.value, right.value, bottom.value, state.value, is_floating.value = self._get_window_position(left.value, top.value, right.value, bottom.value, state.value, is_floating.value) def set_window_position(self, left, top, right, bottom, state, is_floating): """ Get the map window's position and dock state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`EMAP_WINDOW_STATE` :param is_floating: Docked or floating :ref:`EMAP_WINDOW_POSITION` :type left: int :type top: int :type right: int :type bottom: int :type state: int :type is_floating: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" self._set_window_position(left, top, right, bottom, state, is_floating) def doubleize_group_snapshot(self, state): """ The `GXLST <geosoft.gxapi.GXLST>` passed in must contain View\\Group strings in the Name field only. The function will compare with a more current `GXLST <geosoft.gxapi.GXLST>` and zoom the map to the new entry. :param state: `GXLST <geosoft.gxapi.GXLST>` object used for comparison :type state: GXLST :returns: 0 if zoom proceeded ok 1 if error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Typically this function is used in conjunction with CreateSnapshot_EMAP. """ ret_val = self._doubleize_group_snapshot(state) return ret_val def set_current_view(self, view): """ Set the current working view. :param view: View name :type view: str :returns: 0 if view set, 1 if view does not exist. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function operates on the current map. It changes the view only during the execution of the GX. As soon as the GX terminates the view will revert to the original one. """ ret_val = self._set_current_view(view.encode()) return ret_val def get_view_ipj(self, view, ipj): """ Get a view's `GXIPJ <geosoft.gxapi.GXIPJ>`. :param view: View name :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` in which to place the view `GXIPJ <geosoft.gxapi.GXIPJ>` :type view: str :type ipj: GXIPJ .. versionadded:: 9.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** This function can be used to obtain a views coordinate system without having to call `lock <geosoft.gxapi.GXEMAP.lock>`. This could be an expensive operation that cause undesirable UX. """ self._get_view_ipj(view.encode(), ipj) @classmethod def load(cls, name): """ Loads maps into the editor. :param name: List of maps (';' or '|' delimited) to load. :type name: str :returns: `GXEMAP <geosoft.gxapi.GXEMAP>` Object to edited map. :rtype: GXEMAP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The last map in the list will be the current map. Maps may already be loaded. Only the first file in the list may have a directory path. All other files in the list are assumed to be in the same directory as the first file. """ ret_val = gxapi_cy.WrapEMAP._load(GXContext._get_tls_geo(), name.encode()) return GXEMAP(ret_val) @classmethod def load_no_activate(cls, name): """ Loads documents into the workspace :param name: List of documents (';' or '|' delimited) to load. :type name: str :returns: Handle to current edited document, which will be the last database in the list if multiple files were provided. :rtype: GXEMAP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function acts just like `load <geosoft.gxapi.GXEMAP.load>` except that the document(s) is not activated (brought to foreground) and no guarantee is given about which document is currently active. 
""" ret_val = gxapi_cy.WrapEMAP._load_no_activate(GXContext._get_tls_geo(), name.encode()) return GXEMAP(ret_val) @classmethod def load_with_view(cls, name, p2): """ Load an `GXEMAP <geosoft.gxapi.GXEMAP>` with the view from a current `GXEMAP <geosoft.gxapi.GXEMAP>`. :param name: Source Map name :param p2: `GXEMAP <geosoft.gxapi.GXEMAP>` to use as the source view :type name: str :type p2: GXEMAP :returns: New `GXEMAP <geosoft.gxapi.GXEMAP>` handle. :rtype: GXEMAP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Can only be run in interactive mode. Is used by dbsubset to create a new database with the same view as previously. """ ret_val = gxapi_cy.WrapEMAP._load_with_view(GXContext._get_tls_geo(), name.encode(), p2) return GXEMAP(ret_val) def lock(self): """ This method locks the Edited map. :returns: `GXEMAP <geosoft.gxapi.GXEMAP>` Object to map associated with edited map. :rtype: GXMAP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The Redraw flag is set to `EMAP_REDRAW_YES <geosoft.gxapi.EMAP_REDRAW_YES>` when this functions is called. """ ret_val = self._lock() return GXMAP(ret_val) def make_current(self): """ Makes this `GXEMAP <geosoft.gxapi.GXEMAP>` object the current active object to the user. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" self._make_current() def print_(self, entire_map, scale_to_fit, print_to_file, all_pages, centre, copies, first_page, last_page, scale_factor, overlap_size, offset_x, offset_y, file): """ Print the current map to current printer. :param entire_map: lEntireMap (0 or 1) :param scale_to_fit: lScaleToFit 0 - use scale factor 1 - fit to media 2 - fit to roll media :param print_to_file: lPrintToFile(0 or 1) :param all_pages: lAllPages (0 or 1) :param centre: lCentre (0 or 1) :param copies: lCopies :param first_page: lFirstPage :param last_page: lLastPage :param scale_factor: dScaleFactor (2.0 doubles plot size) :param overlap_size: lOverlapSize (mm) :param offset_x: lOffsetX (mm) :param offset_y: lOffsetY (mm) :param file: szFile (if lPrintToFile==1) :type entire_map: int :type scale_to_fit: int :type print_to_file: int :type all_pages: int :type centre: int :type copies: int :type first_page: int :type last_page: int :type scale_factor: float :type overlap_size: int :type offset_x: int :type offset_y: int :type file: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._print_(entire_map, scale_to_fit, print_to_file, all_pages, centre, copies, first_page, last_page, scale_factor, overlap_size, offset_x, offset_y, file.encode()) def redraw(self): """ Redraw the map immediately. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Redraws the map immediately. Map must not be locked. """ self._redraw() def select_group(self, view_group): """ Select a group. :param view_group: "View/Group" :type view_group: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._select_group(view_group.encode()) def set_redraw_flag(self, redraw): """ Set the redraw flag. :param redraw: :ref:`EMAP_REDRAW` :type redraw: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function is generally used to prevent redrawing of the map, which normally occurs after the last `un_lock <geosoft.gxapi.GXEMAP.un_lock>` call, in cases where it is known that no changes are being made to the map. Typical usage would be to call `lock <geosoft.gxapi.GXEMAP.lock>` followed by `set_redraw_flag <geosoft.gxapi.GXEMAP.set_redraw_flag>` (with `EMAP_REDRAW_NO <geosoft.gxapi.EMAP_REDRAW_NO>`) prior to querying information from the map. And then end with a call to `un_lock <geosoft.gxapi.GXEMAP.un_lock>`. """ self._set_redraw_flag(redraw) @classmethod def un_load(cls, name): """ Unloads a `GXMAP <geosoft.gxapi.GXMAP>`. :param name: Name of the map to unload :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the `GXMAP <geosoft.gxapi.GXMAP>` is not loaded, nothing happens. Same as `un_load_verify <geosoft.gxapi.GXEMAP.un_load_verify>` with FALSE to prompt save. """ gxapi_cy.WrapEMAP._un_load(GXContext._get_tls_geo(), name.encode()) @classmethod def un_load_all(cls): """ Unloads all opened maps .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEMAP._un_load_all(GXContext._get_tls_geo()) @classmethod def un_load_verify(cls, name, prompt): """ Unloads an edited map, optional prompt to save. :param name: Name of map to unload :param prompt: Prompt? :type name: str :type prompt: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the map is not loaded, nothing happens. If "FALSE", map is saved without a prompt. """ gxapi_cy.WrapEMAP._un_load_verify(GXContext._get_tls_geo(), name.encode(), prompt) def un_lock(self): """ This method unlocks the Edited map. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._un_lock() # Input def get_cur_point(self, x, y): """ Returns the coordinates of the currently selected point in view coordinates :param x: X coordinate in current user units. :param y: Y :type x: float_ref :type y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ x.value, y.value = self._get_cur_point(x.value, y.value) def get_cur_point_mm(self, x, y): """ Returns the coordinates of the currently selected point in mm on map :param x: X coordinate in map mm :param y: Y :type x: float_ref :type y: float_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ x.value, y.value = self._get_cur_point_mm(x.value, y.value) def get_cursor(self, x, y): """ Returns the coordinates of the last known cursor location :param x: X coordinate in current view user units :param y: Y :type x: float_ref :type y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ x.value, y.value = self._get_cursor(x.value, y.value) def get_cursor_mm(self, x, y): """ Returns the coordinates of the last known cursor location in mm on map. :param x: X coordinate in map mm :param y: Y :type x: float_ref :type y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ x.value, y.value = self._get_cursor_mm(x.value, y.value) def digitize(self, wa, img, digits, prompt, prefix, delim, newline): """ Digitise points from the current map and place in a `GXWA <geosoft.gxapi.GXWA>`. :param wa: `GXWA <geosoft.gxapi.GXWA>` in which to write digitized points :param img: `GXIMG <geosoft.gxapi.GXIMG>` for Z value, or `IMG_NULL <geosoft.gxapi.IMG_NULL>` for no Z. :param digits: Number of significant digits to use, 0 for all. :param prompt: Command line prompt string :param prefix: New line prefix string :param delim: Delimiter :param newline: 0 for no newline 1 for automatic newline at each point :type wa: GXWA :type img: GXIMG :type digits: int :type prompt: str :type prefix: str :type delim: str :type newline: int :returns: 0 if user digitized some points. 1 if user cancelled. 
:rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The command line will start to recieve digitized points from the mouse. Whenever the left mouse button is pressed, the current view X,Y are placed on the workspace command line. If a valid `GXIMG <geosoft.gxapi.GXIMG>` is passed, the Z value is also placed on the command line. If auto-newline is specified, the line is immediately placed into `GXWA <geosoft.gxapi.GXWA>`, otherwise the user has the oportunity to enter data before pressing Enter. Locations are in the current view user units """ ret_val = self._digitize(wa, img, digits, prompt.encode(), prefix.encode(), delim.encode(), newline) return ret_val def digitize2(self, vvx, vvy, vvz, img, prompt, newline): """ Digitise points from the current map and place in VVs. :param vvx: Real X `GXVV <geosoft.gxapi.GXVV>` :param vvy: Real Y `GXVV <geosoft.gxapi.GXVV>` :param vvz: Real Z `GXVV <geosoft.gxapi.GXVV>` :param img: `GXIMG <geosoft.gxapi.GXIMG>` for Z value, or `IMG_NULL <geosoft.gxapi.IMG_NULL>` for no Z. :param prompt: Command line prompt string :param newline: 0 for no newline 1 for automatic newline at each point :type vvx: GXVV :type vvy: GXVV :type vvz: GXVV :type img: GXIMG :type prompt: str :type newline: int :returns: 0 if user digitized some points. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The command line will start to recieve digitized points from the mouse. Whenever the left mouse button is pressed, the current view X,Y are placed on the workspace command line. 
If a valid `GXIMG <geosoft.gxapi.GXIMG>` is passed, the Z value is also placed on the command line. If auto-newline is specified, the line is immediately placed into the VVs, otherwise the user has the oportunity to enter data before pressing Enter. Locations are in the current view user units """ ret_val = self._digitize2(vvx, vvy, vvz, img, prompt.encode(), newline) return ret_val def digitize_peaks(self, vvx, vvy, vvz, img, prompt, newline): """ Digitise points from the current map and place in VVs. :param vvx: Real X `GXVV <geosoft.gxapi.GXVV>` :param vvy: Real Y `GXVV <geosoft.gxapi.GXVV>` :param vvz: Real Z `GXVV <geosoft.gxapi.GXVV>` :param img: `GXIMG <geosoft.gxapi.GXIMG>` for Z value, or `IMG_NULL <geosoft.gxapi.IMG_NULL>` for no Z. :param prompt: Command line prompt string :param newline: 0 for no newline 1 for automatic newline at each point :type vvx: GXVV :type vvy: GXVV :type vvz: GXVV :type img: GXIMG :type prompt: str :type newline: int :returns: 0 if user digitized some points. 1 if user cancelled. :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Same as `digitize2 <geosoft.gxapi.GXEMAP.digitize2>`, but the closest peaks to the selected locations are returned instead of the selected location. The method chooses the highest value of the 8 surrounding points, the repeats this process until no higher value can be found in any of the 8 surrounding points. If there are two or more points with a higher value, it will just take the first one and continue, and this method will stall on flat areas as well (since no surrounding point is larger). 
""" ret_val = self._digitize_peaks(vvx, vvy, vvz, img, prompt.encode(), newline) return ret_val def digitize_polygon(self, vvx, vvy, vvz, img, prompt, newline, pixel_radius): """ Same as iDigitze2_EMAP, but automatically close polygons. :param vvx: Real X `GXVV <geosoft.gxapi.GXVV>` :param vvy: Real Y `GXVV <geosoft.gxapi.GXVV>` :param vvz: Real Z `GXVV <geosoft.gxapi.GXVV>` :param img: `GXIMG <geosoft.gxapi.GXIMG>` for Z value, or `IMG_NULL <geosoft.gxapi.IMG_NULL>` for no Z. :param prompt: Command line prompt string :param newline: 0 for no newline 1 for automatic newline at each point :param pixel_radius: Close the polygon if the selected location is within this radius in screen pixels. :type vvx: GXVV :type vvy: GXVV :type vvz: GXVV :type img: GXIMG :type prompt: str :type newline: int :type pixel_radius: int :returns: 0 if user digitized some points. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This is the same as `digitize2 <geosoft.gxapi.GXEMAP.digitize2>`, except that it automatically detects, (except for the 2nd and 3rd points) when a selected location is within the entered number of pixels from the starting point. If yes, the polygon is assumed to be closed, and the operation is the same as the RMB "done" command, and the process returns 0. """ ret_val = self._digitize_polygon(vvx, vvy, vvz, img, prompt.encode(), newline, pixel_radius) return ret_val def get_box(self, str_val, min_x, min_y, max_x, max_y): """ Returns the coordinates of a user selected box. :param str_val: User prompt string :param min_x: X minimum in current view user units. 
:param min_y: Y :param max_x: X maximum :param max_y: Y :type str_val: str :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, min_x.value, min_y.value, max_x.value, max_y.value = self._get_box(str_val.encode(), min_x.value, min_y.value, max_x.value, max_y.value) return ret_val def get_box2(self, str_val, x1, y1, x2, y2, x3, y3, x4, y4): """ Returns the coordinates of a user selected box in a warped view. :param str_val: User prompt string :param x1: X1 bottom left corner :param y1: Y1 :param x2: X2 bottom right corner :param y2: Y2 :param x3: X3 top right corner :param y3: Y3 :param x4: X4 top left corner :param y4: Y4 :type str_val: str :type x1: float_ref :type y1: float_ref :type x2: float_ref :type y2: float_ref :type x3: float_ref :type y3: float_ref :type x4: float_ref :type y4: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the data view has a rotational (or other) warp, then the `get_box <geosoft.gxapi.GXEMAP.get_box>` function returns only opposite diagonal points in the box, not enough info to determine the other two corners. This function returns the exact coordinates of all four corners, calculated from the pixel locations. 
""" ret_val, x1.value, y1.value, x2.value, y2.value, x3.value, y3.value, x4.value, y4.value = self._get_box2(str_val.encode(), x1.value, y1.value, x2.value, y2.value, x3.value, y3.value, x4.value, y4.value) return ret_val def get_grid(self, str_val, nx, ny, angle, x1, y1, x_len, y_len): """ Position and size a grid on a map. :param str_val: User prompt string :param nx: Number of elements along primary axis to draw. :param ny: Number of elements along secondary axis to draw. :param angle: Angle of primary axis in degrees :param x1: Grid origin X :param y1: Grid origin Y :param x_len: Primary axis length :param y_len: Secondary axis length :type str_val: str :type nx: int :type ny: int :type angle: float_ref :type x1: float_ref :type y1: float_ref :type x_len: float_ref :type y_len: float_ref :returns: 0 if line returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the input angle is `rDUMMY <geosoft.gxapi.rDUMMY>`, an extra step is inserted for the user to define the angle by drawing a line with the mouse. The output primary axis angle will always be in the range -90 < angle <= 90. The grid origin is shifted to whichever corner necessary to make this possible, while keeping the secondary axis at 90 degrees greater than the primary ( going counter-clockwise). The coordinates are returned in the current User projection (See `GXMVIEW.get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` and `GXMVIEW.set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>`.) """ ret_val, angle.value, x1.value, y1.value, x_len.value, y_len.value = self._get_grid(str_val.encode(), nx, ny, angle.value, x1.value, y1.value, x_len.value, y_len.value) return ret_val def get_line(self, str_val, min_x, min_y, max_x, max_y): """ Returns the end points of a line. 
:param str_val: User prompt string :param min_x: X1 in view user units :param min_y: Y1 :param max_x: X2 :param max_y: Y2 :type str_val: str :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :returns: 0 if line returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current User projection (See `GXMVIEW.get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` and `GXMVIEW.set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>`.) """ ret_val, min_x.value, min_y.value, max_x.value, max_y.value = self._get_line(str_val.encode(), min_x.value, min_y.value, max_x.value, max_y.value) return ret_val def get_line_ex(self, str_val, min_x, min_y, max_x, max_y): """ Returns the end points of a line. :param str_val: User prompt string :param min_x: X1 in view user units :param min_y: Y1 :param max_x: X2 :param max_y: Y2 :type str_val: str :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :returns: 0 if line returned. 1 - Right Mouse 2 - Escape/Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current User projection (See `GXMVIEW.get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` and `GXMVIEW.set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>`.) 
""" ret_val, min_x.value, min_y.value, max_x.value, max_y.value = self._get_line_ex(str_val.encode(), min_x.value, min_y.value, max_x.value, max_y.value) return ret_val def get_line_xyz(self, str_val, min_x, min_y, min_z, max_x, max_y, max_z): """ Returns the end points of a line in X,Y and Z :param str_val: User prompt string :param min_x: X1 in view user units :param min_y: Y1 :param min_z: Z1 :param max_x: X2 :param max_y: Y2 :param max_z: Z2 :type str_val: str :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref :returns: 0 if line returned. 1 - Right Mouse 2 - Escape/Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current User projection (See `GXMVIEW.get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` and `GXMVIEW.set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>`.) This is useful for digitizing a line in an oriented view and getting the true coordinates in (X, Y, Z) at the selected point on the view plane. """ ret_val, min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_line_xyz(str_val.encode(), min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) return ret_val def get_point(self, str_val, x, y): """ Returns the coordinates of a user selected point. :param str_val: User prompt string :param x: X coordinate in current view user units. :param y: Y :type str_val: str :type x: float_ref :type y: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This will wait for user to select a point. .. seealso:: iTrackPoint, GetCurPoint, GetCursor """ ret_val, x.value, y.value = self._get_point(str_val.encode(), x.value, y.value) return ret_val def get_point_ex(self, str_val, x, y): """ Returns the coordinates of a user selected point. :param str_val: User prompt string :param x: X coordinate in current view user units. :param y: Y :type str_val: str :type x: float_ref :type y: float_ref :returns: 0 if point returned. 1 if user used right mouse and then Done. 2 if user cancelled. 3 if capture is lost. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This will wait for user to select a point. .. seealso:: iTrackPoint, GetCurPoint, GetCursor """ ret_val, x.value, y.value = self._get_point_ex(str_val.encode(), x.value, y.value) return ret_val def get_point_3d(self, str_val, x, y, z): """ Returns the coordinates of a user selected point. :param str_val: User prompt string :param x: X coordinate in current view user units. :param y: Y :param z: Z :type str_val: str :type x: float_ref :type y: float_ref :type z: float_ref :returns: 0 if point returned. 1 if user used right mouse and then Done. 2 if user cancelled. 3 if capture is lost. :rtype: int .. versionadded:: 9.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This will wait for user to select a point. .. 
seealso:: iTrackPoint, GetCurPoint, GetCursor """ ret_val, x.value, y.value, z.value = self._get_point_3d(str_val.encode(), x.value, y.value, z.value) return ret_val def get_poly_line(self, str_val, vv_x, vv_y): """ Returns a polyline. :param str_val: User prompt string :param vv_x: X :param vv_y: Y :type str_val: str :type vv_x: GXVV :type vv_y: GXVV :returns: 0 if line returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current User projection (See `GXMVIEW.get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` and `GXMVIEW.set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>`.) """ ret_val = self._get_poly_line(str_val.encode(), vv_x, vv_y) return ret_val def get_poly_line_xyz(self, str_val, vv_x, vv_y, vv_z): """ Returns a polyline. :param str_val: User prompt string :param vv_x: X :param vv_y: Y :param vv_z: Z :type str_val: str :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :returns: 0 if line returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current User projection (See `GXMVIEW.get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` and `GXMVIEW.set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>`.) In this version of the method X, Y and Z (depth) are returned. Initially created to deal with crooked sections. """ ret_val = self._get_poly_line_xyz(str_val.encode(), vv_x, vv_y, vv_z) return ret_val def get_rect(self, str_val, min_x, min_y, max_x, max_y): """ Returns the coordinates of a user selected box starting at a corner. 
:param str_val: User prompt string :param min_x: X minimum in current view user units. (defines corner) :param min_y: Y :param max_x: X maximum :param max_y: Y :type str_val: str :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The coordinates are returned in the current User projection (See `GXMVIEW.get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` and `GXMVIEW.set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>`.) If the user `GXIPJ <geosoft.gxapi.GXIPJ>` distorts the coordinates from being rectilinear (e.g. for a TriPlot graph), then care should be taken since the (Xmin, Ymin) and (Xmax, Ymax) values returned do not necessarily correspond to the lower-left and upper-right corners. In fact, the returned values are calculated by taking the starting (fixed) corner and the tracked (opposite) corner, and finding the min and max for X and Y among these two points. With a warped User projection, those two corner locations could easily be (Xmin, Ymax) and (Xmax, Ymin). This becomes quite important if you want to use the rectangle for a masking operation, because the "other" two corner's coordinates may need to be constructed based on a knowledge of the User projection, and may not be directly obtained from the returned X and Y min and max values. What appears to be a rectangle as seen on the map is not necessarily a rectangle in the User coordinates. 
""" ret_val, min_x.value, min_y.value, max_x.value, max_y.value = self._get_rect(str_val.encode(), min_x.value, min_y.value, max_x.value, max_y.value) return ret_val def track_point(self, flags, x, y): """ Get point without prompt or cursor change with tracking :param flags: :ref:`EMAP_TRACK` :param x: X coordinate in current view user units. :param y: Y :type flags: int :type x: float_ref :type y: float_ref :returns: 0 if point returned. 1 if user cancelled. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, x.value, y.value = self._track_point(flags, x.value, y.value) return ret_val # Map Viewport Mode Methods def get_aoi_area(self, min_x, min_y, max_x, max_y): """ Get the area of interest. :param min_x: X Min returned :param min_y: Y Min returned :param max_x: X Max returned :param max_y: Y Max returned :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Coordinates are based on the current view units. """ min_x.value, min_y.value, max_x.value, max_y.value = self._get_aoi_area(min_x.value, min_y.value, max_x.value, max_y.value) def set_aoi_area(self, min_x, min_y, max_x, max_y): """ Set the area of interest. :param min_x: X Min :param min_y: Y Min :param max_x: X Max :param max_y: Y Max :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
        **Note:** Coordinates are based on the current view user units.
        The map is immediately redrawn.
        """
        self._set_aoi_area(min_x, min_y, max_x, max_y)




    def set_viewport_mode(self, mode):
        """
        Set the viewport mode.

        :param mode:  :ref:`EMAP_VIEWPORT`
        :type  mode:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Limitations:** May not be available while executing a command line program.

        **Note:** This is handy for using a map to define an area of interest.
        Use in conjunction with Get/Set AOIArea. If this is used inside
        montaj it is important to set or provide for a method to set the
        map mode back to normal as this is not exposed in the interface.
        """
        self._set_viewport_mode(mode)




# Tracking Methods


    def get_selected_vertices(self, vv_x, vv_y):
        """
        Get the vertices of the selected object.

        :param vv_x:  X `GXVV <geosoft.gxapi.GXVV>` Handle
        :param vv_y:  Y `GXVV <geosoft.gxapi.GXVV>` Handle
        :type  vv_x:  GXVV
        :type  vv_y:  GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Limitations:** May not be available while executing a command line program.

        **Note:** Works only in Vertex Edit Mode
        """
        self._get_selected_vertices(vv_x, vv_y)




# Virtual


    # NOTE(review): the summary below ("Makes this GXEMAP object the current
    # active object") does not match what the wrapped call does (it creates a
    # virtual EMAP from a map name) — looks copied from make_current in the
    # code generator template. Verify and fix in the generator, not here.
    @classmethod
    def create_virtual(cls, name):
        """
        Makes this `GXEMAP <geosoft.gxapi.GXEMAP>` object the current active object to the user.

        :param name:  Name of map to create a virtual `GXEMAP <geosoft.gxapi.GXEMAP>` from
        :type  name:  str

        :returns:     `GXEMAP <geosoft.gxapi.GXEMAP>` Object
        :rtype:       GXEMAP

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Limitations:** May not be available while executing a command line program.
""" ret_val = gxapi_cy.WrapEMAP._create_virtual(GXContext._get_tls_geo(), name.encode()) return GXEMAP(ret_val) # External Window @classmethod def load_control(cls, map_file, window): """ Version of `load <geosoft.gxapi.GXEMAP.load>` that can be used to load a database via subclassing into a Windows control. :param map_file: Map filename :param window: Window handle to receive document :type map_file: str :type window: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEMAP._load_control(GXContext._get_tls_geo(), map_file.encode(), window) @classmethod def load_with_view_control(cls, map_file, emap, window): """ Version of `GXEDB.load_with_view <geosoft.gxapi.GXEDB.load_with_view>` that can be used to load a database via subclassing into a Windows control. :param map_file: Map filename :param emap: `GXEMAP <geosoft.gxapi.GXEMAP>` handle to use as the source view :param window: Window handle to receive document :type map_file: str :type emap: GXEMAP :type window: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapEMAP._load_with_view_control(GXContext._get_tls_geo(), map_file.encode(), emap, window) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXSURFACE.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXSURFACEITEM import GXSURFACEITEM ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSURFACE(gxapi_cy.WrapSURFACE): """ GXSURFACE class. The `GXSURFACE <geosoft.gxapi.GXSURFACE>` class allows you to create, read and alter Geosurface files (``*.geosoft_surface``). A Geosurface file can contain one or more surface items (see `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` class). In turn each item can contains one or more triangular polyhedral meshes. """ def __init__(self, handle=0): super(GXSURFACE, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSURFACE <geosoft.gxapi.GXSURFACE>` :returns: A null `GXSURFACE <geosoft.gxapi.GXSURFACE>` :rtype: GXSURFACE """ return GXSURFACE() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, surface_file, ipj): """ Create a new Geosurface file :param surface_file: Geosurface file name :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` containing coordinate system of the Geosurface :type surface_file: str :type ipj: GXIPJ :returns: `GXSURFACE <geosoft.gxapi.GXSURFACE>` Object :rtype: GXSURFACE .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSURFACE._create(GXContext._get_tls_geo(), surface_file.encode(), ipj) return GXSURFACE(ret_val) @classmethod def open(cls, surface_file, mode): """ Open a Geosurface file :param surface_file: Geosurface file name :param mode: :ref:`SURFACE_OPEN` :type surface_file: str :type mode: int :returns: `GXSURFACE <geosoft.gxapi.GXSURFACE>` Object :rtype: GXSURFACE .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSURFACE._open(GXContext._get_tls_geo(), surface_file.encode(), mode) return GXSURFACE(ret_val) def get_ipj(self, ipj): """ Get the coordinate system of the `GXSURFACE <geosoft.gxapi.GXSURFACE>`. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` in which to place the Geosurface coordinate system :type ipj: GXIPJ .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_ipj(ipj) def set_ipj(self, ipj): """ Change the coordinate system of the `GXSURFACE <geosoft.gxapi.GXSURFACE>`. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` containing the new coordinate system of the Geosurface :type ipj: GXIPJ .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_ipj(ipj) def get_surface_items(self, lst): """ Get the surfaces items in a Geosurface file :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill :type lst: GXLST .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_surface_items(lst) def get_surface_item(self, guid): """ Get the an existing surface item from the `GXSURFACE <geosoft.gxapi.GXSURFACE>` :param guid: Item GUID :type guid: str :returns: `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` Object :rtype: GXSURFACEITEM .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_surface_item(guid.encode()) return GXSURFACEITEM(ret_val) def add_surface_item(self, surfaceitem): """ Add a new surface item to the `GXSURFACE <geosoft.gxapi.GXSURFACE>` :param surfaceitem: `GXSURFACEITEM <geosoft.gxapi.GXSURFACEITEM>` to add :type surfaceitem: GXSURFACEITEM .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_surface_item(surfaceitem) @classmethod def get_surface_names(cls, surface_file, lst): """ Get the surface item names in a Geosurface file :param surface_file: Geosurface file :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill :type surface_file: str :type lst: GXLST .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSURFACE._get_surface_names(GXContext._get_tls_geo(), surface_file.encode(), lst) @classmethod def get_closed_surface_names(cls, surface_file, lst): """ Get the names of closed surface items in a Geosurface file (may return an empty list) :param surface_file: Geosurface file :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill (may return an empty `GXLST <geosoft.gxapi.GXLST>` if none of the surfaces are closed) :type surface_file: str :type lst: GXLST .. 
versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapSURFACE._get_closed_surface_names(GXContext._get_tls_geo(), surface_file.encode(), lst)




    def get_extents(self, min_x, min_y, min_z, max_x, max_y, max_z):
        """
        Get the spatial range of all surface items.

        :param min_x:  Minimum valid data in X.
        :param min_y:  Minimum valid data in Y.
        :param min_z:  Minimum valid data in Z.
        :param max_x:  Maximum valid data in X.
        :param max_y:  Maximum valid data in Y.
        :param max_z:  Maximum valid data in Z.
        :type  min_x:  float_ref
        :type  min_y:  float_ref
        :type  min_z:  float_ref
        :type  max_x:  float_ref
        :type  max_y:  float_ref
        :type  max_z:  float_ref

        .. versionadded:: 8.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_extents(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value)




    @classmethod
    def crc(cls, surface_file, output, crc):
        """
        Compute an XML CRC of a Geosurface file.

        :param surface_file:  Geosurface file
        :param output:        Output file
        :param crc:           CRC (unused, always set to 0)
        :type  surface_file:  str
        :type  output:        str
        :type  crc:           int_ref

        :returns:             CRC Value (always 0)
        :rtype:               int

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val, crc.value = gxapi_cy.WrapSURFACE._crc(GXContext._get_tls_geo(), surface_file.encode(), output.encode(), crc.value)
        return ret_val




    @classmethod
    def sync(cls, name):
        """
        Synchronize the Metadata for this Geosurface

        :param name:  Geosurface file
        :type  name:  str

        ..
versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSURFACE._sync(GXContext._get_tls_geo(), name.encode()) @classmethod def create_from_dxf(cls, ipj, surface_file, dxf_file): """ Create Geosurface file from DXF file. :param surface_file: Geosurface file :param dxf_file: DXF file :type ipj: GXIPJ :type surface_file: str :type dxf_file: str .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSURFACE._create_from_dxf(GXContext._get_tls_geo(), ipj, surface_file.encode(), dxf_file.encode()) @classmethod def create_from_vulcan_triangulation(cls, triangulation_file, ipj, surface_file): """ Create Geosurface file from a Maptek Vulcan triangulation file. :param triangulation_file: 00t file :param surface_file: Geosurface file :type triangulation_file: str :type ipj: GXIPJ :type surface_file: str .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapSURFACE._create_from_vulcan_triangulation(GXContext._get_tls_geo(), triangulation_file.encode(), ipj, surface_file.encode()) @classmethod def append_vulcan_triangulation(cls, triangulation_file, ipj, surface_file): """ Create new surface from a Maptek Vulcan triangulation file and add to an existing geosurface. :param triangulation_file: 00t file :param surface_file: Geosurface file :type triangulation_file: str :type ipj: GXIPJ :type surface_file: str .. 
versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapSURFACE._append_vulcan_triangulation(GXContext._get_tls_geo(), triangulation_file.encode(), ipj, surface_file.encode()) @classmethod def dump_geometry_to_text_file(cls, surface_filename, text_filename): """ Dump surface geometry to a text file. :param surface_filename: Geosurface file :param text_filename: Text file :type surface_filename: str :type text_filename: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSURFACE._dump_geometry_to_text_file(GXContext._get_tls_geo(), surface_filename.encode(), text_filename.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXREG.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXREG(gxapi_cy.WrapREG): """ GXREG class. The `GXREG <geosoft.gxapi.GXREG>` class is used for storing and retrieving named variables. Many classes contain `GXREG <geosoft.gxapi.GXREG>` objects for storing information particular to the class. The `GXMETA <geosoft.gxapi.GXMETA>` class supersedes the `GXREG <geosoft.gxapi.GXREG>` class and is gradually replacing the use of the `GXREG <geosoft.gxapi.GXREG>` class in newer applications. 
""" def __init__(self, handle=0): super(GXREG, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXREG <geosoft.gxapi.GXREG>` :returns: A null `GXREG <geosoft.gxapi.GXREG>` :rtype: GXREG """ return GXREG() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def clear(self): """ Clears all the parameters in a `GXREG <geosoft.gxapi.GXREG>` object .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clear() def copy(self, srce): """ Copy :param srce: Source :type srce: GXREG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(srce) @classmethod def create(cls, l_parm_length): """ Create a handle to a `GXREG <geosoft.gxapi.GXREG>` object :param l_parm_length: Maximum size of "parameter=setting" string. :type l_parm_length: int :returns: `GXREG <geosoft.gxapi.GXREG>` Object :rtype: GXREG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapREG._create(GXContext._get_tls_geo(), l_parm_length) return GXREG(ret_val) @classmethod def create_s(cls, bf): """ Create a handle to a `GXREG <geosoft.gxapi.GXREG>` object from a `GXBF <geosoft.gxapi.GXBF>` :param bf: `GXBF <geosoft.gxapi.GXBF>` handle for file containing serialized `GXREG <geosoft.gxapi.GXREG>` :type bf: GXBF :returns: `GXREG <geosoft.gxapi.GXREG>` Object :rtype: GXREG .. 
versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = gxapi_cy.WrapREG._create_s(GXContext._get_tls_geo(), bf)
        return GXREG(ret_val)




    def get(self, parm, data):
        """
        Gets a string for a specified parameter in the `GXREG <geosoft.gxapi.GXREG>` object

        :param parm:  Name of the parameter
        :param data:  String to get
        :type  parm:  str
        :type  data:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        data.value = self._get(parm.encode(), data.value.encode())




    def get_int(self, parm, data):
        """
        Gets an int for a specified parameter in the `GXREG <geosoft.gxapi.GXREG>` object

        :param parm:  Name of the parameter
        :param data:  Int to get
        :type  parm:  str
        :type  data:  int_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If parameter is not present in `GXREG <geosoft.gxapi.GXREG>`, `iDUMMY <geosoft.gxapi.iDUMMY>` is returned.
        """
        data.value = self._get_int(parm.encode(), data.value)




    def get_one(self, loc, parm, data):
        """
        Gets n-th entry of the `GXREG <geosoft.gxapi.GXREG>` object

        :param loc:   Sequential number of `GXREG <geosoft.gxapi.GXREG>` entry
        :param parm:  String to put parameter name
        :param data:  String to put data into.
        :type  loc:   int
        :type  parm:  str_ref
        :type  data:  str_ref

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        parm.value, data.value = self._get_one(loc, parm.value.encode(), data.value.encode())




    def get_double(self, parm, data):
        """
        Gets a real for a specified parameter in the `GXREG <geosoft.gxapi.GXREG>` object

        :param parm:  Name of the parameter
        :param data:  Real to get
        :type  parm:  str
        :type  data:  float_ref

        ..
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If parameter is not present in `GXREG <geosoft.gxapi.GXREG>`, `rDUMMY <geosoft.gxapi.rDUMMY>` is returned. """ data.value = self._get_double(parm.encode(), data.value) def entries(self): """ Get the number of parms in a `GXREG <geosoft.gxapi.GXREG>` object :returns: Number of parms in a `GXREG <geosoft.gxapi.GXREG>` object. :rtype: int .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._entries() return ret_val def load_ini(self, ini): """ Load a registry from an INI file. :param ini: INI file name :type ini: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Items are loaded into the `GXREG <geosoft.gxapi.GXREG>` in the format "GROUP.ITEM". """ self._load_ini(ini.encode()) def match_string(self, parm, data): """ Replace a string with reg settings. :param parm: String to Replace :param data: Output Buffer :type parm: str :type data: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ data.value = self._match_string(parm.encode(), data.value.encode()) def merge(self, srce, type): """ Merge :param srce: Source :param type: :ref:`REG_MERGE` :type srce: GXREG :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._merge(srce, type) def save_ini(self, ini): """ Save a `GXREG <geosoft.gxapi.GXREG>` to an INI file. :param ini: INI file name :type ini: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Only `GXREG <geosoft.gxapi.GXREG>` parameters in the form "GROUP.ITEM" are dumped to the INI file, because they match the INI format which groups items under [GROUP] headings. Single-word items (without a separating period) are skipped. """ self._save_ini(ini.encode()) def serial(self, bf): """ Serialize a `GXREG <geosoft.gxapi.GXREG>` object into a file. :param bf: `GXBF <geosoft.gxapi.GXBF>` to serialize `GXREG <geosoft.gxapi.GXREG>` into :type bf: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._serial(bf) def set(self, parm, data): """ Sets a string parameter in the `GXREG <geosoft.gxapi.GXREG>` object :param parm: Name of the parameter :param data: String to set it to An empty string sets the setting to an empty string, but does NOT remove the parameter from the `GXREG <geosoft.gxapi.GXREG>`. :type parm: str :type data: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To remove a parameter completely, use one of the following: `set_int <geosoft.gxapi.GXREG.set_int>`(Reg, sParam, `iDUMMY <geosoft.gxapi.iDUMMY>`); or `set_double <geosoft.gxapi.GXREG.set_double>`(Reg, sParam, `rDUMMY <geosoft.gxapi.rDUMMY>`); """ self._set(parm.encode(), data.encode()) def set_int(self, parm, data): """ Sets an int for a specified parameter in the `GXREG <geosoft.gxapi.GXREG>` object :param parm: Name of the parameter :param data: Int to set, `iDUMMY <geosoft.gxapi.iDUMMY>` to remove the parameter :type parm: str :type data: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_int(parm.encode(), data) def set_double(self, parm, p3): """ Sets an real for a specified parameter in the `GXREG <geosoft.gxapi.GXREG>` object :param parm: Name of the parameter :param p3: Real to set, `rDUMMY <geosoft.gxapi.rDUMMY>` to remove the parameter :type parm: str :type p3: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_double(parm.encode(), p3) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXBIGRID.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXBIGRID(gxapi_cy.WrapBIGRID): """ GXBIGRID class. The Bigrid class is used to grid data using a optimized algorithm that assumes data is collected in semi-straight lines. """ def __init__(self, handle=0): super(GXBIGRID, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXBIGRID <geosoft.gxapi.GXBIGRID>` :returns: A null `GXBIGRID <geosoft.gxapi.GXBIGRID>` :rtype: GXBIGRID """ return GXBIGRID() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def clear(self): """ Clears all the parameters in a `GXBIGRID <geosoft.gxapi.GXBIGRID>` object .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._clear() @classmethod def create(cls): """ Create a handle to a Bigrid object :returns: `GXBIGRID <geosoft.gxapi.GXBIGRID>` Object :rtype: GXBIGRID .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The Bigrid object is initially empty. It will store the control file parameters which the Bigrid program needs to execute. Use the LoadParms_BIGRID method to get the control file parameters into the `GXBIGRID <geosoft.gxapi.GXBIGRID>` object. """ ret_val = gxapi_cy.WrapBIGRID._create(GXContext._get_tls_geo()) return GXBIGRID(ret_val) def load_parms(self, file): """ Retrieves a Bigrid object's control parameters from a file, or sets the parameters to default if the file doesn't exist. :param file: Name of file to get the parameter settings from :type file: str :returns: 0 - Ok 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the control file name passed into this function is a file which does not exist, then the defaults for a Bigrid control file will be generated and put into the `GXBIGRID <geosoft.gxapi.GXBIGRID>` object. Otherwise, the control file's settings are retrieved from the file and loaded into the `GXBIGRID <geosoft.gxapi.GXBIGRID>` object. """ ret_val = self._load_parms(file.encode()) return ret_val def load_warp(self, title, cell, warp): """ Load a warp projection. 
:param title: New grid title :param cell: New grid cell size as a string, blank for default :param warp: Warp projection file name :type title: str :type cell: str :type warp: str :returns: 0 - Ok 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._load_warp(title.encode(), cell.encode(), warp.encode()) return ret_val def run(self, zchan, in_dat, out_dat): """ Executes the Bigrid program, using the input channel and output file parameters. :param zchan: Not used, pass as "" :param in_dat: Handle to source `GXDAT <geosoft.gxapi.GXDAT>` object (from database) :param out_dat: Handle to output grid file `GXDAT <geosoft.gxapi.GXDAT>` :type zchan: str :type in_dat: GXDAT :type out_dat: GXDAT .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._run(zchan.encode(), in_dat, out_dat) def run2(self, zchan, in_dat, out_dat, ipj): """ Executes the Bigrid program, using the input channel and output file parameters with a projection handle. :param zchan: Not used, pass as "" :param in_dat: Handle to source `GXDAT <geosoft.gxapi.GXDAT>` object (from database) :param out_dat: Handle to output grid file `GXDAT <geosoft.gxapi.GXDAT>` :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` handle of the projection system :type zchan: str :type in_dat: GXDAT :type out_dat: GXDAT :type ipj: GXIPJ .. versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._run2(zchan.encode(), in_dat, out_dat, ipj) def save_parms(self, name): """ Puts the Bigrid object's control parameters back into its control file. :param name: Name of file to put the parameter settings into :type name: str .. 
versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the control file did not previously exist, it will be created. Otherwise, the old file will be overwritten. """ self._save_parms(name.encode()) @classmethod def get_defaults(cls, db, x, y, z, cell, maxLineSeparation, maxPointSeparation, trendAngle, lowPassWavelength, highPass, noneLinear, preFilter): """ Get default values for max line separation, max point separation and trend angle. :param db: Handle to a database :param x: Y Channel :param y: X Channel :param z: Data channel :param cell: cell size :param maxLineSeparation: max line separation :param maxPointSeparation: max point separation :param trendAngle: trend angle :param lowPassWavelength: low-pass filter wavelength :param highPass: high-pass filter wavelength :param noneLinear: non-linear filter tolerance :param preFilter: pre-filter sample increment :type db: GXDB :type x: str :type y: str :type z: str :type cell: float :type maxLineSeparation: float_ref :type maxPointSeparation: float_ref :type trendAngle: float_ref :type lowPassWavelength: float_ref :type highPass: float_ref :type noneLinear: float_ref :type preFilter: float_ref :returns: 0 - Ok 1 - Error :rtype: int .. 
        versionadded:: 2021.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        # The float_ref arguments are in/out: their .value fields are rewritten
        # with the defaults computed by the native call.
        ret_val, maxLineSeparation.value, maxPointSeparation.value, trendAngle.value, lowPassWavelength.value, highPass.value, noneLinear.value, preFilter.value = gxapi_cy.WrapBIGRID._get_defaults(GXContext._get_tls_geo(), db, x.encode(), y.encode(), z.encode(), cell, maxLineSeparation.value, maxPointSeparation.value, trendAngle.value, lowPassWavelength.value, highPass.value, noneLinear.value, preFilter.value)
        return ret_val



    # NOTE(review): as in get_defaults, 'x'/'y' param docs look swapped — confirm.
    @classmethod
    def get_default_cell_size(cls, db, x, y, z, cell):
        """
        Get default cell size value.

        :param db:    Handle to a database
        :param x:     Y Channel
        :param y:     X Channel
        :param z:     Data channel
        :param cell:  cell size
        :type  db:    GXDB
        :type  x:     str
        :type  y:     str
        :type  z:     str
        :type  cell:  float_ref

        :returns:     0 - Ok
                      1 - Error
        :rtype:       int

        .. versionadded:: 2023.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        # cell is an in/out float_ref; its .value is replaced with the default.
        ret_val, cell.value = gxapi_cy.WrapBIGRID._get_default_cell_size(GXContext._get_tls_geo(), db, x.encode(), y.encode(), z.encode(), cell.value)
        return ret_val




### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/geosoft/gxpy/tests/test_grid_utility.py
import unittest
import os
import numpy as np

import geosoft
import geosoft.gxpy.system as gsys
import geosoft.gxpy.grid as gxgrd
import geosoft.gxpy.grid_utility as gxgrdu
import geosoft.gxpy.geometry as gxgeo
import geosoft.gxpy.coordinate_system as gxcs
import geosoft.gxpy.gdb as gxgdb

from base import GXPYTest


class Test(GXPYTest):
    # Regression tests for geosoft.gxpy.grid_utility.  The numeric constants
    # asserted below are pinned regression values for the grids shipped in
    # testgrids.zip — presumably re-baselined when the algorithms change.

    @classmethod
    def setUpClass(cls):
        cls.setUpGXPYTest()
        # Unpack the test grids into a per-run temporary folder.
        cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'testgrids.zip'),
                                       folder=cls._gx.temp_folder())
        cls.g1f = os.path.join(cls.folder, 'test_grid_1.grd')
        cls.g2f = os.path.join(cls.folder, 'test_grid_2.grd')
        cls.gcf = os.path.join(cls.folder, 'test_bool1_color.grd')
        cls.mag = os.path.join(cls.folder, 'mag.grd')

    def test_grc(self):
        self.start()
        # Module version must track the package version.
        self.assertEqual(gxgrd.__version__, geosoft.__version__)

    def test_mosaic(self):
        self.start()

        with gxgrd.Grid.open(self.g1f) as g:
            m1s = os.path.join(self.folder, 'm1.grd(GRD)')
            gxgrd.Grid.copy(g, m1s).close()
        with gxgrd.Grid.open(self.g2f) as g:
            m2s = os.path.join(self.folder, 'm2.grd(GRD)')
            gxgrd.Grid.copy(g, m2s).close()
        glist = [m1s, m2s]

        mosaicGrid = os.path.join(self.folder, 'test_mosaic.grd')
        with gxgrdu.grid_mosaic(mosaicGrid, glist) as grd:
            properties = grd.properties()
            self.assertAlmostEqual(properties.get('dx'), 0.01)
            self.assertAlmostEqual(properties.get('dy'), 0.01)
            self.assertAlmostEqual(properties.get('x0'), 7.0)
            self.assertAlmostEqual(properties.get('y0'), 44.0)
            self.assertEqual(properties.get('rot'), 0.0)
            self.assertEqual(properties.get('nx'), 201)
            self.assertEqual(properties.get('ny'), 101)
            self.assertEqual(str(properties.get('coordinate_system')), 'WGS 84')

        # Same mosaic written to an HGD container must preserve the properties.
        m = os.path.join(self.folder, 'test_mosaic.hgd(HGD)')
        gxgrdu.grid_mosaic(m, glist).close()
        with gxgrd.Grid.open(m) as grd:
            grd.delete_files()
            properties = grd.properties()
            self.assertAlmostEqual(properties.get('dx'), 0.01)
            self.assertAlmostEqual(properties.get('dy'), 0.01)
            self.assertAlmostEqual(properties.get('x0'), 7.0)
            self.assertAlmostEqual(properties.get('y0'), 44.0)
            self.assertEqual(properties.get('rot'), 0.0)
            self.assertEqual(properties.get('nx'), 201)
            self.assertEqual(properties.get('ny'), 101)
            self.assertEqual(str(properties.get('coordinate_system')), 'WGS 84')

    def test_bool(self):
        self.start()

        # grid_bool accepts open Grid instances...
        g1 = gxgrd.Grid.open(self.g1f)
        g2 = gxgrd.Grid.open(self.g2f)
        grd = gxgrdu.grid_bool(g1, g2, os.path.join(self.folder, 'testBool.grd(GRD;TYPE=SHORT)'), size=3)
        grd.delete_files()
        properties = grd.properties()
        g1.close()
        g2.close()
        grd.close()
        self.assertAlmostEqual(properties.get('dx'), 0.01)
        self.assertAlmostEqual(properties.get('dy'), 0.01)
        self.assertAlmostEqual(properties.get('x0'), 7.0)
        self.assertAlmostEqual(properties.get('y0'), 44.0)
        self.assertEqual(properties.get('rot'), 0.0)
        self.assertEqual(properties.get('nx'), 201)
        self.assertEqual(properties.get('ny'), 101)
        self.assertEqual(str(properties.get('coordinate_system')), 'WGS 84')
        self.assertEqual(properties.get('dtype'), np.int16)

        # ...and plain file names.
        grd = gxgrdu.grid_bool(self.g1f, self.g2f, os.path.join(self.folder, 'testBool.grd(GRD;TYPE=SHORT)'), size=3)
        grd.delete_files()
        properties = grd.properties()
        grd.close()
        self.assertAlmostEqual(properties.get('dx'), 0.01)
        self.assertAlmostEqual(properties.get('dy'), 0.01)
        self.assertAlmostEqual(properties.get('x0'), 7.0)
        self.assertAlmostEqual(properties.get('y0'), 44.0)
        self.assertEqual(properties.get('rot'), 0.0)
        self.assertEqual(properties.get('nx'), 201)
        self.assertEqual(properties.get('ny'), 101)
        self.assertEqual(str(properties.get('coordinate_system')), 'WGS 84')
        self.assertEqual(properties.get('dtype'), np.int16)

    def test_remove_trend(self):
        self.start()

        with gxgrd.Grid.open(self.mag) as grd:
            dtg = gxgrdu.remove_trend(grd, method=gxgrdu.TREND_ALL)
            stt = dtg.statistics()
            self.assertAlmostEqual(stt['mean'], 0.3517859789498061)

        dtg = gxgrdu.remove_trend(self.mag, method=gxgrdu.TREND_EDGE)
        stt = dtg.statistics()
        self.assertAlmostEqual(stt['mean'], 38.13803714657877)

    def test_derivatives(self):
        self.start()

        # Tag the source grid with a unit so derived grids get derived units.
        with gxgrd.Grid.open(self.mag, mode=gxgrd.FILE_READWRITE) as g:
            g.unit_of_measure = 'nT'

        with gxgrd.Grid.open(self.mag) as grd:
            dxy = gxgrdu.derivative(grd, gxgrdu.DERIVATIVE_XY)
            self.assertAlmostEqual(dxy.statistics()['sd'], 0.7241102775692331)
            self.assertEqual(dxy.unit_of_measure, 'nT/m')

        with gxgrd.Grid.open(self.mag) as grd:
            das = gxgrdu.derivative(grd, gxgrdu.DERIVATIVE_XYZ, fft=False)
            self.assertAlmostEqual(das.statistics()['sd'], 1.0226482933289056)
            self.assertEqual(das.unit_of_measure, 'nT/m')

        with gxgrd.Grid.open(self.mag) as grd:
            dtd = gxgrdu.derivative(grd, gxgrdu.TILT_ANGLE, fft=False)
            self.assertAlmostEqual(dtd.statistics()['sd'], 0.8209237171466927)
            self.assertEqual(dtd.unit_of_measure, 'radians')

        with gxgrd.Grid.open(self.mag, dtype=np.float64) as grd:
            dxg = gxgrdu.derivative(grd, gxgrdu.DERIVATIVE_X)
            self.assertAlmostEqual(dxg.statistics()['sd'], 0.7668436702132574)
            self.assertEqual(dxg.dtype, np.float64)
            self.assertEqual(dxg.unit_of_measure, 'nT/m')

        with gxgrd.Grid.open(self.mag, dtype=np.float64) as grd:
            dzg = gxgrdu.derivative(grd, gxgrdu.DERIVATIVE_Z, fft=False)
            self.assertAlmostEqual(dzg.statistics()['sd'], 0.9377582582050702)
            self.assertEqual(dzg.dtype, np.float64)
            self.assertEqual(dzg.unit_of_measure, 'nT/m')

        # FFT-based vertical derivative: looser tolerance (2 places).
        with gxgrd.Grid.open(self.mag, dtype=np.float64) as grd:
            dzg = gxgrdu.derivative(grd, gxgrdu.DERIVATIVE_Z)
            self.assertAlmostEqual(dzg.statistics()['sd'], 0.9175367050980974, 2)
            self.assertEqual(dzg.dtype, np.float64)
            self.assertEqual(dzg.unit_of_measure, 'nT/m')

        dzg = gxgrdu.derivative(self.mag, gxgrdu.DERIVATIVE_Z, fft=False)
        self.assertAlmostEqual(dzg.statistics()['sd'], 0.9377582582050702, 2)
        self.assertEqual(dzg.unit_of_measure, 'nT/m')

        dxg = gxgrdu.derivative(self.mag, gxgrdu.DERIVATIVE_X)
        self.assertAlmostEqual(dxg.statistics()['sd'], 0.7668436702132574, 2)
        self.assertEqual(dxg.unit_of_measure, 'nT/m')

    def test_contour_xy(self):
        self.start()

        with gxgrd.Grid.open(self.mag) as grd:
            xyp = gxgrdu.contour_points(self.mag, grd.statistics()['mean'])
        self.assertTrue(isinstance(xyp, list))
        self.assertTrue(isinstance(xyp[0], gxgeo.PPoint))
        self.assertEqual(xyp[0][0].z, 0.0)

        # oriented grid
        with gxgrd.Grid.open(self.g1f) as g:
            v = g.statistics()['mean']
            with gxgrd.Grid.copy(g) as gm:
                cs_name = gxcs.name_from_hcs_orient_vcs(gm.coordinate_system.hcs, '0, 0, 1000, 0, -90, 25', '')
                gm.coordinate_system = cs_name
                xyp = gxgrdu.contour_points(gm, v)
                self.assertTrue(isinstance(xyp, list))
                self.assertTrue(isinstance(xyp[0], gxgeo.PPoint))
                self.assertEqual(len(xyp), 45)

                # A level outside the grid range raises.
                self.assertRaises(gxgrdu.GridUtilityException, gxgrdu.contour_points, gm, 0)

                xyp = gxgrdu.contour_points(gm, 500, return_as=gxgrdu.RETURN_PPOINT)
                self.assertTrue(isinstance(xyp, gxgeo.PPoint))
                self.assertEqual(len(xyp), 343)

                xyp = gxgrdu.contour_points(gm, 250, return_as=gxgrdu.RETURN_GDB)
                self.assertTrue(isinstance(xyp, gxgdb.Geosoft_gdb))
                self.assertEqual(len(xyp.list_lines()), 9)

    def test_tilt_depth(self):
        self.start()

        td = gxgrdu.tilt_depth(self.mag, return_as=gxgrdu.RETURN_GDB, fft=False)
        self.assertTrue(isinstance(td, gxgdb.Geosoft_gdb))
        self.assertTrue(td.coordinate_system == 'AGD66 / AMG zone 53')
        n = 0
        for ln in td.list_lines():
            d = td.read_line(ln, 'X')
            n += len(d[0])
        self.assertEqual(n, 1673)

        td = gxgrdu.tilt_depth(self.mag, resolution=1000, gdb='temp.gdb', overwrite=True, fft=False)
        self.assertTrue(isinstance(td, gxgdb.Geosoft_gdb))
        self.assertTrue(td.coordinate_system == 'AGD66 / AMG zone 53')
        n = 0
        for ln in td.list_lines():
            d = td.read_line(ln, 'X')
            n += len(d[0])
        self.assertEqual(n, 399)
        td.close(discard=True)

        td = gxgrdu.tilt_depth(self.mag, resolution=1000, return_as=gxgrdu.RETURN_LIST_OF_PPOINT, fft=False)
        self.assertTrue(isinstance(td, list))
        self.assertTrue(td[0].coordinate_system == 'AGD66 / AMG zone 53')
        self.assertTrue(isinstance(td[0], gxgeo.PPoint))
        n = 0
        for p in td:
            n += len(p)
        self.assertEqual(n, 399)

    def test_calculate_slope_standard_deviation(self):
        self.start()
        slope_stddev = gxgrdu.calculate_slope_standard_deviation(self.mag)
        self.assertAlmostEqual(0.64497375, slope_stddev)

    def test_feather_edge(self):
        self.start()

        with gxgrd.Grid.open(self.mag) as g:
            pg = g.gxpg(True)
            # Enlarge the pager so the feathered border has room.
            pg.re_allocate(g.ny + 20, g.nx + 20)
            with gxgrd.Grid.from_data_array(pg) as gp:
                filled_gd = gxgrdu.flood(gp)
                self.assertEqual(filled_gd.statistics()['num_dummy'], 0)
                feath_pg = gxgrdu.feather(filled_gd, 20)
                self.assertEqual(feath_pg.statistics()['mean'], 4986.261784683294, 1)

        with gxgrd.Grid.open(self.mag) as g:
            pg = g.gxpg(True)
            pg.re_allocate(g.ny + 20, g.nx + 20)
            with gxgrd.Grid.from_data_array(pg) as gp:
                filled_gd = gxgrdu.flood(gp, tolerance=50, max_iterations=5, file_name='filled', overwrite=True)
                self.assertEqual(filled_gd.statistics()['num_dummy'], 0)
                feath_gd = gxgrdu.feather(filled_gd, 20, file_name='feather', overwrite=True)
                self.assertAlmostEqual(feath_gd.statistics()['mean'], 4986.439654577116, 1)
                filled_gd.close(discard=True)
                feath_gd.close(discard=True)

    def test_expression(self):
        self.start()

        # Named-grid expression: identical grids difference to zero mean.
        with gxgrd.Grid.open(self.mag) as grd:
            x = gxgrdu.expression({'first': grd, 'second': grd}, 'first-second')
        self.assertEqual(x.statistics()['mean'], 0.)

        # Positional grids are named g1, g2, ...
        with gxgrd.Grid.open(self.mag) as grd:
            x = gxgrdu.expression((grd, grd), 'g1-g2')
        self.assertEqual(x.statistics()['mean'], 0.)


###############################################################################################

if __name__ == '__main__':
    unittest.main()
<file_sep>/docs/GXE3DV.rst

.. _GXE3DV:

GXE3DV class
==================================

.. autoclass:: geosoft.gxapi.GXE3DV
   :members:
<file_sep>/geosoft/gxpy/metadata.py
"""
Geosoft metadata.

:Classes:

    ================= =========================
    :class:`Metadata` metadata
    ================= =========================

.. seealso:: :mod:`geosoft.gxapi.GXMETA`

.. note::

    Regression tests provide usage examples:
    `metadata tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_metadata.py>`_

.. versionadded:: 9.3
"""
import os

import geosoft
import geosoft.gxapi as gxapi
from . import gx as gx
from . import utility as gxu
import json

__version__ = geosoft.__version__


def _t(s):
    # Route user-visible strings through the gxpy translation machinery.
    return geosoft.gxpy.system.translate(s)


class MetadataException(geosoft.GXRuntimeError):
    """
    Exceptions from :mod:`geosoft.gxpy.metadata`.

    ..
versionadded:: 9.3 """ pass META_TYPE_NODE = 0 #: META_TYPE_ATTRIBUTE = 1 #: META_INVALID = -1 #: META_ROOT_NODE = -100 #: def _umn(meta_type, node): name = node.strip('/') if meta_type == META_TYPE_NODE: return 'CLASS:/{}'.format(name) elif meta_type == META_TYPE_ATTRIBUTE: return 'ATTRIB:/{}'.format(name) def get_node_from_meta_dict(meta_node, meta_dict): """ Get the node content from a metadata dictionary. :param meta_node: node wanted, '/' delimited. e.g. 'geosoft/dataset/title' :param meta_dict: metadata dictionary (from `Metadata.meta_dict`) :return: node content, or None if not found .. versionadded:: 9.3.1 """ if not meta_node: return None tree = meta_node.split('/') root = meta_dict for node in tree: if node: if node not in root: return None root = root[node] return root def set_node_in_meta_dict(meta_node, meta_dict, content, replace=False): """ Set a node in a metadata dictionary. Tree nodes are added if absent. :param meta_node: node to set, '/' delimited. e.g. 'geosoft/dataset/title' :param meta_dict: meta dictionary (from `Metadata.meta_dict`) :param content: content to set to the node :param replace: True to replace nodes that are attributes. The default is False, in which case an error is raised if a node in the tree is an attribute. .. versionadded:: 9.3.1 """ if meta_node[-1] == '/': meta_node = meta_node[:-1] tree = meta_node.split('/') root = meta_dict for node in tree[:-1]: if node not in root: root[node] = {} elif not isinstance(root[node], dict): if replace: root[node] = {} else: raise MetadataException(_t('Cannot replace attribte {}. All nodes in the tree must be dict.'). format(root)) root = root[node] root[tree[-1]] = content class Metadata: """ Simple interface to work with Geosoft metadata objects `geosoft.gxapi.GXMETA`. :param gxmeta: `geosoft.gxapi.GXMETA` instance, or None (default) in which case an empty metadata instance is created. 
    Geosoft metadata objects contain metadata organized as a tree of information, with
    each node of the tree containing 0 or more attributes and 0 or more nested nodes.
    One can think of a metadata structure as a dictionary in which items that reference
    a dictionary are nodes, that in turn can hold other nodes, and each node can also
    hold attributes.  We refer to this as a "nested dictionary".  The `meta_dict()`
    method will return the metadata content as a nested dictionary, and the
    `update_dict()` method will add a dictionary to the metadata instance.

    While geosoft metadata can contain custom typed attributes and indeed any
    Geosoft object, this simple interface currently supports only Python types
    int, float, string and Python structures like tuple, arrays and dictionaries.

    Nodes can be identified by a string in the form: `/node/[...]/`.
    For example `'/geosoft/data/'` is equivalent to a dictionary structure
    `{'geosoft': 'data': {}}`.

    Attributes are identified by a string in the form: `/node/[...]/attribute`.
    For example, an attribute `'geosoft/data/keywords'` with keyword content 'mag' and
    'Red Lake' can be represented in a Python dictionary as
    `{'geosoft': 'data': {'keywords': ('mag', 'Red Lake')}`.

    .. versionadded:: 9.3
    """

    def __enter__(self):
        return self

    def __exit__(self, _type, _value, _traceback):
        self.__del__()

    def __del__(self):
        # Drop the GXMETA reference; guard with hasattr in case __init__
        # never ran to completion.
        if hasattr(self, '_gxmeta'):
            self._gxmeta = None

    def __init__(self, gxmeta=None):
        # Wrap an existing GXMETA, or create a fresh empty one.
        if gxmeta:
            self._gxmeta = gxmeta
        else:
            self._gxmeta = gxapi.GXMETA.create()

    @property
    def gxmeta(self):
        """
        The :class:`geosoft.gxapi.GXMETA` instance handle.

        .. versionadded:: 9.3
        """
        return self._gxmeta

    def meta_type(self, meta_node):
        """
        Return if the content of this node is a node (`META_TYPE_NODE`) or an
        attribute (`META_TYPE_ATTRIBUTE`).  Returns `META_INVALID` if neither.

        :param meta_node: metadata node as a string. e.g. 'geosoft/dataset/title'

        ..
versionadded:: 9.3 """ if self.gxmeta.resolve_umn(_umn(META_TYPE_NODE, meta_node)) != META_INVALID: return META_TYPE_NODE elif self.gxmeta.resolve_umn(_umn(META_TYPE_ATTRIBUTE, meta_node)) != META_INVALID: return META_TYPE_ATTRIBUTE return META_INVALID def has_node(self, node_name): """ Returns `True` if this node exists in the metadata. :param node_name: name of a node (eg. `'geosoft/data/'`) .. versionadded:: 9.3 """ return self.meta_type(node_name) == META_TYPE_NODE def has_attribute(self, attribute_name): """ Returns `True` if this attribute exists in the metadata. :param attribute_name: name of a attribute (eg. `'geosoft/data/keywords'`) .. versionadded:: 9.3 """ return self.meta_type(attribute_name) == META_TYPE_ATTRIBUTE def node_token(self, node_name): """ Returns the metadata token (integer) of a node. The node is created if it does not exist. :param node_name: name of the node (eg. `'my_metadata/parameters'`) :returns: metadata token number .. versionadded::9.3 """ node_name = node_name.strip('/') tree = node_name.split('/') branch = META_ROOT_NODE for node in tree: branch = self.gxmeta.create_class(node, branch) return self.gxmeta.resolve_umn(_umn(META_TYPE_NODE, node_name)) def attribute_token(self, attr_name): """ Returns the metadata token (integer) of an attribute. :param attr_name: name of the attribute (eg. `'my_metadata/parameters/frequency'`) :returns: metadata token number or `META_INVALID` if the attribute does not exist. .. versionadded::9.3 """ if self.has_attribute(attr_name): return self.gxmeta.resolve_umn(_umn(META_TYPE_ATTRIBUTE, attr_name)) return META_INVALID def node_attribute_token(self, attr_name): """ returns the node and attribute number of an attribute. :param attr_name: attribute name :returns: (node token, attribute token) .. 
versionadded:: 9.3 """ node_attr = tuple(attr_name.strip('/').rsplit('/', 1)) if len(node_attr) == 1: return 0, self.attribute_token(attr_name) node = node_attr[0] if not self.has_attribute(attr_name): raise MetadataException('Attribute "{}" not found'.format(attr_name)) return self.node_token(node), self.attribute_token(attr_name) def set_attribute(self, attr_name, value): """ Set an attribute to a value. The attribute is created if it does not exist. :param attr_name: attribute name (eg. `'/my_metadata/parameters/frequency'`) :param value: int, float, string or a Python structure such as tuple, array or dict. .. versionadded:: 9.3 """ node_attr = attr_name.strip('/').rsplit('/', 1) if len(node_attr) < 2: node = '' attr = node_attr[0] else: node = node_attr[0] attr = node_attr[1] if self.has_attribute(attr_name): self.gxmeta.delete_attrib(self.attribute_token(attr_name)) node = self.node_token(node) if isinstance(value, str): a = self.gxmeta.create_attrib(attr, node, gxapi.META_CORE_TYPE_String) self.gxmeta.set_attrib_string(node, a, value) elif isinstance(value, float): a = self.gxmeta.create_attrib(attr, node, gxapi.META_CORE_TYPE_R8) self.gxmeta.set_attrib_double(node, a, value) elif isinstance(value, int): a = self.gxmeta.create_attrib(attr, node, gxapi.META_CORE_TYPE_I4) self.gxmeta.set_attrib_int(node, a, value) else: jstr = '__json__{}'.format(json.dumps(value)) a = self.gxmeta.create_attrib(attr, node, gxapi.META_CORE_TYPE_String) self.gxmeta.set_attrib_string(node, a, jstr) def get_attribute(self, attr_name): """ Retrieve an attribute setting. :param attr_name: attribute name (eg. '/my_metadata/parameters/frequency') :returns: attribute setting .. 
versionadded:: 9.3 """ if not self.has_attribute(attr_name): return None node, attr = self.node_attribute_token(attr_name) sr = gxapi.str_ref() self.gxmeta.get_attrib_string(node, attr, sr) try: i = int(sr.value) return i except ValueError: try: f = float(sr.value) return f except ValueError: if sr.value.startswith('__json__'): return json.loads(sr.value[8:]) return sr.value def meta_dict(self): """ Metadata content as a nested dictionary. Attributes will be normal Python objects where the attribute type is supported by Python. This includes basic types (like int and float), lists/tuples, and Python dictionaries, which are nodes in the metadata structucture. Geosoft objects in an attribute, will appear only as a descriptive text string. :return: nested dictionary structure of metadata .. versionadded:: 9.3 """ def parse_attr(s): parts = s.split('=', 1) if len(parts) >= 2: val = parts[1][1:-1] if val.startswith('__json__'): val = val[8:].replace('\\"', '"') val = json.loads(val) return parts[0].lstrip(), val else: return parts[0].lstrip(), None def add_meta(ff): def parse_node(s): nest = 0 while s[0] == ' ': nest += 1 s = s[3:] return s[1:], nest def read_node(ffl): _d = {} node_name, nest = parse_node(ffl[0]) ffl = ffl[1:] while ffl: while ffl[0].lstrip()[0] != '\\': _name, _val = parse_attr(ffl[0]) _d[_name] = _val ffl = ffl[1:] if not ffl: return node_name, _d, ffl else: _, next_nest = parse_node(ffl[0]) if next_nest <= nest: return node_name, _d, ffl nn, _dd, ffl = read_node(ffl) _d[nn] = _dd return node_name, _d, ffl dct = {} while ff: if ff[0][0] != '\\': name, val = parse_attr(ff[0]) dct[name] = val ff = ff[1:] else: name, dd, ff = read_node(ff) dct[name] = dd return dct def metafile(): mf = os.path.join(gx.gx().temp_folder(), 'meta_' + gxu.uuid()) wa = gxapi.GXWA.create(mf, gxapi.WA_NEW) self.gxmeta.write_text(wa) wa = None return mf def metafile_to_list(metaf): ff = [] with open(metaf, 'r') as f: for line in f: line = line.rstrip() if line: ff.append(line) 
os.remove(metaf) return ff metalist = metafile_to_list(metafile()) return add_meta(metalist) def update_dict(self, metadict, trunk_node=''): """ Update the metadata from the content of a dictionary. :param metadict: dictionary of metadata to add/update :param trunk_node: trunk to which to add this meta, default is '' which adds from the root. .. versionadded:: 9.3.1 """ def update(tnode, new): for k, v in new.items(): node = '{}/{}'.format(tnode, k) if isinstance(v, dict): update(node, v) else: self.set_attribute(node, v) update(trunk_node, metadict) <file_sep>/geosoft/gxpy/tests/test_view.py import unittest import os import numpy as np import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.map as gxmap import geosoft.gxpy.view as gxv import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.group as gxg import geosoft.gxpy.geometry as gxgm import geosoft.gxpy.system as gxsys import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.agg as gxagg from base import GXPYTest def draw_2d_stuff(g, size=1.0): plinelist = [[110, 5], [120, 20], [130, 15], [150, 50], [160, 70], [175, 35], [190, 65], [220, 50], [235, 18.5]] pp = gxgm.PPoint.from_list(plinelist) * size g.pen = g.new_pen(line_style=2, line_pitch=2.0) g.polyline(pp) g.pen = g.new_pen(line_style=4, line_pitch=2.0, line_smooth=gxg.SMOOTH_AKIMA) g.polyline(pp) ppp = np.array(plinelist) pp = gxgm.PPoint(ppp[3:, :]) * size g.pen = g.new_pen(line_style=5, line_pitch=5.0, line_smooth=gxg.SMOOTH_CUBIC, line_color=gxg.C_RED, line_thick=0.25, fill_color=gxg.C_LT_BLUE) g.polygon(pp) g.pen = g.new_pen(fill_color=gxg.C_LT_GREEN) p1 = gxgm.Point((100, 0, 0)) * size p2 = gxgm.Point((100, 0, 0)) * size pp = (pp - p1) / 2 + p2 g.polygon(pp) pp += gxgm.Point((0, 25, 0)) * size g.pen = g.new_pen(fill_color=gxg.C_LT_RED) g.polygon(pp) class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, files = gxsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'section_grids.zip'), 
folder=cls._gx.temp_folder()) cls.section = os.path.join(cls.folder, 'section.grd') cls.swing = os.path.join(cls.folder, 'swing_section.grd') cls.crooked = os.path.join(cls.folder, 'crooked_section.grd') def test_version(self): self.start() self.assertEqual(gxmap.__version__, geosoft.__version__) def test_create(self): self.start() with gxmap.Map.new() as gmap: vlist = gmap.view_list self.assertEqual(len(vlist), 2) self.assertTrue('base' in vlist) self.assertTrue('data' in vlist) with gxv.View.open(gmap, 'base') as v: self.assertTrue(v.guid) self.assertEqual(v.name, "base") self.assertEqual(v.scale, 1.0) self.assertEqual(v.aspect, 1.0) self.assertEqual(v.units_name, 'mm') self.assertEqual(v.units_per_metre, 1000.0) self.assertEqual(v.units_per_map_cm, 10.0) with gxv.View.new(gmap, 'ft12000', coordinate_system='ft', scale=12000, area=(0, 0, 50000, 40000)) as v: self.assertEqual(v.name, "ft12000") self.assertAlmostEqual(v.scale, 12000.0) self.assertAlmostEqual(v.aspect, 1.0) self.assertEqual(v.units_name, 'ft') self.assertAlmostEqual(v.units_per_metre, 3.280839895) self.assertAlmostEqual(v.units_per_map_cm, 393.7007874) with gxv.View.new(gmap) as vw: self.assertEqual(vw.name, "_unnamed_view") self.assertEqual(vw.scale, 100.0) self.assertEqual(vw.aspect, 1.0) self.assertEqual(vw.units_name, 'unknown') self.assertEqual(vw.units_per_metre, 1.0) with gxmap.Map.new() as gmap: with gxv.View.new(gmap, "test") as vw: self.assertEqual(vw.name, "test") with gxmap.Map.new() as gmap: area = (100, 500, 15100, 10500) scale = 20000 location = (0,0) xcm = (area[2] - area[0])*100.0/scale ycm = (area[3] - area[1])*100.0/scale with gxv.View.new(gmap, "test", map_location=location, area=area, scale=scale, coordinate_system="WGS 84 / UTM zone 34N") as vw: self.assertEqual(vw.extent_clip,area) self.assertEqual(vw.extent_map_cm(vw.extent_clip), (0, 0, xcm, ycm)) self.assertEqual(vw.scale, scale, scale) self.assertTrue(vw.coordinate_system.same_as(gxcs.Coordinate_system("WGS 84 / UTM zone 
34N"))) self.assertEqual(vw.units_per_metre, 1.0) self.assertEqual(vw.units_name, 'm') with gxmap.Map.new() as gmap: area = (100, 500, 15100, 10500) scale = 12000 loc = (7.5, 2.0) mpu = 1.0 / float(gxcs.parameters(gxcs.PARM_UNITS, 'ftUS')['FACTOR']) xcm = 100.0 * ((area[2] - area[0]) / scale) / mpu ycm = 100.0 * ((area[3] - area[1]) / scale) / mpu with gxv.View.new(gmap, "test", map_location=loc, area=area, scale=scale, coordinate_system=("WGS 84 / UTM zone 34N", '', '', 'ftUS', '')) as vw: self.assertEqual(vw.extent_clip,area) mx = vw.extent_map_cm(vw.extent_clip) self.assertAlmostEqual(mx[0], loc[0]) self.assertAlmostEqual(mx[1], loc[1]) self.assertAlmostEqual(mx[2], loc[0] + xcm) self.assertAlmostEqual(mx[3], loc[1] + ycm) self.assertAlmostEqual(vw.scale, scale) self.assertAlmostEqual(vw.aspect, 1.0) self.assertFalse(vw.coordinate_system.same_as(gxcs.Coordinate_system("WGS 84 / UTM zone 34N"))) self.assertTrue(vw.coordinate_system.same_as(gxcs.Coordinate_system(("WGS 84 / UTM zone 34N", '', '', 'ftUS', '')))) self.assertAlmostEqual(vw.units_per_metre, 3.28083333333334) self.assertEqual(vw.units_name, 'ftUS') with gxmap.Map.new() as gmap: area = (100, 500, 15100, 10500) scale = 12000 loc = (7.5, 2.0) mpu = 1.0 / float(gxcs.parameters(gxcs.PARM_UNITS, 'ftUS')['FACTOR']) xcm = 100.0 * ((area[2] - area[0]) / scale) / mpu ycm = 100.0 * ((area[3] - area[1]) / scale) / mpu with gxv.View.new(gmap, "test", map_location=loc, area=area, scale=scale, coordinate_system='ftUS') as vw: self.assertEqual(vw.extent_clip,area) mx = vw.extent_map_cm(vw.extent_clip) self.assertAlmostEqual(mx[0], loc[0]) self.assertAlmostEqual(mx[1], loc[1]) self.assertAlmostEqual(mx[2], loc[0] + xcm) self.assertAlmostEqual(mx[3], loc[1] + ycm) self.assertAlmostEqual(vw.scale, scale) self.assertAlmostEqual(vw.aspect, 1.0) self.assertTrue(vw.coordinate_system.same_as(gxcs.Coordinate_system(('', '', '', 'ftUS', '')))) self.assertAlmostEqual(vw.units_per_metre, 3.28083333333334) 
self.assertEqual(vw.units_name, 'ftUS') with gxmap.Map.new() as gmap: with gxv.View.new(gmap, "test", area=(100, 500, 15100, 10500), scale=(50000, 10000), map_location=(10, 25)) as vw: self.assertEqual(vw.extent_clip,(100, 500, 15100, 10500)) self.assertEqual(vw.scale, 50000) self.assertEqual(vw.aspect, 0.2) self.assertEqual(vw.extent_map_cm(vw.extent_clip), (10., 25., 40., 125.)) self.assertTrue(vw.coordinate_system.same_as(gxcs.Coordinate_system())) def test_scale(self): self.start() with gxmap.Map.new() as gmap: with gxv.View.new(gmap, 'ft12000', coordinate_system='ft', scale=12000, map_location=(10, 5), area=(0, 0, 50000, 40000)) as v: vmin = (v.extent_clip[0], v.extent_clip[1]) self.assertEqual(v.view_to_map_cm(vmin), (10.0, 5.0)) vmax = v.view_to_map_cm(v.extent_clip[2], v.extent_clip[3]) mmax = v.map_cm_to_view(vmax) self.assertEqual(mmax, (50000.0, 40000.0)) def test_reopen_map_view(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test_view_reopen_map_view") with gxmap.Map.new(testmap, overwrite=True) as gmap: mapfile = gmap.file_name with gxv.View.new(gmap, "test_view") as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) with gxv.View.open(gmap, "test_view") as v: pass gxmap.delete_files(mapfile) def test_cs(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test_view_cs") with gxmap.Map.new(testmap, overwrite=True) as gmap: with gxv.View.new(gmap, "rectangle_test", coordinate_system="wgs 84") as v: self.assertEqual("WGS 84", str(v.coordinate_system)) with gxv.View.new(gmap, "vcs", coordinate_system="wgs 84 / UTM zone 15N [special]") as v: self.assertTrue("WGS 84 / UTM zone 15N [special]" in str(v.coordinate_system)) def test_copy_view(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test_view_cs") with gxmap.Map.new(testmap, overwrite=True) as gmap: with gxv.View.new(gmap, 'test_a') as v: with gxg.Draw(v, '2D stuff') as g: g.rectangle(v.extent_clip) draw_2d_stuff(g) with gxv.View.new(gmap, 'test_b', 
copy='test_a') as v: mdf = v.mdf('base') self.assertEqual(mdf[0], (36.39513677811551, 39.99513677811551, 0.0, 6.395136778115507, 19.99513677811551, 0.0)) self.assertEqual(mdf[1], (100.0, 1.0, 0.0, 0.0)) mdf2 = v.mdf() self.assertEqual(mdf2, ((30.0, 20.0, 0.0, 0.0, 0.0, 0.0), (100.0, 1.0, 0.0, 0.0))) self.assertEqual(len(v.group_list), 1) self.assertEqual(v.group_list[0], '2D stuff') self.assertEqual(v.extent_all, v.extent_visible) self.assertEqual(v.extent_map_cm(), (0.0, 0.0, 30.0, 20.0)) self.assertEqual(v.extent_group('2D stuff', unit=gxv.UNIT_MAP), (0.0, 0.0, 30.0, 20.0)) self.assertEqual(v.extent_group('2D stuff'), (0.0, 0.0, 30.0, 20.0)) self.assertEqual(len(gmap.view_list), 4) def test_group_list(self): self.start() with gxagg.Aggregate_image.new(self.section) as agg: with gxv.View.new(area=agg.extent_2d, coordinate_system=agg.coordinate_system) as v: view_name = v.name gxg.Aggregate_group.new(v, agg) map_file = v.map.file_name with gxmap.Map.open(map_file) as m: with gxv.View.open(m, view_name) as v: self.assertEqual(len(v.group_list_agg), 1) self.assertEqual(v.group_list_agg[0], 'section') self.assertEqual(len(v.group_list_csymb), 0) self.assertEqual(len(v.group_list_marked), 0) self.assertEqual(len(v.group_list_visible), 1) self.assertEqual(len(v.group_list_voxel), 0) self.assertEqual(len(v.group_list_vectorvoxel), 0) def test_3dview(self): self.start() v3d_file = None try: with gxv.View_3d.new('test_3d', overwrite=True) as v: v3d_file = v.file_name self.assertTrue(v3d_file.lower().endswith('.geosoft_3dv')) self.assertEqual(v.name, 'test_3d') self.assertEqual(v.map.name, 'test_3d') with gxg.Draw(v, '2D stuff') as g: draw_2d_stuff(g) v.new_drawing_plane('plane_0') self.assertEqual(v.current_3d_drawing_plane, 'plane_0') self.assertRaises(gxv.ViewException, v.new_drawing_plane, 'plane_0') v.new_drawing_plane('vertical', rotation=(90.0, 0, 0)) self.assertEqual(v.current_3d_drawing_plane, 'vertical') with gxg.Draw(v, '2D stuff vertical', plane='vertical') as 
g: g.rectangle(v.extent_clip) draw_2d_stuff(g) with gxg.Draw_3d(v, '3D stuff') as g: g.box_3d(((20, 10, -10), (80, 50, 30)), pen=g.new_pen(line_color='R255G100B50')) self.assertTrue('Plane' in v.plane_list) self.assertTrue('plane_0' in v.plane_list) self.assertTrue('vertical' in v.plane_list) self.assertEqual(v.plane_number('Plane'), 0) self.assertEqual(v.plane_name('vertical'), 'vertical') self.assertEqual(v.plane_name(2), 'vertical') self.assertRaises(gxv.ViewException, v.plane_number, 'bogus') self.assertRaises(gxv.ViewException, v.plane_number, -1) self.assertRaises(gxv.ViewException, v.plane_name, 3) self.assertRaises(gxv.ViewException, v.plane_name, 'bogus') self.assertEqual(v.get_class_name('Plane'), 'vertical') self.crc_map(v3d_file) finally: if v3d_file: gxmap.delete_files(v3d_file) def test_planes(self): self.start() v3d_file = None try: with gxv.View_3d.new('test_3d', overwrite=True) as v: v3d_file = v.file_name with gxg.Draw(v, 'default_plane') as g: draw_2d_stuff(g) self.assertEqual(v.current_3d_drawing_plane, 'Plane') self.assertRaises(gxv.ViewException, v.new_drawing_plane, 'Plane') v.new_drawing_plane('vertical', rotation=(90.0, 0, 0)) self.assertEqual(v.current_3d_drawing_plane, 'vertical') with gxg.Draw(v, '2D stuff vertical') as g: g.rectangle(v.extent_clip) draw_2d_stuff(g) with gxg.Draw(v, 'rectangle_plane', plane='Plane') as g: g.rectangle(v.extent_clip) self.assertTrue('vertical' in v.plane_list) self.assertTrue('Plane' in v.plane_list) gop = v.groups_on_plane_list('Plane') self.assertEqual(len(gop), 2) self.assertTrue('default_plane' in gop) self.assertTrue('rectangle_plane' in gop) self.crc_map(v3d_file) finally: if v3d_file: gxmap.delete_files(v3d_file) def test_3d_map(self): self.start() v3d_file = None map_file = None try: with gxmap.Map.new() as map: map_file = map.file_name with gxv.View.open(map, '*base') as v: with gxg.Draw(v, 'edge') as g: g.rectangle(v.extent_clip) with gxv.View_3d.new('test_3d', overwrite=True) as v: v3d_file = 
v.map.file_name with gxg.Draw(v, '2D stuff') as g: draw_2d_stuff(g) v.new_drawing_plane('vertical', rotation=(90.0, 0, 0)) with gxg.Draw(v, '2D stuff vertical', plane='vertical') as g: g.rectangle(v.extent_clip) draw_2d_stuff(g) with gxg.Draw_3d(v, '3D stuff') as g: g.box_3d(((20, 10, -10), (80, 50, 30)), pen=g.new_pen(line_color='R255G100B50')) with gxmap.Map.open(map_file) as map: map.create_linked_3d_view(v, 'linked_view') self.crc_map(map_file) finally: if v3d_file: gxmap.delete_files(v3d_file) if map_file: gxmap.delete_files(map_file) def test_3d_open(self): self.start() v3d_file = None try: with gxv.View_3d.new('test_3d', overwrite=True) as v: v3d_file = v.map.file_name with gxg.Draw(v, '2D stuff') as g: draw_2d_stuff(g) self.assertRaises(gxv.ViewException, gxv.View_3d.open, 'bogus') with gxv.View_3d.open(v3d_file) as v: v.new_drawing_plane('vertical', rotation=(90.0, 0, 0)) with gxg.Draw(v, '2D stuff vertical', plane='vertical') as g: g.rectangle(v.extent_clip) draw_2d_stuff(g) with gxg.Draw_3d(v, '3D stuff') as g: g.box_3d(((20, 10, -10), (80, 50, 30)), pen=g.new_pen(line_color='R255G100B50')) self.crc_map(v3d_file) finally: if v3d_file: gxmap.delete_files(v3d_file) def test_metadata(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test") with gxmap.Map.new(testmap) as gmap: with gxv.View.new(gmap, "test", area=(100, 500, 15100, 10500), scale=(50000, 10000), map_location=(10, 25)) as vw: m = vw.metadata gm = m['geosoft'] self.assertEqual(len(gm), 2) self.assertTrue('dataset' in gm) newstuff = {'maki': {'a': 1, 'b': (4, 5, 6), 'units': 'nT'}} vw.metadata = newstuff with gxmap.Map.open(testmap) as gmap: with gxv.View.open(gmap, "test") as vw: m = vw.metadata gm = m['geosoft'] self.assertEqual(len(gm), 2) self.assertTrue('dataset' in gm) maki = m['maki'] self.assertEqual(maki['b'], ['4', '5', '6']) self.assertEqual(maki['units'], 'nT') with gxv.View_3d.new(testmap, overwrite=True) as vw: m = vw.metadata gm = m['geosoft'] 
self.assertEqual(len(gm), 2) self.assertTrue('dataset' in gm) newstuff = {'maki': {'a': 1, 'b': (4, 5, 6), 'units': 'nT'}} vw.metadata = newstuff with gxv.View_3d.open(testmap) as vw: m = vw.metadata gm = m['geosoft'] self.assertEqual(len(gm), 2) self.assertTrue('dataset' in gm) maki = m['maki'] self.assertEqual(maki['b'], ['4', '5', '6']) self.assertEqual(maki['units'], 'nT') def test_section(self): self.start() with gxagg.Aggregate_image.new(self.section) as agg: with gxv.View.new(area=agg.extent_2d, coordinate_system=agg.coordinate_system) as v: self.assertTrue(v.coordinate_system.is_oriented) self.assertEqual(v.extent_xyz, (515694.9128668542, 7142239.234535628, 1425.0, 516233.9140090464, 7142637.2015803885, 1835.0)) gxg.Aggregate_group.new(v, agg) map_file = v.map.file_name self.crc_map(map_file) def test_swing(self): self.start() with gxagg.Aggregate_image.new(self.swing) as agg: with gxv.View.new(area=agg.extent_2d, coordinate_system=agg.coordinate_system) as v: self.assertTrue(v.coordinate_system.is_oriented) self.assertEqual(v.extent_xyz, (716313.064376335, 1716142.3054918314, -0.6066017177982133, 717108.3819305873, 1716809.6889240067, 360.01785668734107)) gxg.Aggregate_group.new(v, agg) map_file = v.map.file_name self.crc_map(map_file) def test_crooked(self): self.start() with gxagg.Aggregate_image.new(self.crooked) as agg: with gxv.View.new(area=agg.extent_2d, coordinate_system=agg.coordinate_system) as v: self.assertTrue(v.coordinate_system.is_oriented) self.assertEqual(v.extent_xyz, (632840.885099, 4633310.4612, 1203.0, 634556.6023, 4635124.0248, 1217.0)) gxg.Aggregate_group.new(v, agg) map_file = v.map.file_name self.crc_map(map_file) def test_crooked_path(self): self.start() cs = gxgrd.Grid.open(self.crooked).coordinate_system cp = gxv.CrookedPath(cs) self.assertEqual(len(cp.xy), 1629) cp = gxv.CrookedPath(cs.gxipj) self.assertEqual(len(cp.xy), 1629) xy = cp.xy[:100] cp = gxv.CrookedPath(xy, coordinate_system="NAD83 / UTM zone 50N", name="Maki") 
self.assertEqual(len(cp.xy), 100) self.assertTrue(cp.coordinate_system == "NAD83 / UTM zone 50N") self.assertEqual(cp.name, "Maki") cp.name = "Billy" self.assertEqual(cp.name, "Billy") self.assertEqual(len(cp.ppoint), 100) cs = gxcs.Coordinate_system("NAD83 / UTM zone 50N") self.assertRaises(gxv.ViewException, gxv.CrookedPath, cs) cp.set_in_geosoft_ipj(cs) cp =gxv.CrookedPath(cs, name="hmmm") self.assertEqual(len(cp.ppoint), 100) self.assertEqual(cp.extent_xyz, (632840.88509899995, 4633409.6098999996, 0.0, 633012.53350000002, 4633574.2674000002, 0.0)) with gxgrd.Grid.open(self.crooked) as crooked: cp = gxv.CrookedPath(crooked.coordinate_system) v = gxv.View.new(area=crooked.extent_2d()) self.assertFalse(v.is_crooked_path) v = gxv.View.new(area=crooked.extent_2d(), crooked_path=cp) self.assertTrue(v.is_crooked_path) self.assertEqual(len(v.crooked_path()), 1629) def test_plane_relief_surface(self): self.start() with gxv.View_3d.new('test_plane_relief', overwrite=True) as vw: vw.new_drawing_plane('plane_no_relief') vw.new_drawing_plane('plane_relief') vw.set_plane_relief_surface(self.section, refine=4, base=100, scale=2, min=200, max=400) no_relief_info = vw.get_plane_relief_surface_info('plane_no_relief') self.assertEqual('', no_relief_info.surface_grid_name) self.assertEqual(3, no_relief_info.refine) self.assertEqual(0, no_relief_info.base) self.assertEqual(1, no_relief_info.scale) self.assertEqual(None, no_relief_info.min) self.assertEqual(None, no_relief_info.max) with_relief_info = vw.get_plane_relief_surface_info(1) self.assertEqual('.\__tmp__\_gx_uuid_test_view.py_1\section.grd', with_relief_info.surface_grid_name) self.assertEqual(4, with_relief_info.refine) self.assertEqual(100, with_relief_info.base) self.assertEqual(2, with_relief_info.scale) self.assertEqual(200, with_relief_info.min) self.assertEqual(400, with_relief_info.max) def test_from_gxapi(self): self.start() gxapi_map = gxapi.GXMAP.create('test_from_gxapi.map', gxmap.WRITE_NEW) gxmview = 
gxapi.GXMVIEW.create(gxapi_map, 'someview', gxv.WRITE_NEW) with gxv.View.from_gxapi(gxapi_map, gxmview) as v: self.assertEqual('someview', v.name) def test_3d_from_gxapi(self): self.start() gxapi_map = gxapi.GXMAP.create('test_3d_from_gxapi.geosoft_3dv', gxmap.WRITE_NEW) gxmview = gxapi.GXMVIEW.create(gxapi_map, '3D', gxv.WRITE_NEW) h3dn = gxapi.GX3DN.create() gxmview.set_3dn(h3dn) with gxv.View_3d.from_gxapi(gxapi_map, gxmview) as v: self.assertEqual('test_3d_from_gxapi', v.name) if __name__ == '__main__': unittest.main() <file_sep>/docs/GXDB.rst .. _GXDB: GXDB class ================================== .. autoclass:: geosoft.gxapi.GXDB :members: .. _DB_ACTIVITY_BLOB: DB_ACTIVITY_BLOB constants ----------------------------------------------------------------------- Activity Blob .. autodata:: geosoft.gxapi.DB_ACTIVITY_BLOB :annotation: .. autoattribute:: geosoft.gxapi.DB_ACTIVITY_BLOB .. _DB_CATEGORY_BLOB: DB_CATEGORY_BLOB constants ----------------------------------------------------------------------- Blob Categories .. autodata:: geosoft.gxapi.DB_CATEGORY_BLOB_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_BLOB_NORMAL .. _DB_CATEGORY_CHAN: DB_CATEGORY_CHAN constants ----------------------------------------------------------------------- Channel Categories For STRING type channels, use negative integers to specify channel width. For example, use -10 to define a string channel with up to 10 characters. Use the GS_SIMPLE_TYPE() macro to convert to INT,REAL or string. .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_BYTE :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_BYTE .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_USHORT :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_USHORT .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_SHORT :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_SHORT .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_LONG :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_LONG .. 
autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_FLOAT :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_FLOAT .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_DOUBLE :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_DOUBLE .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_UBYTE :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_UBYTE .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_ULONG :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_ULONG .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_LONG64 :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_LONG64 .. autodata:: geosoft.gxapi.DB_CATEGORY_CHAN_ULONG64 :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_CHAN_ULONG64 .. _DB_CATEGORY_LINE: DB_CATEGORY_LINE constants ----------------------------------------------------------------------- Line Categories .. autodata:: geosoft.gxapi.DB_CATEGORY_LINE_FLIGHT :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_LINE_FLIGHT .. autodata:: geosoft.gxapi.DB_CATEGORY_LINE_GROUP :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_LINE_GROUP .. autodata:: geosoft.gxapi.DB_CATEGORY_LINE_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_LINE_NORMAL .. _DB_CATEGORY_USER: DB_CATEGORY_USER constants ----------------------------------------------------------------------- User Categories .. autodata:: geosoft.gxapi.DB_CATEGORY_USER_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.DB_CATEGORY_USER_NORMAL .. _DB_CHAN_FORMAT: DB_CHAN_FORMAT constants ----------------------------------------------------------------------- Channel formats .. autodata:: geosoft.gxapi.DB_CHAN_FORMAT_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_FORMAT_NORMAL .. autodata:: geosoft.gxapi.DB_CHAN_FORMAT_EXP :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_FORMAT_EXP .. autodata:: geosoft.gxapi.DB_CHAN_FORMAT_TIME :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_FORMAT_TIME .. 
autodata:: geosoft.gxapi.DB_CHAN_FORMAT_DATE :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_FORMAT_DATE .. autodata:: geosoft.gxapi.DB_CHAN_FORMAT_GEOGR :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_FORMAT_GEOGR .. autodata:: geosoft.gxapi.DB_CHAN_FORMAT_SIGDIG :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_FORMAT_SIGDIG .. autodata:: geosoft.gxapi.DB_CHAN_FORMAT_HEX :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_FORMAT_HEX .. _DB_CHAN_PROTECTION: DB_CHAN_PROTECTION constants ----------------------------------------------------------------------- Channel Read-only Protection Status .. autodata:: geosoft.gxapi.DB_CHAN_UNPROTECTED :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_UNPROTECTED .. autodata:: geosoft.gxapi.DB_CHAN_PROTECTED :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_PROTECTED .. _DB_CHAN_SYMBOL: DB_CHAN_SYMBOL constants ----------------------------------------------------------------------- Channel symbol for special channels .. autodata:: geosoft.gxapi.DB_CHAN_X :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_X .. autodata:: geosoft.gxapi.DB_CHAN_Y :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_Y .. autodata:: geosoft.gxapi.DB_CHAN_Z :annotation: .. autoattribute:: geosoft.gxapi.DB_CHAN_Z .. _DB_COMP: DB_COMP constants ----------------------------------------------------------------------- Supported compression levels .. autodata:: geosoft.gxapi.DB_COMP_NONE :annotation: .. autoattribute:: geosoft.gxapi.DB_COMP_NONE .. autodata:: geosoft.gxapi.DB_COMP_SPEED :annotation: .. autoattribute:: geosoft.gxapi.DB_COMP_SPEED .. autodata:: geosoft.gxapi.DB_COMP_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_COMP_SIZE .. _DB_COORDPAIR: DB_COORDPAIR constants ----------------------------------------------------------------------- Used to indicate the matching coordinate pair of a channel. .. autodata:: geosoft.gxapi.DB_COORDPAIR_NONE :annotation: .. autoattribute:: geosoft.gxapi.DB_COORDPAIR_NONE .. 
autodata:: geosoft.gxapi.DB_COORDPAIR_X :annotation: .. autoattribute:: geosoft.gxapi.DB_COORDPAIR_X .. autodata:: geosoft.gxapi.DB_COORDPAIR_Y :annotation: .. autoattribute:: geosoft.gxapi.DB_COORDPAIR_Y .. _DB_GROUP_CLASS_SIZE: DB_GROUP_CLASS_SIZE constants ----------------------------------------------------------------------- Class name max size .. autodata:: geosoft.gxapi.DB_GROUP_CLASS_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_GROUP_CLASS_SIZE .. _DB_INFO: DB_INFO constants ----------------------------------------------------------------------- Integer Database Information .. autodata:: geosoft.gxapi.DB_INFO_BLOBS_MAX :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_BLOBS_MAX .. autodata:: geosoft.gxapi.DB_INFO_LINES_MAX :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_LINES_MAX .. autodata:: geosoft.gxapi.DB_INFO_CHANS_MAX :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_CHANS_MAX .. autodata:: geosoft.gxapi.DB_INFO_USERS_MAX :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_USERS_MAX .. autodata:: geosoft.gxapi.DB_INFO_BLOBS_USED :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_BLOBS_USED .. autodata:: geosoft.gxapi.DB_INFO_LINES_USED :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_LINES_USED .. autodata:: geosoft.gxapi.DB_INFO_CHANS_USED :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_CHANS_USED .. autodata:: geosoft.gxapi.DB_INFO_USERS_USED :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_USERS_USED .. autodata:: geosoft.gxapi.DB_INFO_PAGE_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_PAGE_SIZE .. autodata:: geosoft.gxapi.DB_INFO_DATA_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_DATA_SIZE .. autodata:: geosoft.gxapi.DB_INFO_LOST_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_LOST_SIZE .. autodata:: geosoft.gxapi.DB_INFO_FREE_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_FREE_SIZE .. autodata:: geosoft.gxapi.DB_INFO_COMP_LEVEL :annotation: .. 
autoattribute:: geosoft.gxapi.DB_INFO_COMP_LEVEL .. autodata:: geosoft.gxapi.DB_INFO_BLOB_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_BLOB_SIZE .. autodata:: geosoft.gxapi.DB_INFO_FILE_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_FILE_SIZE .. autodata:: geosoft.gxapi.DB_INFO_INDEX_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_INDEX_SIZE .. autodata:: geosoft.gxapi.DB_INFO_MAX_BLOCK_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_MAX_BLOCK_SIZE .. autodata:: geosoft.gxapi.DB_INFO_CHANGESLOST :annotation: .. autoattribute:: geosoft.gxapi.DB_INFO_CHANGESLOST .. _DB_LINE_LABEL_FORMAT: DB_LINE_LABEL_FORMAT constants ----------------------------------------------------------------------- Line Label Formats .. autodata:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_LINE :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_LINE .. autodata:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_VERSION :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_VERSION .. autodata:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_TYPE :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_TYPE .. autodata:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_FLIGHT :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_FLIGHT .. autodata:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_FULL :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_FULL .. autodata:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_DATE :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_DATE .. autodata:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_LINK :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_LABEL_FORMAT_LINK .. _DB_LINE_SELECT: DB_LINE_SELECT constants ----------------------------------------------------------------------- Select modes .. autodata:: geosoft.gxapi.DB_LINE_SELECT_INCLUDE :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_SELECT_INCLUDE .. autodata:: geosoft.gxapi.DB_LINE_SELECT_EXCLUDE :annotation: .. 
autoattribute:: geosoft.gxapi.DB_LINE_SELECT_EXCLUDE .. _DB_LINE_TYPE: DB_LINE_TYPE constants ----------------------------------------------------------------------- Line types .. autodata:: geosoft.gxapi.DB_LINE_TYPE_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_TYPE_NORMAL .. autodata:: geosoft.gxapi.DB_LINE_TYPE_BASE :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_TYPE_BASE .. autodata:: geosoft.gxapi.DB_LINE_TYPE_TIE :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_TYPE_TIE .. autodata:: geosoft.gxapi.DB_LINE_TYPE_TEST :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_TYPE_TEST .. autodata:: geosoft.gxapi.DB_LINE_TYPE_TREND :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_TYPE_TREND .. autodata:: geosoft.gxapi.DB_LINE_TYPE_SPECIAL :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_TYPE_SPECIAL .. autodata:: geosoft.gxapi.DB_LINE_TYPE_RANDOM :annotation: .. autoattribute:: geosoft.gxapi.DB_LINE_TYPE_RANDOM .. _DB_LOCK: DB_LOCK constants ----------------------------------------------------------------------- Lock Modes .. autodata:: geosoft.gxapi.DB_LOCK_NONE :annotation: .. autoattribute:: geosoft.gxapi.DB_LOCK_NONE .. autodata:: geosoft.gxapi.DB_LOCK_READONLY :annotation: .. autoattribute:: geosoft.gxapi.DB_LOCK_READONLY .. autodata:: geosoft.gxapi.DB_LOCK_READWRITE :annotation: .. autoattribute:: geosoft.gxapi.DB_LOCK_READWRITE .. _DB_NAME: DB_NAME constants ----------------------------------------------------------------------- Get Database file names .. autodata:: geosoft.gxapi.DB_NAME_FILE :annotation: .. autoattribute:: geosoft.gxapi.DB_NAME_FILE .. _DB_OWN: DB_OWN constants ----------------------------------------------------------------------- Symbol Ownership .. autodata:: geosoft.gxapi.DB_OWN_SHARED :annotation: .. autoattribute:: geosoft.gxapi.DB_OWN_SHARED .. autodata:: geosoft.gxapi.DB_OWN_USER :annotation: .. autoattribute:: geosoft.gxapi.DB_OWN_USER .. 
_DB_SYMB_TYPE: DB_SYMB_TYPE constants ----------------------------------------------------------------------- Symbol types .. autodata:: geosoft.gxapi.DB_SYMB_BLOB :annotation: .. autoattribute:: geosoft.gxapi.DB_SYMB_BLOB .. autodata:: geosoft.gxapi.DB_SYMB_LINE :annotation: .. autoattribute:: geosoft.gxapi.DB_SYMB_LINE .. autodata:: geosoft.gxapi.DB_SYMB_CHAN :annotation: .. autoattribute:: geosoft.gxapi.DB_SYMB_CHAN .. autodata:: geosoft.gxapi.DB_SYMB_USER :annotation: .. autoattribute:: geosoft.gxapi.DB_SYMB_USER .. _DB_SYMB_NAME_SIZE: DB_SYMB_NAME_SIZE constants ----------------------------------------------------------------------- Size of Symbol Names .. autodata:: geosoft.gxapi.DB_SYMB_NAME_SIZE :annotation: .. autoattribute:: geosoft.gxapi.DB_SYMB_NAME_SIZE .. _DB_WAIT: DB_WAIT constants ----------------------------------------------------------------------- Wait Times .. autodata:: geosoft.gxapi.DB_WAIT_NONE :annotation: .. autoattribute:: geosoft.gxapi.DB_WAIT_NONE .. autodata:: geosoft.gxapi.DB_WAIT_INFINITY :annotation: .. autoattribute:: geosoft.gxapi.DB_WAIT_INFINITY .. _DB_ARRAY_BASETYPE: DB_ARRAY_BASETYPE constants ----------------------------------------------------------------------- Array channel base coordinate type .. autodata:: geosoft.gxapi.DB_ARRAY_BASETYPE_NONE :annotation: .. autoattribute:: geosoft.gxapi.DB_ARRAY_BASETYPE_NONE .. autodata:: geosoft.gxapi.DB_ARRAY_BASETYPE_TIME_WINDOWS :annotation: .. autoattribute:: geosoft.gxapi.DB_ARRAY_BASETYPE_TIME_WINDOWS .. autodata:: geosoft.gxapi.DB_ARRAY_BASETYPE_TIMES :annotation: .. autoattribute:: geosoft.gxapi.DB_ARRAY_BASETYPE_TIMES .. autodata:: geosoft.gxapi.DB_ARRAY_BASETYPE_FREQUENCIES :annotation: .. autoattribute:: geosoft.gxapi.DB_ARRAY_BASETYPE_FREQUENCIES .. autodata:: geosoft.gxapi.DB_ARRAY_BASETYPE_ELEVATIONS :annotation: .. autoattribute:: geosoft.gxapi.DB_ARRAY_BASETYPE_ELEVATIONS .. autodata:: geosoft.gxapi.DB_ARRAY_BASETYPE_DEPTHS :annotation: .. 
autoattribute:: geosoft.gxapi.DB_ARRAY_BASETYPE_DEPTHS .. autodata:: geosoft.gxapi.DB_ARRAY_BASETYPE_VELOCITIES :annotation: .. autoattribute:: geosoft.gxapi.DB_ARRAY_BASETYPE_VELOCITIES .. autodata:: geosoft.gxapi.DB_ARRAY_BASETYPE_DISCRETE_TIME_WINDOWS :annotation: .. autoattribute:: geosoft.gxapi.DB_ARRAY_BASETYPE_DISCRETE_TIME_WINDOWS .. _NULLSYMB: NULLSYMB constants ----------------------------------------------------------------------- Database Null .. autodata:: geosoft.gxapi.NULLSYMB :annotation: .. autoattribute:: geosoft.gxapi.NULLSYMB <file_sep>/examples/tutorial/Hello World/hello_world_simple.py import geosoft.gxpy as gxpy # a GX context is required, and must be assigned to a variable that persists through the life of execution. gxc = gxpy.gx.GXpy() # gid is a property of the context that holds the user's Geosoft ID. Say hello. print('Hello {}'.format(gxc.gid)) <file_sep>/docs/GXSHD.rst .. _GXSHD: GXSHD class ================================== .. autoclass:: geosoft.gxapi.GXSHD :members: .. _SHD_FIX: SHD_FIX constants ----------------------------------------------------------------------- Interactive tracking constraints. .. autodata:: geosoft.gxapi.SHD_FIX_NONE :annotation: .. autoattribute:: geosoft.gxapi.SHD_FIX_NONE .. autodata:: geosoft.gxapi.SHD_FIX_INCLINATION :annotation: .. autoattribute:: geosoft.gxapi.SHD_FIX_INCLINATION .. autodata:: geosoft.gxapi.SHD_FIX_DECLINATION :annotation: .. autoattribute:: geosoft.gxapi.SHD_FIX_DECLINATION <file_sep>/examples/tutorial/Hello World/hello_world_debug.py import geosoft.gxapi as gxapi import geosoft.gxpy as gxpy def rungx(): gxc = gxpy.gx.gx() gxapi.GXSYS.display_message("GX Python", "Hello {}".format(gxc.gid)) if __name__ == "__main__": gxc = gxpy.gx.GXpy() print('Hello {}'.format(gxc.gid))<file_sep>/geosoft/gxpy/grid_utility.py """ Geosoft grid and image utilities. .. seealso:: `geosoft.gxpy.grid`, `geosoft.gxapi.GXIMG`, `geosoft.gxapi.GXIMU` .. 
note:: Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_grid_utility.py>`_ """ import os import numpy as np import math import geosoft import geosoft.gxapi as gxapi from . import gx as gx from . import vv as gxvv from . import grid as gxgrd from . import map as gxmap from . import view as gxv from . import group as gxgrp from . import gdb as gxgdb from . import geometry as gxgeo from . import utility as gxu from . import geometry_utility as gxgeou from . import grid_fft as gxfft __version__ = geosoft.__version__ BOOL_AND = gxapi.IMU_BOOL_OPT_AND BOOL_OR = gxapi.IMU_BOOL_OPT_OR BOOL_XOR = gxapi.IMU_BOOL_OPT_XOR BOOL_SIZE_GRID1 = gxapi.IMU_BOOL_SIZING_0 BOOL_SIZE_GRID2 = gxapi.IMU_BOOL_SIZING_1 BOOL_SIZE_MIN = gxapi.IMU_BOOL_SIZING_MIN BOOL_SIZE_MAX = gxapi.IMU_BOOL_SIZING_MAX BOOL_OVERLAP_AVERAGE = gxapi.IMU_BOOL_OLAP_AVE BOOL_OVERLAP_GRID1 = gxapi.IMU_BOOL_OLAP_1 BOOL_OVERLAP_GRID2 = gxapi.IMU_BOOL_OLAP_2 BOOL_OVERLAP_MINUS = gxapi.IMU_BOOL_OLAP_MINUS TREND_EDGE = gxapi.IMU_TREND_EDGE TREND_ALL = gxapi.IMU_TREND_ALL DERIVATIVE_X = 0 DERIVATIVE_Y = 1 DERIVATIVE_Z = 2 DERIVATIVE_XY = 3 DERIVATIVE_XYZ = 4 TILT_ANGLE = 5 RETURN_PPOINT = 0 RETURN_LIST_OF_PPOINT = 1 RETURN_GDB = 2 def _t(s): return geosoft.gxpy.system.translate(s) class GridUtilityException(geosoft.GXRuntimeError): """ Exceptions from `geosoft.gxpy.grid_utility`. .. versionadded:: 9.4 """ pass def remove_trend(grid, file_name=None, method=TREND_EDGE, overwrite=False): """ Calculate a polynomial trend surface and return trend-removed grid. :param grid: `geosoft.gxpy.grid.Grid` instance, or a file name :param file_name: trend-removed grid file name, if `None` a temporary grid is created. :param method: base trend on `TREND_EDGE` for edge data or `TREND_ALL` for all data :param overwrite: True to overwrite existing file_name :return: `Grid` instance .. 
versionadded 9.4 """ if not isinstance(grid, gxgrd.Grid): grid = gxgrd.Grid.open(grid, dtype=np.float64, mode=gxgrd.FILE_READ) # need GS_DOUBLE grids if grid.gxtype != gxapi.GS_DOUBLE: ing = grid.copy(grid, gx.gx().temp_file('.grd(GRD)'), dtype=np.float64) ing.delete_files() else: ing = grid if file_name is None: file_name = gx.gx().temp_file('.grd(GRD)') dtg = grid.new(file_name=file_name, properties=ing.properties(), overwrite=overwrite) gxapi.GXIMU.grid_trnd(ing.gximg, dtg.gximg, 0, method, 1, gxapi.GXVM.create(gxapi.GS_REAL, 10), 3) return dtg def derivative(grid, derivative_type, file_name=None, overwrite=False, dtype=None, fft=True): """ Return a derivative of a grid or a tilt-angle :param grid: `geosoft.gxpy.grid.Grid` instance, or a file name :param derivative_type: Which derivative to calculate, (grid_data_uom/distance): ========================== ==================================================================== DERIVATIVE_X in the grid X direction DERIVATIVE_Y in the grid Y direction DERIVATIVE_Z in the grid Z direction DERIVATIVE_XYZ the total derivative sqrt(dx**2 + dy**2 + dz**2) (analytic signal) TILT_ANGLE tilt angle, atan2(dz, sqrt(dx**2 + dy**2)) (radians) ========================== ==================================================================== :param file_name: returned derivative file name, `None` for a temporary file :param overwrite: True to overwrite existing file :param dtype: dtype for the return grid, default is the same as the passed grid. :param fft: `False` calculate Z derivative with a space-domain convolution rather than an FFT. :return: `geosoft.gxpy.grid.Grid` instance that contains the derivative result .. note:: Derivative units_of_measure are grid_unit_of_measure / distance, except for the tilt angle, which is radians. Horizontal derivatives are calculated in the space domain based on the difference between neighboring cell values, and the Z derivative can be calculated using an FFT or a 5x5 space-domain convolution. 
An FFT calculation will generally produce a better result and it will be able to work with longer wavelengths, but at the expense of speed and edge effects in cases of very powerful anomalies along the edge of a grid. .. versionadded 9.4 """ def vertical_derivative(g, dt): # float64 grids for grid_vd if not isinstance(g, gxgrd.Grid): g = gxgrd.Grid.open(g, dtype=np.float64, mode=gxgrd.FILE_READ) if g.dtype != np.float64: g = g.copy(g, gx.gx().temp_file('.grd(GRD)'), dtype=np.float32, overwrite=True) g.delete_files() if fft: with gxfft.GridFFT(g) as gfft: gfft.filter(filters=['DRVZ 1']) dzg = gfft.result_grid(file_name=file_name, overwrite=overwrite) else: dzg = gxgrd.Grid.new(file_name=file_name, properties=g.properties(), overwrite=overwrite) gxapi.GXIMU.grid_vd(g.gximg, dzg.gximg) dzg.unit_of_measure = g.unit_of_measure + '/' + g.coordinate_system.unit_of_measure return gxgrd.reopen(dzg, dtype=dt) def tilt_angle(g, fn=None): dx = derivative(g, DERIVATIVE_X) dy = derivative(g, DERIVATIVE_Y) dz = derivative(g, DERIVATIVE_Z, fft=fft) result = expression((dx, dy, dz), 'atan2(g3,sqrt(g1**2+g2**2))', result_file_name=fn, overwrite=overwrite) result.unit_of_measure = 'radians' return gxgrd.reopen(result) def horizontal_gradient(g): dx = derivative(g, DERIVATIVE_X) dy = derivative(g, DERIVATIVE_Y) result = expression((dx, dy), 'sqrt(g1**2+g2**2)', result_file_name=file_name, overwrite=overwrite) result.unit_of_measure = g.unit_of_measure + '/' + g.coordinate_system.unit_of_measure return result def total_gradient(g): dx = derivative(g, DERIVATIVE_X) dy = derivative(g, DERIVATIVE_Y) dz = derivative(g, DERIVATIVE_Z, fft=fft) result = expression((dx, dy, dz), 'sqrt(g1**2+g2**2+g3**2)', result_file_name=file_name, overwrite=overwrite) result.unit_of_measure = g.unit_of_measure + '/' + g.coordinate_system.unit_of_measure return result if derivative_type == DERIVATIVE_Z: return vertical_derivative(grid, dt=dtype) # need float32 grids for grid_filt if not isinstance(grid, 
gxgrd.Grid): grid = gxgrd.Grid.open(grid, dtype=np.float32, mode=gxgrd.FILE_READ) if dtype is None: return_dtype = grid.dtype else: return_dtype = dtype if grid.dtype != np.float32: grid = grid.copy(grid, gx.gx().temp_file('.grd(GRD)'), dtype=np.float32, overwrite=True) grid.delete_files() if derivative_type == DERIVATIVE_XY: rgrd = horizontal_gradient(grid) if rgrd.dtype != return_dtype: return gxgrd.reopen(rgrd, dtype=return_dtype) else: return rgrd if derivative_type == DERIVATIVE_XYZ: rgrd = total_gradient(grid) if rgrd.dtype != return_dtype: return gxgrd.reopen(rgrd, dtype=return_dtype) else: return rgrd if derivative_type == TILT_ANGLE: if file_name is None: file_name = gx.gx().temp_file('.grd(GRD)') rgrd = tilt_angle(grid, fn=file_name) if rgrd.dtype != return_dtype: return gxgrd.reopen(rgrd, dtype=return_dtype) else: return rgrd # dxy grid if file_name is None: file_name = gx.gx().temp_file('.grd(GRD)') dxy = gxgrd.Grid.new(file_name=file_name, properties=grid.properties(), overwrite=overwrite) # filter if derivative_type == DERIVATIVE_X: filter_vv = gxvv.GXvv([0., 0., 0., -0.5, 0., +0.5, 0., 0., 0.], dtype=np.float64) mult = 1.0 / grid.dx else: filter_vv = gxvv.GXvv([0., 0.5, 0., 0., 0., 0., 0., -0.5, 0.], dtype=np.float64) mult = 1.0 / grid.dy gxapi.GXIMU.grid_filt(grid.gximg, dxy.gximg, 1, mult, gxapi.IMU_FILT_DUMMY_NO, gxapi.IMU_FILT_HZDRV_NO, gxapi.IMU_FILT_FILE_NO, "", filter_vv.gxvv) dxy.unit_of_measure = grid.unit_of_measure + '/' + grid.coordinate_system.unit_of_measure return gxgrd.reopen(dxy, dtype=return_dtype) def tilt_depth(grid, resolution=None, return_as=RETURN_PPOINT, gdb=None, overwrite=False, fft=True): """ Return estimate of the depth sources of potential filed anomalies. :param grid: `geosoft.gxpy.grid.Grid` instance or a grid file name. Ideally the grid should be RTP. :param resolution: zero-contour sampling resolution, defaults to 4 times the grid cell size. 
:param return_as:  return results as:

                       ===================== ====================================================================
                       RETURN_PPOINT         return results as a single `geosoft.gxpy.geometry.PPoint` instance
                       RETURN_LIST_OF_PPOINT return results as a list of `geosoft.gxpy.geometry.PPoint` instances
                       RETURN_GDB            return result as a `geosoft.gxpy.gdb.Geosoft_gdb` instance
                       ===================== ====================================================================

    :param gdb:        return database name, or a `geosoft.gxpy.gdv.Geosoft_database` instance. If not
                       specified and `return_as=RETURN_GDB`, a temporary database is created.
    :param overwrite:  True to overwrite existing gdb.
    :param fft:        `False` to use a space-domain convolution.  The default uses an FFT, which will
                       in general produce a cleaner and more accurate result, though it may be slower.
    :return:           depends on `return_as` setting

    .. note:: Given a TMI grid, or the vertical derivative of the gravity anomaly, calculate
        contact source depths using the tilt-depth method. The contact source depth is the
        reciprocal of the horizontal gradient of the tilt-derivative at the zero-contour of
        the tilt derivative.

    .. versionadded:: 9.4
    """

    # a caller-supplied gdb implies a database result
    if gdb is not None:
        return_as = RETURN_GDB

    gxc = gx.gx()

    gxc.log('Calculate tilt-angle...')
    ta = derivative(grid, TILT_ANGLE, fft=fft)

    gxc.log('Find zero contour of the tilt-angle...')
    if resolution is None:
        # default zero-contour sampling: 4 times the smallest cell size
        resolution = min(ta.dx, ta.dy) * 4.
    gdb = contour_points(ta, 0., resolution=resolution,
                         return_as=RETURN_GDB, gdb=gdb, overwrite=overwrite)

    gxc.log('Calculate tilt-derivative...')
    tad = derivative(ta, DERIVATIVE_XY, fft=fft)

    # get gradient of the TD at the zero locations; depth is the reciprocal of the gradient
    gxc.log('Calculate depth = reciprocal(tilt-derivative) at zero contour of the tilt-angle...')
    for ln in gdb.list_lines():
        xyz, chlist, fid = gdb.read_line(ln, channels=('X', 'Y', 'Z'))
        zero_tad = sample(tad, xyz)
        # avoid division by zero: zero-gradient points become NaN depths
        zero_tad[zero_tad == 0.] = np.nan
        np.reciprocal(zero_tad, out=zero_tad)
        xyz[:, 2] = zero_tad
        gdb.write_line(ln, xyz, chlist, fid)

    if return_as == RETURN_GDB:
        return gdb

    # collect each database line as a PPoint instance
    pplist = []
    for ln in gdb.list_lines():
        xyz = gdb.read_line(ln, channels=('X', 'Y', 'Z'))[0]
        pplist.append(gxgeo.PPoint(xyz, coordinate_system=gdb.coordinate_system))
    gdb.close(discard=True)

    if return_as == RETURN_LIST_OF_PPOINT:
        return pplist

    return gxgeo.PPoint.merge(pplist)


def sample(grid, xyz):
    """
    Return grid values sampled at the point locations.

    :param grid:    `geosoft.gxpy.grid.Grid` instance or a grid file name.
    :param xyz:     `geosoft.gxpy.geometry.PPoint` instance, or a numpy array shaped (-1, 3)
                    that holds the desired (x, y, z) locations. If a PPoint instance is passed
                    it will be reprojected to the grid coordinate system if necessary.
    :return:        1-dimensional numpy array of grid data values that match the passed PPoint or XYZ.

    .. note:: Sampled data values use linear interpolation between grid points.

    .. versionadded:: 9.1
    """

    if not isinstance(grid, gxgrd.Grid):
        grid = gxgrd.Grid(grid, dtype=np.float64)

    if isinstance(xyz, gxgeo.Geometry):
        # match the grid coordinate system, then convert to grid-local coordinates if oriented
        if xyz.coordinate_system != grid.coordinate_system:
            xyz = gxgeo.PPoint(xyz, coordinate_system=grid.coordinate_system)
        if xyz.coordinate_system.is_oriented:
            xyz = xyz.coordinate_system.oriented_from_xyz(xyz)
        xyz = xyz.pp

    vvx, vvy, vvz = gxvv.vvset_from_np(xyz)
    gxapi.GXIMU.get_zvv(grid.gximg, vvx.gxvv, vvy.gxvv, vvz.gxvv)

    return vvz.np


def grid_mosaic(mosaic, grid_list, type_decorate=''):
    """
    Combine a set of grids into a single grid.  Raises an error if the resulting grid is too large.

    :param mosaic:          name of the output grid, returned.  Decorate with '(HGD)' to get an HGD
    :param grid_list:       list of input grid names
    :param type_decorate:   decoration for input grids if not default
    :returns:               `geosoft.gxpy.grid.Grid` instance

    .. note:: If the coordinate systems are different the grids are reprojected to the
        coordinate system of the first grid.

    ..
versionadded:: 9.4
    """

    def props(gn, repro=None):
        # return grid properties, optionally after reprojecting to (ipj, cell)
        with gxgrd.Grid.open(gn) as gg:
            if repro:
                gg.gximg.create_projected2(repro[0], repro[1])
            return gg.properties()

    def dimension(glist):
        # determine the union extent and dimension of all grids in glist

        def dimg(_gd, _rep=None):
            # corner coordinates and (ipj, cell) reprojection info for one grid
            prp = props(_gd, _rep)
            _x0 = prp.get('x0')
            _y0 = prp.get('y0')
            _xm = _x0 + (prp.get('nx') - 1) * prp.get('dx')
            _ym = _y0 + (prp.get('ny') - 1) * prp.get('dy')
            _ipj = prp.get('coordinate_system').gxipj
            cell = prp.get('dx')
            return _x0, _y0, _xm, _ym, (_ipj, cell)

        def ndim(_x0, _xm, _dx):
            # number of cells needed to span _x0.._xm at cell size _dx
            return int((_xm - _x0 + _dx / 2.0) / _dx) + 1

        dx0, dy0, dxm, dym, drepro = dimg(glist[0])
        for gd in glist[1:]:
            # expand the union extent; each grid is matched to the first grid's cs
            xx0, yy0, xxm, yym, r = dimg(gd, drepro)
            if xx0 < dx0:
                dx0 = xx0
            if yy0 < dy0:
                dy0 = yy0
            if xxm > dxm:
                dxm = xxm
            if yym > dym:
                dym = yym

        # calculate new grid dimension
        _p = props(glist[0])
        nnx = ndim(dx0, dxm, _p.get('dx'))
        nny = ndim(dy0, dym, _p.get('dy'))

        return dx0, dy0, nnx, nny, dxm, dym

    def locate(_x0, _y0, _p):
        # cell offset of grid _p's origin relative to the mosaic origin (_x0, _y0)
        _dx = _p.get('dx')
        _dy = _p.get('dy')
        dsx = round((p.get('x0') - _x0) / _dx)
        dsy = round((p.get('y0') - _y0) / _dy)
        return dsx, dsy

    def paste(gn, _mpg):
        # copy grid gn into the mosaic pager _mpg at its located offset
        with gxgrd.Grid.open(gn) as _g:
            _p = _g.properties()
            _nx = _p.get('nx')
            _ny = _p.get('ny')
            gpg = _g.gxpg()
            destx, desty = locate(x0, y0, _p)
            gxc.log(' +{} nx,ny({},{})'.format(_g, _nx, _ny))
            gxc.log(' Copy ({},{}) -> ({},{}) of ({},{})'.format(_nx, _ny, destx, desty, mnx, mny))
            _mpg.copy_subset(gpg, desty, destx, 0, 0, _ny, _nx)
            return

    gxc = gx.gx()

    if len(grid_list) == 0:
        raise GridUtilityException(_t('At least one grid is required'))

    # create list of grids, all matching on coordinate system of first grid
    grids = []
    for i in range(len(grid_list)):
        grids.append(gxgrd.decorate_name(grid_list[i], type_decorate))

    # output grid
    x0, y0, nx, ny, xm, ym = dimension(grids)
    p = props(grids[0])
    p['x0'] = x0
    p['y0'] = y0
    p['nx'] = nx
    p['ny'] = ny
    gxc.log('')
    gxc.log('Mosaic: dim({},{}) x({},{}) y({},{}), cell({})...'.format(nx, ny, x0, xm, y0, ym, p.get('dx')))
    master = gxgrd.Grid.new(mosaic, p)
    gxc.log('Memory image ready ({}) dim({},{}) x0,y0({},{})'.
            format(master, master.nx, master.ny, master.x0, master.y0))

    # paste grids onto master
    mnx = master.nx
    mny = master.ny
    mpg = master.gxpg()
    for g in grids:
        paste(g, mpg)

    gxc.log('Mosaic completed: {}'.format(mosaic))

    return master


def grid_bool(g1, g2, joined_grid=None, opt=1, size=3, olap=1):
    """
    Combine two grids into a single grid, with boolean logic to determine the result.

    :param g1:          first grid to merge, `geosoft.gxpy.grid.Grid` instance or file name
    :param g2:          second grid to merge, `geosoft.gxpy.grid.Grid` instance or file name
    :param joined_grid: joined output grid name, overwritten if it exists.  Default makes
                        a temporary grid.
    :param opt:         option logic to determine output grid points:

                        =============== =========================
                        BOOL_AND        blank unless g1 and g2
                        BOOL_OR         blank unless g1 or g2
                        BOOL_XOR        blank where g1 and g2
                        =============== =========================

    :param size:        size of the output grid, default is minimum size

                        =============== =======================================
                        BOOL_SIZE_GRID1 output size matches g1
                        BOOL_SIZE_GRID2 output size matches g2
                        BOOL_SIZE_MIN   output size minimised to non-blank area
                        BOOL_SIZE_MAX   output size g1 + g2:
                        =============== =======================================

    :param olap:        what to do with overlapping valid points, default uses grid 1

                        ==================== ==================================
                        BOOL_OVERLAP_AVERAGE average values where grids overlap
                        BOOL_OVERLAP_GRID1   use g1 where grids overlap
                        BOOL_OVERLAP_GRID2   use g2 where grids overlap
                        BOOL_OVERLAP_MINUS   use g1 - g2 where grids overlap
                        ==================== ==================================

    :returns:           `Grid` instance of the merged output grid, must be closed
                        with a call to close().

    .. note:: If the grid coordinate systems differ, g2 is reprojected to the
        coordinate system of g1.

    .. versionadded:: 9.4
    """

    # open by name if necessary; remember what we opened so we close only those
    close_g1 = close_g2 = False
    if not isinstance(g1, gxgrd.Grid):
        g1 = gxgrd.Grid.open(g1)
        close_g1 = True
    if not isinstance(g2, gxgrd.Grid):
        g2 = gxgrd.Grid.open(g2)
        close_g2 = True

    if joined_grid is None:
        joined_grid = gx.gx().temp_file('.grd(GRD)')
    gxapi.GXIMU.grid_bool(g1.gximg, g2.gximg, joined_grid, opt, size, olap)

    if close_g1:
        g1.close()
    if close_g2:
        g2.close()

    return gxgrd.Grid.open(joined_grid)


def contour_points(grid, value, max_segments=1000, resolution=None,
                   return_as=RETURN_LIST_OF_PPOINT, gdb=None, overwrite=False):
    """
    Return a set of point segments that represent the spatial locations of contours
    threaded through the grid.

    :param grid:            grid file of `geosoft.gxpy.grid.Grid` instance
    :param value:           contour value
    :param max_segments:    maximum expected number of segments, raises error if there
                            are more actual segments.
    :param resolution:      the separation between points along the contours.  If not
                            specified the minimum grid cell size is used. Set
                            `resolution=0` to use the locations as returned by the
                            contouring algorithm.
    :param return_as:       return results as:

                            ===================== ====================================================================
                            RETURN_PPOINT         return results as a single `geosoft.gxpy.geometry.PPoint` instance
                            RETURN_LIST_OF_PPOINT return results as a list of `geosoft.gxpy.geometry.PPoint` instances
                            RETURN_GDB            return result as a `geosoft.gxpy.gdb.Geosoft_gdb` instance
                            ===================== ====================================================================

    :param gdb:             return database name, or a `geosoft.gxpy.gdv.Geosoft_database` instance.
                            If not specified and `return_as=RETURN_GDB`, a temporary database is created.
    :param overwrite:       `True` to overwrite gdb if it exists.
    :return:                depends on `return_as` setting

    .. note:: Contours through 3D oriented grids will be oriented in 3D.  Grids that
        are not 3D oriented will have a z value 0.0.

    ..
versionadded:: 9.4
    """

    if isinstance(grid, gxgrd.Grid):
        extent = grid.extent
        # work on a temporary copy so the caller's grid is untouched
        with grid.copy(grid) as g:
            temp_grid = g.file_name
            grid = g.file_name_decorated
            if resolution is None:
                resolution = min(g.dx, g.dy)
    else:
        with gxgrd.Grid.open(grid) as g:
            extent = g.extent
            if resolution is None:
                resolution = min(g.dx, g.dy)
        temp_grid = None

    # create a contour group for this value, export to a shape file
    with gxmap.Map.new(data_area=extent.extent_xy, overwrite=True) as gmap:
        map_file = gmap.file_name
        with gxv.View.open(gmap, "data") as v:
            gxgrp.contour(v, '_', grid, parameters=(',0,0', '', '', '', '', '1', str(value) + ',,,0'))
        shp_file = gx.gx().temp_file('shp')
        gmap.gxmap.export_all_in_view(shp_file, 'data', 1.0, 1.0,
                                      gxapi.MAP_EXPORT_BITS_24,
                                      gxapi.MAP_EXPORT_METHOD_STANDARD,
                                      gxapi.MAP_EXPORT_FORMAT_SHP, '')
    # the exporter writes line shapes to a '_lnz' shape file
    shp_file = shp_file[:-4] + '_lnz.shp'
    if not os.path.exists(shp_file):
        raise GridUtilityException(_t('The grid data does not intersect value {}').format(value))

    # a caller-supplied gdb implies a database result
    if gdb is not None:
        return_as = RETURN_GDB

    # import shape to database
    if not isinstance(gdb, gxgdb.Geosoft_gdb):
        gdb = gxgdb.Geosoft_gdb.new(name=gdb, max_lines=max_segments, max_channels=10, overwrite=overwrite)
    gis = gxapi.GXGIS.create(shp_file, '', gxapi.GIS_TYPE_ARCVIEW)
    gis.load_shapes_gdb(gdb.gxdb)
    gdb.coordinate_system = extent.coordinate_system

    # discard the temp files
    gxu.delete_files_by_root(temp_grid)
    gxu.delete_files_by_root(map_file)
    gxu.delete_files_by_root(shp_file[:-4])

    # resample to resolution
    if resolution > 0.:
        for ln in gdb.list_lines():
            xyz, chlist, _ = gdb.read_line(ln, channels=('X', 'Y', 'Z'))
            xyz = gxgeou.resample(xyz, resolution)
            gdb.write_line(ln, xyz, chlist)

    if return_as == RETURN_GDB:
        return gdb

    # make points from segments
    pplist = []
    for ln in gdb.list_lines():
        xyz = gdb.read_line(ln, channels=('X', 'Y', 'Z'))[0]
        xyz[:, 2] = 0.
        if resolution > 0.:
            xyz = gxgeou.resample(xyz, resolution)
        if extent.coordinate_system.is_oriented:
            # restore true 3D locations for oriented grids
            xyz = extent.coordinate_system.xyz_from_oriented(xyz)
        pplist.append(gxgeo.PPoint(xyz, coordinate_system=extent.coordinate_system))

    # discard the database
    gdb.close(discard=True)

    if return_as == RETURN_PPOINT:
        return gxgeo.PPoint.merge(pplist)
    return pplist


def calculate_slope_standard_deviation(grid):
    """
    Return the standard deviation of the slopes.

    :param grid:    `geosoft.gxpy.grid.Grid` instance, or a grid file name
    :returns:       Standard deviation of grid slopes

    .. Note:: This method calculates the standard deviation of the horizontal
        differences in the X and Y directions for the supplied image.  This is
        useful for shading routines.  A good default scaling factor is
        2.5 / standard deviation.  The image will be sub-sampled to a
        statistically meaningful number.  The cell sizes are used to determine
        the slopes.

    .. versionadded:: 9.4
    """

    # open by name if necessary; only close what we opened
    close_g = False
    if not isinstance(grid, gxgrd.Grid):
        grid = gxgrd.Grid.open(grid)
        close_g = True
    try:
        return gxapi.GXIMU.slope_standard_deviation(grid.gximg)
    finally:
        if close_g:
            grid.close()


def flood(grid, file_name=None, overwrite=False, tolerance=None, max_iterations=250, pass_tol=99.):
    """
    Flood blank areas in a grid based on a minimum-curvature surface.

    :param grid:            `geosoft.gxpy.grid.Grid` instance, or a grid file name
    :param file_name:       flooded grid file name, temporary created if `None`.
    :param overwrite:       `True` to overwrite existing file
    :param tolerance:       data fit tolerance, default is 0.001 times the data standard deviation
    :param max_iterations:  maximum iterations for fitting the surface
    :param pass_tol:        percentage of data that needs to pass the tolerance test when
                            defining the minimum-curvature surface.  The default is 99%.
    :return:                `geosoft.gxpy.grid.Grid` instance of a flooded grid.

    .. seealso:: `geosoft.gxpy.grid.Grid.minimum_curvature`

    ..
versionadded:: 9.4
    """

    def pg_rows(n):
        # row-feed callback for minimum_curvature: returns (x, y, value) rows,
        # None when all rows have been delivered
        if n >= grid.ny:
            return None
        pg.read_row(n, 0, 0, rvv.gxvv)
        xyv[:, 1] = n
        xyv[:, 2] = rvv.np
        return xyv

    if not isinstance(grid, gxgrd.Grid):
        grid = gxgrd.Grid.open(grid)

    pg = grid.gxpg(False)
    rvv = gxvv.GXvv(dtype=grid.dtype)
    rvv.length = grid.nx
    xyv = np.empty((grid.nx, 3))
    xyv[:, 0] = [i for i in range(grid.nx)]
    # blanking distance large enough to span the whole grid
    bkd = max(grid.nx, grid.ny)
    if tolerance is None:
        tolerance = grid.statistics()['sd'] * 0.001

    filled_grid = gxgrd.Grid.minimum_curvature(pg_rows,
                                               file_name=file_name,
                                               overwrite=overwrite,
                                               cs=1,
                                               area=(0, 0, grid.nx - 1, grid.ny - 1),
                                               bkd=bkd,
                                               itrmax=max_iterations,
                                               pastol=pass_tol,
                                               tol=tolerance,
                                               icgr=16,
                                               max_segments=grid.ny)
    # surface was fit in cell-index space; restore the real grid geometry
    filled_grid.set_properties(grid.properties())
    return filled_grid


def feather(grid, width, edge_value=None, file_name=None, overwrite=False):
    """
    Feather the edge of a grid to a constant value at the edge.

    :param grid:        `geosoft.gxpy.grid.Grid` instance, or a file name
    :param file_name:   feathered grid file name, temporary created if `None`.
    :param overwrite:   `True` to overwrite existing file
    :param width:       feather width in cells around the grid, must be <= half the grid dimension
    :param edge_value:  edge value, default is the data mean
    :return:            feathered grid `geosoft.gxpy.grid.Grid`

    .. versionadded:: 9.4
    """

    def _feather(dlen, w):
        # cosine taper: 1.0 in the interior, falling to 0.0 over w cells at each end
        ff = np.ones(dlen)
        e = np.array([math.cos((i + 1) * math.pi / w) for i in range(w)]) * 0.5 + 0.5
        ff[-len(e):] = e
        ff[:len(e)] = e[::-1]
        return ff

    if not isinstance(grid, gxgrd.Grid):
        grid = gxgrd.Grid.open(grid)
    if (width > grid.nx // 2) or (width > grid.ny // 2):
        raise GridUtilityException(_t('Width {} must be less than half the dimension ({}, {})')
                                   .format(width, grid.nx, grid.ny))
    if edge_value is None:
        edge_value = grid.statistics()['mean']

    pg = grid.gxpg()
    pgf = gxapi.GXPG.create(pg.n_rows(), pg.n_cols(), pg.e_type())
    vv = gxvv.GXvv(dtype=gxu.dtype_gx(pg.e_type()))

    # taper rows toward edge_value, then columns of the row-tapered result
    f = _feather(pg.n_cols(), width)
    for row in range(pg.n_rows()):
        pg.read_row(row, 0, 0, vv.gxvv)
        df = (vv.np - edge_value) * f + edge_value
        pgf.write_row(row, 0, 0, gxvv.GXvv(df).gxvv)
    f = _feather(pg.n_rows(), width)
    for col in range(pg.n_cols()):
        pgf.read_col(col, 0, 0, vv.gxvv)
        df = (vv.np - edge_value) * f + edge_value
        pgf.write_col(col, 0, 0, gxvv.GXvv(df).gxvv)

    return gxgrd.Grid.from_data_array(pgf, file_name=file_name, overwrite=overwrite,
                                      properties=grid.properties())


def expression(grids, expr, result_file_name=None, overwrite=False):
    """
    Apply an expression to grids.

    :param grids:       dictionary of named grid operands, or a list of grids (see example below).
                        If a list is provided the operand names will be 'g1', 'g2', 'g3', etc...
    :param expr:        expression string to apply, conforms to Python/C math expression syntax.
                        The expression can have multiple lines, each line terminated by a ';' character.
    :param result_file_name: optional result grid file name, if `None` a temporary grid is created.
    :param overwrite:   True to overwrite existing grid
    :return:            `Grid` instance that contains the result of the expression.

    *Example*

    .. code::

        import geosoft.gxpy.grid as gxgrd

        # add using file names
        sum = gxgrd.expression(('some_grid', 'some_other_grid'), 'g1+g2')

        # add using Grid instances
        grid_1 = gxgrd.Grid.open('some_grid')
        grid_2 = gxgrd.Grid.open('some_other_grid')
        sum = gxgrd.expression((grid_1, grid_2), 'g1+g2')

        # add using named operands
        sum = gxgrd.expression({'a': grid_1, 'b': grid_2}, 'a+b')

    .. versionadded 9.4
    """

    exp = gxapi.GXIEXP.create()

    # build default operands dict from list of grids
    if not isinstance(grids, dict):
        i = 1
        gd = {}
        for g in grids:
            gd['g{}'.format(i)] = g
            i += 1
        grids = gd

    # add grids to the expression
    properties = None
    delete_list = []
    for k, g in grids.items():
        if not isinstance(g, gxgrd.Grid):
            g = gxgrd.Grid.open(g, dtype=np.float64)
        elif g.dtype != np.float64:
            # work on a float64 temp copy; remember it so it can be deleted after
            g = gxgrd.Grid.copy(g, gx.gx().temp_file('.grd(GRD)'), dtype=np.float64)
            delete_list.append(g)
        exp.add_grid(g.gximg, k)
        if properties is None:
            properties = g.properties()

    if result_file_name is None:
        result_file_name = gx.gx().temp_file('.grd(GRD)')
    result = gxgrd.Grid.new(file_name=result_file_name, properties=properties, overwrite=overwrite)
    exp.add_grid(result.gximg, '_')

    # apply expression: the result operand is named '_', every line must end with ';'
    expr = ('_=' + expr).strip()
    if expr[-1] != ';':
        expr = expr + ';'
    exp.do_formula(expr, 100)

    # delete temporary grids
    for g in delete_list:
        g.delete_files()

    return gxgrd.reopen(result)
<file_sep>/docs/GXSURFACE.rst

.. _GXSURFACE:

GXSURFACE class
==================================

.. autoclass:: geosoft.gxapi.GXSURFACE
   :members:


.. _SURFACE_OPEN:

SURFACE_OPEN constants
-----------------------------------------------------------------------

Open Modes

.. autodata:: geosoft.gxapi.SURFACE_OPEN_READ
   :annotation:
.. autoattribute:: geosoft.gxapi.SURFACE_OPEN_READ

.. autodata:: geosoft.gxapi.SURFACE_OPEN_READWRITE
   :annotation:
.. autoattribute:: geosoft.gxapi.SURFACE_OPEN_READWRITE
<file_sep>/docs/GXEMAPTEMPLATE.rst

.. _GXEMAPTEMPLATE:

GXEMAPTEMPLATE class
==================================

..
autoclass:: geosoft.gxapi.GXEMAPTEMPLATE :members: .. _EMAPTEMPLATE_PATH: EMAPTEMPLATE_PATH constants ----------------------------------------------------------------------- Four forms .. autodata:: geosoft.gxapi.EMAPTEMPLATE_PATH_FULL :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_PATH_FULL .. autodata:: geosoft.gxapi.EMAPTEMPLATE_PATH_DIR :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_PATH_DIR .. autodata:: geosoft.gxapi.EMAPTEMPLATE_PATH_NAME_EXT :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_PATH_NAME_EXT .. autodata:: geosoft.gxapi.EMAPTEMPLATE_PATH_NAME :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_PATH_NAME .. _EMAPTEMPLATE_TRACK: EMAPTEMPLATE_TRACK constants ----------------------------------------------------------------------- Tracking Options .. autodata:: geosoft.gxapi.EMAPTEMPLATE_TRACK_ERASE :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_TRACK_ERASE .. autodata:: geosoft.gxapi.EMAPTEMPLATE_TRACK_RMENU :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_TRACK_RMENU .. autodata:: geosoft.gxapi.EMAPTEMPLATE_TRACK_CYCLE :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_TRACK_CYCLE .. _EMAPTEMPLATE_WINDOW_POSITION: EMAPTEMPLATE_WINDOW_POSITION constants ----------------------------------------------------------------------- Window Positioning Options .. autodata:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_POSITION_DOCKED :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_POSITION_DOCKED .. autodata:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_POSITION_FLOATING :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_POSITION_FLOATING .. _EMAPTEMPLATE_WINDOW_STATE: EMAPTEMPLATE_WINDOW_STATE constants ----------------------------------------------------------------------- Window State Options .. autodata:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_RESTORE :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_RESTORE .. 
autodata:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_MINIMIZE :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_MINIMIZE .. autodata:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_MAXIMIZE :annotation: .. autoattribute:: geosoft.gxapi.EMAPTEMPLATE_WINDOW_MAXIMIZE <file_sep>/docs/GXGIS.rst .. _GXGIS: GXGIS class ================================== .. autoclass:: geosoft.gxapi.GXGIS :members: .. _GIS_MAP2D: GIS_MAP2D constants ----------------------------------------------------------------------- View type to create .. autodata:: geosoft.gxapi.GIS_MAP2D_PLAN :annotation: .. autoattribute:: geosoft.gxapi.GIS_MAP2D_PLAN .. autodata:: geosoft.gxapi.GIS_MAP2D_EWSECTION :annotation: .. autoattribute:: geosoft.gxapi.GIS_MAP2D_EWSECTION .. autodata:: geosoft.gxapi.GIS_MAP2D_NSSECTION :annotation: .. autoattribute:: geosoft.gxapi.GIS_MAP2D_NSSECTION .. _GIS_TYPE: GIS_TYPE constants ----------------------------------------------------------------------- Type of file .. autodata:: geosoft.gxapi.GIS_TYPE_MAPINFO :annotation: .. autoattribute:: geosoft.gxapi.GIS_TYPE_MAPINFO .. autodata:: geosoft.gxapi.GIS_TYPE_ARCVIEW :annotation: .. autoattribute:: geosoft.gxapi.GIS_TYPE_ARCVIEW .. autodata:: geosoft.gxapi.GIS_TYPE_DGN :annotation: .. autoattribute:: geosoft.gxapi.GIS_TYPE_DGN .. autodata:: geosoft.gxapi.GIS_TYPE_SURPAC :annotation: .. autoattribute:: geosoft.gxapi.GIS_TYPE_SURPAC .. autodata:: geosoft.gxapi.GIS_TYPE_DATAMINE :annotation: .. autoattribute:: geosoft.gxapi.GIS_TYPE_DATAMINE .. autodata:: geosoft.gxapi.GIS_TYPE_GEMCOM :annotation: .. autoattribute:: geosoft.gxapi.GIS_TYPE_GEMCOM .. autodata:: geosoft.gxapi.GIS_TYPE_MICROMINE :annotation: .. autoattribute:: geosoft.gxapi.GIS_TYPE_MICROMINE .. autodata:: geosoft.gxapi.GIS_TYPE_MINESIGHT :annotation: .. autoattribute:: geosoft.gxapi.GIS_TYPE_MINESIGHT <file_sep>/docs/GXVVEXP.rst .. _GXVVEXP: GXVVEXP class ================================== .. 
autoclass:: geosoft.gxapi.GXVVEXP :members: <file_sep>/geosoft/gxapi/GXFLT.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXFLT(gxapi_cy.WrapFLT): """ GXFLT class. The `GXFLT <geosoft.gxapi.GXFLT>` class allows the application of user-defined convolution filters to data in an OASIS database """ def __init__(self, handle=0): super(GXFLT, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXFLT <geosoft.gxapi.GXFLT>` :returns: A null `GXFLT <geosoft.gxapi.GXFLT>` :rtype: GXFLT """ return GXFLT() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, input): """ Create a filter from a comma/space delimited string. :param input: Filter string :type input: str :returns: `GXFLT <geosoft.gxapi.GXFLT>` Object :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Terminates process if filter not found. Sample Fraser Filter string: "-1,-1,1,1" """ ret_val = gxapi_cy.WrapFLT._create(GXContext._get_tls_geo(), input.encode()) return ret_val @classmethod def load(cls, file): """ Load and return handle to a convolution filter. :param file: Name of the filter File :type file: str :returns: `GXFLT <geosoft.gxapi.GXFLT>` Object :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Terminates process if filter not found. A filter file is an ASCII file that contains filter coefficients, which are simply mumbers. There can be one coefficient to a line. Blank lines and comment lines are skipped. Comment lines beginn with a forward slash character in column 1. Following is an example Fraser Filter file: /---------------------- / Fraser Filter /---------------------- -1 -1 1 1 """ ret_val = gxapi_cy.WrapFLT._load(GXContext._get_tls_geo(), file.encode()) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXCOM.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXCOM(gxapi_cy.WrapCOM): """ GXCOM class. This class is used to communicate with external serial devices. It allows the setting of timeouts. """ def __init__(self, handle=0): super(GXCOM, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXCOM <geosoft.gxapi.GXCOM>` :returns: A null `GXCOM <geosoft.gxapi.GXCOM>` :rtype: GXCOM """ return GXCOM() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, port, baud, data_size, parity, stop_bits, flow_control, time_out): """ Create `GXCOM <geosoft.gxapi.GXCOM>` object. :param port: Port name to open ("COM1" is example) :param baud: :ref:`COM_BAUD` :param data_size: :ref:`COM_DATASIZE` :param parity: :ref:`COM_PARITY` :param stop_bits: :ref:`COM_STOPBITS` :param flow_control: :ref:`COM_FLOWCONTROL` :param time_out: Timeout in Ms (500) :type port: str :type baud: int :type data_size: int :type parity: int :type stop_bits: int :type flow_control: int :type time_out: int :returns: `GXCOM <geosoft.gxapi.GXCOM>` Object :rtype: GXCOM .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapCOM._create(GXContext._get_tls_geo(), port.encode(), baud, data_size, parity, stop_bits, flow_control, time_out) return GXCOM(ret_val) @classmethod def create_no_terminate(cls, port, baud, data_size, parity, stop_bits, flow_control, time_out): """ Create `GXCOM <geosoft.gxapi.GXCOM>` object. :param port: Port name to open ("COM1" is example) :param baud: :ref:`COM_BAUD` :param data_size: :ref:`COM_DATASIZE` :param parity: :ref:`COM_PARITY` :param stop_bits: :ref:`COM_STOPBITS` :param flow_control: :ref:`COM_FLOWCONTROL` :param time_out: Timeout in Ms (500) :type port: str :type baud: int :type data_size: int :type parity: int :type stop_bits: int :type flow_control: int :type time_out: int :returns: `GXCOM <geosoft.gxapi.GXCOM>` Object :rtype: GXCOM .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapCOM._create_no_terminate(GXContext._get_tls_geo(), port.encode(), baud, data_size, parity, stop_bits, flow_control, time_out) return GXCOM(ret_val) def read_line_no_terminate(self, line): """ Reads a Line from the `GXCOM <geosoft.gxapi.GXCOM>` :param line: String for line :type line: str_ref :returns: 0 - if successful in reading a line 1 - if an error was encountered :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val, line.value = self._read_line_no_terminate(line.value.encode()) return ret_val def read_chars_no_terminate(self, line): """ Reads characters from the `GXCOM <geosoft.gxapi.GXCOM>`, times out and does not terminate :param line: String for characters :type line: str_ref :returns: 0 - if successful 1 - if time out or error :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val, line.value = self._read_chars_no_terminate(line.value.encode()) return ret_val def read_line(self, line): """ Reads a Line from the `GXCOM <geosoft.gxapi.GXCOM>` :param line: String for line :type line: str_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ line.value = self._read_line(line.value.encode()) def write_chars_no_terminate(self, line): """ Writes characters to the `GXCOM <geosoft.gxapi.GXCOM>`. Does not terminate upon error :param line: Line to write :type line: str :returns: 0 - if successful in writing a string 1 - if time out or error was encountered :rtype: int .. 
versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._write_chars_no_terminate(line.encode()) return ret_val def purge_comm(self): """ Purges the input and output buffers. .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._purge_comm() def read_chars(self, line): """ Reads characters from the `GXCOM <geosoft.gxapi.GXCOM>` :param line: String for characters :type line: str_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ line.value = self._read_chars(line.value.encode()) def read_em61_lines_wa(self, lines, wa): """ Reads Lines from the `GXCOM <geosoft.gxapi.GXCOM>` to a `GXWA <geosoft.gxapi.GXWA>`: Geonics EM61 only :param lines: Number of lines :param wa: Where to put lines :type lines: int :type wa: GXWA .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._read_em61_lines_wa(lines, wa) def read_file2_wa(self, wa): """ Reads entire dataset from the `GXCOM <geosoft.gxapi.GXCOM>` to a `GXWA <geosoft.gxapi.GXWA>` :param wa: Where to put lines :type wa: GXWA .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._read_file2_wa(wa) def read_lines_wa(self, lines, wa): """ Reads Lines from the `GXCOM <geosoft.gxapi.GXCOM>` to a `GXWA <geosoft.gxapi.GXWA>` :param lines: Number of lines :param wa: Where to put lines :type lines: int :type wa: GXWA .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._read_lines_wa(lines, wa) def set_time_out(self, time_out): """ Set the timeout value. :param time_out: Timeout in Ms (500) :type time_out: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_time_out(time_out) def write_chars(self, line): """ Writes characters to the `GXCOM <geosoft.gxapi.GXCOM>` :param line: Line to write :type line: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._write_chars(line.encode()) def write_line(self, line): """ Writes a Line to the `GXCOM <geosoft.gxapi.GXCOM>` :param line: Line to write :type line: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._write_line(line.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/2D Views and Maps/grid_shade_contour_annotate.py import geosoft.gxpy.gx as gx import geosoft.gxpy.map as gxmap import geosoft.gxpy.view as gxview import geosoft.gxpy.group as gxgroup import geosoft.gxpy.agg as gxagg import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.viewer as gxviewer gxc = gx.GXpy() # create a map from grid coordinate system and extent with gxgrd.Grid('Wittichica Creek Residual Total Field.grd') as grd: grid_file_name = grd.file_name_decorated # create a map for this grid on A4 media, scale to fit the extent with gxmap.Map.new('Wittichica 
residual TMI', data_area=grd.extent_2d(), media="A4", margins=(1, 3.5, 3, 1), coordinate_system=grd.coordinate_system, overwrite=True) as gmap: map_file_name = gmap.file_name # draw into the views on the map. We are reopening the map as the Aggregate class only works with a closed grid. with gxmap.Map.open(map_file_name) as gmap: # work with the data view, draw a line around the data view with gxview.View.open(gmap, "data") as v: # add the grid image to the view, with shading, 20 nT contour interval to match default contour lines with gxagg.Aggregate_image.new(grid_file_name, shade=True, contour=20) as agg: gxgroup.Aggregate_group.new(v, agg) # colour legend gxgroup.legend_color_bar(v, 'TMI_legend', title='Res TMI\nnT', location=(1.2,0), cmap=agg.layer_color_map(0), cmap2=agg.layer_color_map(1)) # contour the grid gxgroup.contour(v, 'TMI_contour', grid_file_name) # map title and creator tag with gxview.View.open(gmap, "base") as v: with gxgroup.Draw(v, 'title') as g: g.text("Tutorial Example\nresidual mag", reference=gxgroup.REF_BOTTOM_CENTER, location=(100, 10), text_def=gxgroup.Text_def(height=3.5, weight=gxgroup.FONT_WEIGHT_BOLD)) g.text("created by:" + gxc.gid, location=(1, 1.5), text_def=gxgroup.Text_def(height=1.2, italics=True)) # add a map surround to the map gmap.surround(outer_pen='kt500', inner_pen='kt100', gap=0.1) # annotate the data view locations gmap.annotate_data_xy(grid=gxmap.GRID_CROSSES) gmap.annotate_data_ll(grid=gxmap.GRID_LINES, grid_pen=gxgroup.Pen(line_color='b'), text_def=gxgroup.Text_def(color='b', height=0.15, italics=True)) # scale bar gmap.scale_bar(location=(1, 3, 1.5), text_def=gxgroup.Text_def(height=0.15)) # display the map in a Geosoft viewer gxviewer.view_document(map_file_name, wait_for_close=False) # save to a PNG file gxmap.save_as_image(map_file_name, "wittichica_mag.png", type=gxmap.RASTER_FORMAT_PNG)<file_sep>/docs/GXGUI.rst .. _GXGUI: GXGUI class ================================== .. 
autoclass:: geosoft.gxapi.GXGUI :members: .. _AOI_RETURN_STATE: AOI_RETURN_STATE constants ----------------------------------------------------------------------- AOI query return state .. autodata:: geosoft.gxapi.AOI_RETURN_CANCEL :annotation: .. autoattribute:: geosoft.gxapi.AOI_RETURN_CANCEL .. autodata:: geosoft.gxapi.AOI_RETURN_NODEFINE :annotation: .. autoattribute:: geosoft.gxapi.AOI_RETURN_NODEFINE .. autodata:: geosoft.gxapi.AOI_RETURN_DEFINE :annotation: .. autoattribute:: geosoft.gxapi.AOI_RETURN_DEFINE .. _COORDSYS_MODE: COORDSYS_MODE constants ----------------------------------------------------------------------- Coordinate system wizard `GXIPJ <geosoft.gxapi.GXIPJ>` types allowed on return. The wizard present three types of projections for selection by the user, Geographic (GCS), Projected (PCS), and Unknown. (Unknown requires only that the units be defined.) The Editable flag must be Yes for this option to take affect, and is overridden internally if the user's license does not allow modification of projections (e.g. the OM Viewer). .. autodata:: geosoft.gxapi.COORDSYS_MODE_ALL :annotation: .. autoattribute:: geosoft.gxapi.COORDSYS_MODE_ALL .. autodata:: geosoft.gxapi.COORDSYS_MODE_GCS :annotation: .. autoattribute:: geosoft.gxapi.COORDSYS_MODE_GCS .. autodata:: geosoft.gxapi.COORDSYS_MODE_PCS :annotation: .. autoattribute:: geosoft.gxapi.COORDSYS_MODE_PCS .. autodata:: geosoft.gxapi.COORDSYS_MODE_GCS_PCS :annotation: .. autoattribute:: geosoft.gxapi.COORDSYS_MODE_GCS_PCS .. autodata:: geosoft.gxapi.COORDSYS_MODE_PCS_UNKNOWN :annotation: .. autoattribute:: geosoft.gxapi.COORDSYS_MODE_PCS_UNKNOWN .. _DAT_TYPE: DAT_TYPE constants ----------------------------------------------------------------------- Type of files (grids, images) to support .. autodata:: geosoft.gxapi.DAT_TYPE_GRID :annotation: .. autoattribute:: geosoft.gxapi.DAT_TYPE_GRID .. autodata:: geosoft.gxapi.DAT_TYPE_IMAGE :annotation: .. autoattribute:: geosoft.gxapi.DAT_TYPE_IMAGE .. 
autodata:: geosoft.gxapi.DAT_TYPE_GRID_AND_IMAGE :annotation: .. autoattribute:: geosoft.gxapi.DAT_TYPE_GRID_AND_IMAGE .. _FILE_FILTER: FILE_FILTER constants ----------------------------------------------------------------------- File filters .. autodata:: geosoft.gxapi.FILE_FILTER_ALL :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ALL .. autodata:: geosoft.gxapi.FILE_FILTER_GDB :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GDB .. autodata:: geosoft.gxapi.FILE_FILTER_GX :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GX .. autodata:: geosoft.gxapi.FILE_FILTER_GS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GS .. autodata:: geosoft.gxapi.FILE_FILTER_INI :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_INI .. autodata:: geosoft.gxapi.FILE_FILTER_OMN :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_OMN .. autodata:: geosoft.gxapi.FILE_FILTER_VU :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_VU .. autodata:: geosoft.gxapi.FILE_FILTER_MAP :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_MAP .. autodata:: geosoft.gxapi.FILE_FILTER_PRJ :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_PRJ .. autodata:: geosoft.gxapi.FILE_FILTER_CON :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_CON .. autodata:: geosoft.gxapi.FILE_FILTER_MNU :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_MNU .. autodata:: geosoft.gxapi.FILE_FILTER_PDF :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_PDF .. autodata:: geosoft.gxapi.FILE_FILTER_PLT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_PLT .. autodata:: geosoft.gxapi.FILE_FILTER_GWS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GWS .. autodata:: geosoft.gxapi.FILE_FILTER_AGG :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_AGG .. autodata:: geosoft.gxapi.FILE_FILTER_TBL :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TBL .. autodata:: geosoft.gxapi.FILE_FILTER_ZON :annotation: .. 
autoattribute:: geosoft.gxapi.FILE_FILTER_ZON .. autodata:: geosoft.gxapi.FILE_FILTER_ITR :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ITR .. autodata:: geosoft.gxapi.FILE_FILTER_DXF :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_DXF .. autodata:: geosoft.gxapi.FILE_FILTER_TIF :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TIF .. autodata:: geosoft.gxapi.FILE_FILTER_EMF :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_EMF .. autodata:: geosoft.gxapi.FILE_FILTER_BMP :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_BMP .. autodata:: geosoft.gxapi.FILE_FILTER_LUT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_LUT .. autodata:: geosoft.gxapi.FILE_FILTER_PNG :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_PNG .. autodata:: geosoft.gxapi.FILE_FILTER_JPG :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_JPG .. autodata:: geosoft.gxapi.FILE_FILTER_PCX :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_PCX .. autodata:: geosoft.gxapi.FILE_FILTER_GIF :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GIF .. autodata:: geosoft.gxapi.FILE_FILTER_GRD :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GRD .. autodata:: geosoft.gxapi.FILE_FILTER_ERS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ERS .. autodata:: geosoft.gxapi.FILE_FILTER_EPS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_EPS .. autodata:: geosoft.gxapi.FILE_FILTER_SHP :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_SHP .. autodata:: geosoft.gxapi.FILE_FILTER_CGM :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_CGM .. autodata:: geosoft.gxapi.FILE_FILTER_TAB :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TAB .. autodata:: geosoft.gxapi.FILE_FILTER_COMPS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_COMPS .. autodata:: geosoft.gxapi.FILE_FILTER_CSV :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_CSV .. 
autodata:: geosoft.gxapi.FILE_FILTER_GPF :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GPF .. autodata:: geosoft.gxapi.FILE_FILTER_PLY :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_PLY .. autodata:: geosoft.gxapi.FILE_FILTER_STM :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_STM .. autodata:: geosoft.gxapi.FILE_FILTER_TTM :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TTM .. autodata:: geosoft.gxapi.FILE_FILTER_XYZ :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_XYZ .. autodata:: geosoft.gxapi.FILE_FILTER_BAR :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_BAR .. autodata:: geosoft.gxapi.FILE_FILTER_GEOSOFT_LICENSE :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GEOSOFT_LICENSE .. autodata:: geosoft.gxapi.FILE_FILTER_XML :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_XML .. autodata:: geosoft.gxapi.FILE_FILTER_GXNET :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GXNET .. autodata:: geosoft.gxapi.FILE_FILTER_ECW :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ECW .. autodata:: geosoft.gxapi.FILE_FILTER_J2K :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_J2K .. autodata:: geosoft.gxapi.FILE_FILTER_JP2 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_JP2 .. autodata:: geosoft.gxapi.FILE_FILTER_SEL :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_SEL .. autodata:: geosoft.gxapi.FILE_FILTER_SVG :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_SVG .. autodata:: geosoft.gxapi.FILE_FILTER_SVZ :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_SVZ .. autodata:: geosoft.gxapi.FILE_FILTER_WRP :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_WRP .. autodata:: geosoft.gxapi.FILE_FILTER_MAPPLOT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_MAPPLOT .. autodata:: geosoft.gxapi.FILE_FILTER_DTM :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_DTM .. 
autodata:: geosoft.gxapi.FILE_FILTER_VOXEL :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_VOXEL .. autodata:: geosoft.gxapi.FILE_FILTER_MAPTEMPLATE :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_MAPTEMPLATE .. autodata:: geosoft.gxapi.FILE_FILTER_ACTION :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ACTION .. autodata:: geosoft.gxapi.FILE_FILTER_DM :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_DM .. autodata:: geosoft.gxapi.FILE_FILTER_KML :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_KML .. autodata:: geosoft.gxapi.FILE_FILTER_KMZ :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_KMZ .. autodata:: geosoft.gxapi.FILE_FILTER_TARGET_PLAN :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TARGET_PLAN .. autodata:: geosoft.gxapi.FILE_FILTER_TARGET_SECTION :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TARGET_SECTION .. autodata:: geosoft.gxapi.FILE_FILTER_TARGET_STRIPLOG :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TARGET_STRIPLOG .. autodata:: geosoft.gxapi.FILE_FILTER_TARGET_3D :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TARGET_3D .. autodata:: geosoft.gxapi.FILE_FILTER_ARGIS_LYR :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ARGIS_LYR .. autodata:: geosoft.gxapi.FILE_FILTER_ARGIS_MXD :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ARGIS_MXD .. autodata:: geosoft.gxapi.FILE_FILTER_GOCAD_TS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GOCAD_TS .. autodata:: geosoft.gxapi.FILE_FILTER_LST :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_LST .. autodata:: geosoft.gxapi.FILE_FILTER_ECS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ECS .. autodata:: geosoft.gxapi.FILE_FILTER_TARGET_FENCE :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TARGET_FENCE .. autodata:: geosoft.gxapi.FILE_FILTER_GMS3D :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GMS3D .. 
autodata:: geosoft.gxapi.FILE_FILTER_BT2 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_BT2 .. autodata:: geosoft.gxapi.FILE_FILTER_BPR :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_BPR .. autodata:: geosoft.gxapi.FILE_FILTER_BPR2 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_BPR2 .. autodata:: geosoft.gxapi.FILE_FILTER_XLS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_XLS .. autodata:: geosoft.gxapi.FILE_FILTER_XLSX :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_XLSX .. autodata:: geosoft.gxapi.FILE_FILTER_MDB :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_MDB .. autodata:: geosoft.gxapi.FILE_FILTER_ACCDB :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ACCDB .. autodata:: geosoft.gxapi.FILE_FILTER_INTERSECTION_TBL :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_INTERSECTION_TBL .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_CON :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_CON .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_CHG :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_CHG .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_MSH :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_MSH .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_MSH_DAT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_MSH_DAT .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_TOPO_DAT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_TOPO_DAT .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_TOPO_XYZ :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_TOPO_XYZ .. autodata:: geosoft.gxapi.FILE_FILTER_XYZ_TEMPLATE_I0 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_XYZ_TEMPLATE_I0 .. autodata:: geosoft.gxapi.FILE_FILTER_PICO_TEMPLATE_I1 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_PICO_TEMPLATE_I1 .. autodata:: geosoft.gxapi.FILE_FILTER_BB_TEMPLATE_I2 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_BB_TEMPLATE_I2 .. 
autodata:: geosoft.gxapi.FILE_FILTER_ASCII_TEMPLATE_I3 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ASCII_TEMPLATE_I3 .. autodata:: geosoft.gxapi.FILE_FILTER_ODBC_TEMPLATE_I4 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ODBC_TEMPLATE_I4 .. autodata:: geosoft.gxapi.FILE_FILTER_EXP :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_EXP .. autodata:: geosoft.gxapi.FILE_FILTER_SEGY :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_SEGY .. autodata:: geosoft.gxapi.FILE_FILTER_DAARC500 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_DAARC500 .. autodata:: geosoft.gxapi.FILE_FILTER_TXT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_TXT .. autodata:: geosoft.gxapi.FILE_FILTER_VOXEL_INVERSION :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_VOXEL_INVERSION .. autodata:: geosoft.gxapi.FILE_FILTER_GMS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GMS .. autodata:: geosoft.gxapi.FILE_FILTER_FLT3D :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_FLT3D .. autodata:: geosoft.gxapi.FILE_FILTER_RESOURCE_PACK :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_RESOURCE_PACK .. autodata:: geosoft.gxapi.FILE_FILTER_GEOSTRING :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GEOSTRING .. autodata:: geosoft.gxapi.FILE_FILTER_GEOSURFACE :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GEOSURFACE .. autodata:: geosoft.gxapi.FILE_FILTER_GEOSOFT3DV :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GEOSOFT3DV .. autodata:: geosoft.gxapi.FILE_FILTER_VECTORVOXEL :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_VECTORVOXEL .. autodata:: geosoft.gxapi.FILE_FILTER_FLT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_FLT .. autodata:: geosoft.gxapi.FILE_FILTER_XYZ_TEMPLATE_O0 :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_XYZ_TEMPLATE_O0 .. autodata:: geosoft.gxapi.FILE_FILTER_GMS2D :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GMS2D .. 
autodata:: geosoft.gxapi.FILE_FILTER_IP_DATABASE_TEMPLATE :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_IP_DATABASE_TEMPLATE .. autodata:: geosoft.gxapi.FILE_FILTER_GEOSOFT_RESOURCE_MODULE :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GEOSOFT_RESOURCE_MODULE .. autodata:: geosoft.gxapi.FILE_FILTER_VT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_VT .. autodata:: geosoft.gxapi.FILE_FILTER_INT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_INT .. autodata:: geosoft.gxapi.FILE_FILTER_SGT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_SGT .. autodata:: geosoft.gxapi.FILE_FILTER_IMGVIEW :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_IMGVIEW .. autodata:: geosoft.gxapi.FILE_FILTER_ZIP :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ZIP .. autodata:: geosoft.gxapi.FILE_FILTER_GPS_TABLE :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GPS_TABLE .. autodata:: geosoft.gxapi.FILE_FILTER_VULCAN_TRIANGULATION :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_VULCAN_TRIANGULATION .. autodata:: geosoft.gxapi.FILE_FILTER_VULCAN_BLOCK_MODEL :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_VULCAN_BLOCK_MODEL .. autodata:: geosoft.gxapi.FILE_FILTER_PRJVIEW :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_PRJVIEW .. autodata:: geosoft.gxapi.FILE_FILTER_LEAPFROG_MODEL :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_LEAPFROG_MODEL .. autodata:: geosoft.gxapi.FILE_FILTER_IOGAS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_IOGAS .. autodata:: geosoft.gxapi.FILE_FILTER_ASEG_ESF :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_ASEG_ESF .. autodata:: geosoft.gxapi.FILE_FILTER_LACOSTE_DAT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_LACOSTE_DAT .. autodata:: geosoft.gxapi.FILE_FILTER_VAR :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_VAR .. autodata:: geosoft.gxapi.FILE_FILTER_P190 :annotation: .. 
autoattribute:: geosoft.gxapi.FILE_FILTER_P190 .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_OBS_DAT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_OBS_DAT .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_LOC :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_LOC .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_MOD :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_MOD .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_DEN :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_DEN .. autodata:: geosoft.gxapi.FILE_FILTER_UBC_SUS :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_UBC_SUS .. autodata:: geosoft.gxapi.FILE_FILTER_GOCAD_VOXET :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_GOCAD_VOXET .. autodata:: geosoft.gxapi.FILE_FILTER_SCINTREX_DAT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_SCINTREX_DAT .. autodata:: geosoft.gxapi.FILE_FILTER_DMP :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_DMP .. autodata:: geosoft.gxapi.FILE_FILTER_RAW :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_RAW .. autodata:: geosoft.gxapi.FILE_FILTER_DAT :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_DAT .. autodata:: geosoft.gxapi.FILE_FILTER_OMF :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_OMF .. autodata:: geosoft.gxapi.FILE_FILTER_3DSURVEY :annotation: .. autoattribute:: geosoft.gxapi.FILE_FILTER_3DSURVEY .. _FILE_FORM: FILE_FORM constants ----------------------------------------------------------------------- File Form Defines .. autodata:: geosoft.gxapi.FILE_FORM_OPEN :annotation: .. autoattribute:: geosoft.gxapi.FILE_FORM_OPEN .. autodata:: geosoft.gxapi.FILE_FORM_SAVE :annotation: .. autoattribute:: geosoft.gxapi.FILE_FORM_SAVE .. _GS_DIRECTORY: GS_DIRECTORY constants ----------------------------------------------------------------------- Geosoft predefined directory .. autodata:: geosoft.gxapi.GS_DIRECTORY_NONE :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_NONE .. 
autodata:: geosoft.gxapi.GS_DIRECTORY_GEOSOFT :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_GEOSOFT .. autodata:: geosoft.gxapi.GS_DIRECTORY_BIN :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_BIN .. autodata:: geosoft.gxapi.GS_DIRECTORY_GER :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_GER .. autodata:: geosoft.gxapi.GS_DIRECTORY_OMN :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_OMN .. autodata:: geosoft.gxapi.GS_DIRECTORY_TBL :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_TBL .. autodata:: geosoft.gxapi.GS_DIRECTORY_FONTS :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_FONTS .. autodata:: geosoft.gxapi.GS_DIRECTORY_GX :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_GX .. autodata:: geosoft.gxapi.GS_DIRECTORY_GS :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_GS .. autodata:: geosoft.gxapi.GS_DIRECTORY_APPS :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_APPS .. autodata:: geosoft.gxapi.GS_DIRECTORY_ETC :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_ETC .. autodata:: geosoft.gxapi.GS_DIRECTORY_HLP :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_HLP .. autodata:: geosoft.gxapi.GS_DIRECTORY_GXDEV :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_GXDEV .. autodata:: geosoft.gxapi.GS_DIRECTORY_COMPONENT :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_COMPONENT .. autodata:: geosoft.gxapi.GS_DIRECTORY_CSV :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_CSV .. autodata:: geosoft.gxapi.GS_DIRECTORY_LIC :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_LIC .. autodata:: geosoft.gxapi.GS_DIRECTORY_INI :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_INI .. autodata:: geosoft.gxapi.GS_DIRECTORY_TEMP :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_TEMP .. autodata:: geosoft.gxapi.GS_DIRECTORY_UETC :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_UETC .. 
autodata:: geosoft.gxapi.GS_DIRECTORY_UMAPTEMPLATE :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_UMAPTEMPLATE .. autodata:: geosoft.gxapi.GS_DIRECTORY_COMPONENT_SCRIPTS :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_COMPONENT_SCRIPTS .. autodata:: geosoft.gxapi.GS_DIRECTORY_COMPONENT_HTML :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_COMPONENT_HTML .. autodata:: geosoft.gxapi.GS_DIRECTORY_IMG :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_IMG .. autodata:: geosoft.gxapi.GS_DIRECTORY_BAR :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_BAR .. autodata:: geosoft.gxapi.GS_DIRECTORY_GXNET :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_GXNET .. autodata:: geosoft.gxapi.GS_DIRECTORY_MAPTEMPLATE :annotation: .. autoattribute:: geosoft.gxapi.GS_DIRECTORY_MAPTEMPLATE .. _IMPCH_TYPE: IMPCH_TYPE constants ----------------------------------------------------------------------- Import Chem defines .. autodata:: geosoft.gxapi.IMPCH_TYPE_DATA :annotation: .. autoattribute:: geosoft.gxapi.IMPCH_TYPE_DATA .. autodata:: geosoft.gxapi.IMPCH_TYPE_ASSAY :annotation: .. autoattribute:: geosoft.gxapi.IMPCH_TYPE_ASSAY .. _WINDOW_STATE: WINDOW_STATE constants ----------------------------------------------------------------------- Window State Options .. autodata:: geosoft.gxapi.WINDOW_RESTORE :annotation: .. autoattribute:: geosoft.gxapi.WINDOW_RESTORE .. autodata:: geosoft.gxapi.WINDOW_MINIMIZE :annotation: .. autoattribute:: geosoft.gxapi.WINDOW_MINIMIZE .. autodata:: geosoft.gxapi.WINDOW_MAXIMIZE :annotation: .. autoattribute:: geosoft.gxapi.WINDOW_MAXIMIZE .. _XTOOL_ALIGN: XTOOL_ALIGN constants ----------------------------------------------------------------------- XTool docking alignment flags .. autodata:: geosoft.gxapi.XTOOL_ALIGN_LEFT :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_ALIGN_LEFT .. autodata:: geosoft.gxapi.XTOOL_ALIGN_TOP :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_ALIGN_TOP .. 
autodata:: geosoft.gxapi.XTOOL_ALIGN_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_ALIGN_RIGHT .. autodata:: geosoft.gxapi.XTOOL_ALIGN_BOTTOM :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_ALIGN_BOTTOM .. autodata:: geosoft.gxapi.XTOOL_ALIGN_ANY :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_ALIGN_ANY .. _XTOOL_DOCK: XTOOL_DOCK constants ----------------------------------------------------------------------- XTool default docking state .. autodata:: geosoft.gxapi.XTOOL_DOCK_TOP :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_DOCK_TOP .. autodata:: geosoft.gxapi.XTOOL_DOCK_LEFT :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_DOCK_LEFT .. autodata:: geosoft.gxapi.XTOOL_DOCK_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_DOCK_RIGHT .. autodata:: geosoft.gxapi.XTOOL_DOCK_BOTTOM :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_DOCK_BOTTOM .. autodata:: geosoft.gxapi.XTOOL_DOCK_FLOAT :annotation: .. autoattribute:: geosoft.gxapi.XTOOL_DOCK_FLOAT <file_sep>/geosoft/gxapi/GXKGRD.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXKGRD(gxapi_cy.WrapKGRD): """ GXKGRD class. The `GXKGRD <geosoft.gxapi.GXKGRD>` object is used as a storage place for the control parameters that the Krigrid program needs to execute. The Run_KGRD function executes the Krigrid program using the `GXKGRD <geosoft.gxapi.GXKGRD>` object. 
""" def __init__(self, handle=0): super(GXKGRD, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXKGRD <geosoft.gxapi.GXKGRD>` :returns: A null `GXKGRD <geosoft.gxapi.GXKGRD>` :rtype: GXKGRD """ return GXKGRD() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def clear(self): """ Clears all the parameters in a `GXKGRD <geosoft.gxapi.GXKGRD>` object .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ self._clear() @classmethod def create(cls): """ Create a handle to a Krigrid object :returns: `GXKGRD <geosoft.gxapi.GXKGRD>` Object :rtype: GXKGRD .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The Krigrid object is initially empty. It will store the control file parameters which the Krigrid program needs to execute. Use the LoadParms_KGRD method to get the control file parameters into the `GXKGRD <geosoft.gxapi.GXKGRD>` object. """ ret_val = gxapi_cy.WrapKGRD._create(GXContext._get_tls_geo()) return GXKGRD(ret_val) def load_parms(self, file): """ Retrieves a Krigrid object's control parameters from a file. :param file: Name of file to get the parameter settings from :type file: str :returns: 0 OK, 1 Error. :rtype: int .. 
versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the control file name passed into this function is a file which does not exist, then the defaults for a Krigrid control file will be generated and put into the `GXKGRD <geosoft.gxapi.GXKGRD>` object. Otherwise, the control file's settings are retrieved from the file and loaded into the `GXKGRD <geosoft.gxapi.GXKGRD>` object. """ ret_val = self._load_parms(file.encode()) return ret_val def run(self, zchan, in_dat, out_grd_dat, out_err_dat, in_var_name, out_var_name, vao, vi, vo): """ Executes the Krigrid program, using the input channel and output file parameters. :param zchan: Name of Z Channel to perfrom gridding on :param in_dat: Handle to source `GXDAT <geosoft.gxapi.GXDAT>` object (from database) :param out_grd_dat: Handle to output grid file `GXDAT <geosoft.gxapi.GXDAT>` :param out_err_dat: Handle to output error grid file `GXDAT <geosoft.gxapi.GXDAT>` ((`GXDAT <geosoft.gxapi.GXDAT>`)0) if no error grid required :param in_var_name: Name of input variogram file :param out_var_name: Name of output variogram file :param vao: Flag of variogram only :param vi: Flag of input variogram :param vo: Flag of output variogram :type zchan: str :type in_dat: GXDAT :type out_grd_dat: GXDAT :type out_err_dat: GXDAT :type in_var_name: str :type out_var_name: str :type vao: int :type vi: int :type vo: int :returns: 0 OK, 1 Error. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._run(zchan.encode(), in_dat, out_grd_dat, out_err_dat, in_var_name.encode(), out_var_name.encode(), vao, vi, vo) return ret_val @classmethod def run2(cls, db, x, y, z, ctl, grd, err_grd, in_var, out_var, vao): """ Executes the Krigrid program directly on a database. 
:param db: Handle to a database :param x: Y Channel :param y: X Channel :param z: Data channel :param ctl: KRIGRID control file. :param grd: (output grid name (not required if variogram analysis only)) :param err_grd: (output error file, "" for none) :param in_var: (input variogram file, "" for none) :param out_var: (output variogram file, "" for none) :param vao: 1 if Variogram Analysis Only, other wise 0 :type db: GXDB :type x: str :type y: str :type z: str :type ctl: str :type grd: str :type err_grd: str :type in_var: str :type out_var: str :type vao: int :returns: 0 OK, 1 Error. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapKGRD._run2(GXContext._get_tls_geo(), db, x.encode(), y.encode(), z.encode(), ctl.encode(), grd.encode(), err_grd.encode(), in_var.encode(), out_var.encode(), vao) return ret_val @classmethod def run3(cls, db, x, y, z, ctl, grd, err_grd, in_var, out_var, log_file, vao): """ Executes the Krigrid program directly on a database and specifies the log file :param db: Handle to a database :param x: Y Channel :param y: X Channel :param z: Data channel :param ctl: KRIGRID control file. :param grd: (output grid name (not required if variogram analysis only)) :param err_grd: (output error file, "" for none) :param in_var: (input variogram file, "" for none) :param out_var: (output variogram file, "" for none) :param log_file: (log file name, "" for default) :param vao: 1 if Variogram Analysis Only, other wise 0 :type db: GXDB :type x: str :type y: str :type z: str :type ctl: str :type grd: str :type err_grd: str :type in_var: str :type out_var: str :type log_file: str :type vao: int :returns: 0 OK, 1 Error. :rtype: int .. 
versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapKGRD._run3(GXContext._get_tls_geo(), db, x.encode(), y.encode(), z.encode(), ctl.encode(), grd.encode(), err_grd.encode(), in_var.encode(), out_var.encode(), log_file.encode(), vao) return ret_val @classmethod def run_vv(cls, vv_x, vv_y, vv_z, ipj, ctl, grd, err_grd, in_var, out_var, log_file, vao): """ Executes the Krigrid program directly on input data VVs. :param vv_x: X data :param vv_y: Y data :param vv_z: Z (grid value) data :param ipj: Projection to put into grid :param ctl: KRIGRID control file. :param grd: (output grid name (not required if variogram analysis only)) :param err_grd: (output error file, "" for none) :param in_var: (input variogram file, "" for none) :param out_var: (output variogram file, "" for none) :param log_file: (log file name, "" for default) :param vao: 1 if Variogram Analysis Only, other wise 0 :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type ipj: GXIPJ :type ctl: str :type grd: str :type err_grd: str :type in_var: str :type out_var: str :type log_file: str :type vao: int :returns: 0 OK, 1 Error. :rtype: int .. versionadded:: 2022.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapKGRD._run_vv(GXContext._get_tls_geo(), vv_x, vv_y, vv_z, ipj, ctl.encode(), grd.encode(), err_grd.encode(), in_var.encode(), out_var.encode(), log_file.encode(), vao) return ret_val def save_parms(self, name): """ Puts the Krigrid object's control parameters back into its control file. :param name: Name of file to put the parameter settings into :type name: str :returns: 0 OK, 1 Error. :rtype: int .. 
versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the control file did not previously exist, it will be created. Otherwise, the old file will be overwritten. """ ret_val = self._save_parms(name.encode()) return ret_val @classmethod def get_defaults(cls, db, x, y, z, blankingDistance, lowPassDesamplingFactor): """ Get default blanking distance and low-pass desampling factor. :param db: Handle to a database :param x: Y Channel :param y: X Channel :param z: Data channel :param blankingDistance: blanking distance :param lowPassDesamplingFactor: low-pass desampling factor :type db: GXDB :type x: str :type y: str :type z: str :type blankingDistance: float_ref :type lowPassDesamplingFactor: int_ref :returns: 0 OK, 1 Error. :rtype: int .. versionadded:: 2021.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, blankingDistance.value, lowPassDesamplingFactor.value = gxapi_cy.WrapKGRD._get_defaults(GXContext._get_tls_geo(), db, x.encode(), y.encode(), z.encode(), blankingDistance.value, lowPassDesamplingFactor.value) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/viewer.py """ Geosoft Viewers. .. note:: Test example: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/examples/stand-alone/test_viewer.py>`_ """ import os import subprocess import geosoft import geosoft.gxapi as gxapi __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class ViewerException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.viewer`. .. 
versionadded:: 9.2 """ pass def _get_default_gd_exe(): s = gxapi.str_ref() gxapi.GXSYS.get_directory(gxapi.SYS_DIR_GEOSOFT_BIN, s) bin_dir = s.value gd_exe = os.path.join(bin_dir, 'omcore.exe') if os.path.exists(gd_exe): return gd_exe gd_exe = os.path.join(bin_dir, 'omtarget.exe') if os.path.exists(gd_exe): return gd_exe gd_exe = os.path.join(bin_dir, 'omedu.exe') if os.path.exists(gd_exe): return gd_exe gd_exe = os.path.join(bin_dir, 'omv.exe') if os.path.exists(gd_exe): return gd_exe return None def view_document(document_file_name, wait_for_close=True, env=None): """ Open Geosoft Desktop application for viewing a supported Geosoft document type. These include: :: gdb file map files geosoft_3dv files grid files voxel files vector_voxel files VOXI models GM-SYS 2d models GM-SYS 3d models :param document_file_name: document file name, require decorators for grids, e.g. testgrid.grd(GRD). Supports all documents that can be openned by Geosoft Desktop. :param wait_for_close: wait for process to exit, default `True` :param env: environment variables to add to os environment variables .. versionadded:: 9.2 """ gd_exe = _get_default_gd_exe() if not gd_exe: gxapi.GXSYS.display_message('Geosoft Desktop application not Found', 'Geosoft Desktop, Geosoft Target or a Geosoft viewer must be installed ' 'to view a Geosoft document type. Downloads are available from ' 'https://my.geosoft.com/downloads.') else: proc_env = os.environ.copy() if env: proc_env.update(env) proc = subprocess.Popen([gd_exe, '-doc={}'.format(document_file_name)], env=proc_env) if wait_for_close: proc.communicate() <file_sep>/docs/templates/geosoft.gxapi.classes.rst .. _geosoft.gxapi.classes: GX API Class/Library Reference ============================== These are the low-level library classes and functions that make up the GX API. .. 
toctree:: :maxdepth: 1 {% for class in classes %} {{ class }} {% endfor %} <file_sep>/geosoft/gxapi/GXSHD.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSHD(gxapi_cy.WrapSHD): """ GXSHD class. This class supports fast interactive shadowing in a map or grid document. The SHD object is created using the StartShading_EMAP method. """ def __init__(self, handle=0): super(GXSHD, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSHD <geosoft.gxapi.GXSHD>` :returns: A null `GXSHD <geosoft.gxapi.GXSHD>` :rtype: GXSHD """ return GXSHD() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def refresh(self, grid_path, inclination, declination, scale, contrast, brightness, wet_look): """ Refresh the SHD with new shading parameters. :param grid_path: Grid path returned :param inclination: inclination (degrees) :param declination: declination (degrees) :param scale: vertical scale relative to base scale :param contrast: contrast 0-1 (recommended >0.1, can change with wet_look changes) :param brightness: brightness 0-1 (can change with wet_look changes) :param wet_look: Apply wet-look effect (shading layer uses lighter distribution)? :type grid_path: str :type inclination: float :type declination: float :type scale: float :type contrast: float_ref :type brightness: float_ref :type wet_look: bool .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ contrast.value, brightness.value = self._refresh(grid_path.encode(), inclination, declination, scale, contrast.value, brightness.value, wet_look) def track_interactive(self, constraint, inclination, declination): """ Track a line on map and get shading parameters based on its length and direction. :param constraint: :ref:`SHD_FIX` :param inclination: returned inclination :param declination: returned declination :type constraint: int :type inclination: float_ref :type declination: float_ref :returns: 0 if tracking completed successfully. 1 if user cancelled or tracking failed. :rtype: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, inclination.value, declination.value = self._track_interactive(constraint, inclination.value, declination.value) return ret_val def end_shading(self, apply_changes): """ This ends interactive shading and must be called if any interactive changes should be applied. Passing false to apply changes is equivalent to simply disposing handle. :param apply_changes: Apply changes to map. :type apply_changes: bool .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. .. 
seealso:: StartShading_EMAP """ self._end_shading(apply_changes) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXRA.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXRA(gxapi_cy.WrapRA): """ GXRA class. The `GXRA <geosoft.gxapi.GXRA>` class is used to access ASCII files sequentially or by line number. The files are opened in read-only mode, so no write operations are defined """ def __init__(self, handle=0): super(GXRA, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXRA <geosoft.gxapi.GXRA>` :returns: A null `GXRA <geosoft.gxapi.GXRA>` :rtype: GXRA """ return GXRA() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, file): """ Creates `GXRA <geosoft.gxapi.GXRA>` :param file: Name of the file :type file: str :returns: `GXRA <geosoft.gxapi.GXRA>` Object :rtype: GXRA .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapRA._create(GXContext._get_tls_geo(), file.encode()) return GXRA(ret_val) @classmethod def create_sbf(cls, sbf, file): """ Creates `GXRA <geosoft.gxapi.GXRA>` on an `GXSBF <geosoft.gxapi.GXSBF>` :param sbf: Storage :param file: Name of the file :type sbf: GXSBF :type file: str :returns: `GXRA <geosoft.gxapi.GXRA>` Object :rtype: GXRA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method allows you to open an `GXRA <geosoft.gxapi.GXRA>` in a structured file storage (an `GXSBF <geosoft.gxapi.GXSBF>`). SBFs can be created inside other data containers, such as workspaces, maps, images and databases. This lets you store application specific information together with the data to which it applies. .. seealso:: sbf.gxh """ ret_val = gxapi_cy.WrapRA._create_sbf(GXContext._get_tls_geo(), sbf, file.encode()) return GXRA(ret_val) def gets(self, strbuff): """ Get next full line from `GXRA <geosoft.gxapi.GXRA>` :param strbuff: Buffer in which to place string :type strbuff: str_ref :returns: 0 - Ok 1 - End of file :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, strbuff.value = self._gets(strbuff.value.encode()) return ret_val def len(self): """ Returns the total number of lines in `GXRA <geosoft.gxapi.GXRA>` :returns: # of lines in the `GXRA <geosoft.gxapi.GXRA>`. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._len() return ret_val def line(self): """ Returns current line #, 0 is the first :returns: The current read line location. 
:rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This will be the next line read. """ ret_val = self._line() return ret_val def seek(self, line): """ Position next read to specified line # :param line: Line #, 0 is the first. :type line: int :returns: 0 if seeked line is within the range of lines, 1 if outside range, line pointer will not be moved. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._seek(line) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXMAPTEMPLATE.rst .. _GXMAPTEMPLATE: GXMAPTEMPLATE class ================================== .. autoclass:: geosoft.gxapi.GXMAPTEMPLATE :members: .. _MAPTEMPLATE_OPEN: MAPTEMPLATE_OPEN constants ----------------------------------------------------------------------- Open Modes .. autodata:: geosoft.gxapi.MAPTEMPLATE_WRITENEW :annotation: .. autoattribute:: geosoft.gxapi.MAPTEMPLATE_WRITENEW .. autodata:: geosoft.gxapi.MAPTEMPLATE_EXIST :annotation: .. 
autoattribute:: geosoft.gxapi.MAPTEMPLATE_EXIST <file_sep>/examples/tutorial/2D Views and Maps/get_data_files.py import geosoft.gxpy.gx as gx import geosoft.gxpy.utility as gxu gxc = gx.GXpy() url = 'https://github.com/GeosoftInc/gxpy/raw/9.3/examples/tutorial/2D%20Views%20and%20Maps/' gxu.url_retrieve(url + 'Wittichica Creek Residual Total Field.grd') gxu.url_retrieve(url + 'Wittichica Creek Residual Total Field.grd.gi') gxu.url_retrieve(url + 'Wittichica Creek Residual Total Field.grd.xml') <file_sep>/geosoft/gxapi/GX3DN.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GX3DN(gxapi_cy.Wrap3DN): """ GX3DN class. This class manages the rendering of a 3D view. It allows the positioning of the camera, specification of the zoom as well as some rendering controls for the axis. It is directly related to the `GXMVIEW <geosoft.gxapi.GXMVIEW>` class. """ def __init__(self, handle=0): super(GX3DN, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GX3DN <geosoft.gxapi.GX3DN>` :returns: A null `GX3DN <geosoft.gxapi.GX3DN>` :rtype: GX3DN """ return GX3DN() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def copy(self, source): """ Copy one `GX3DN <geosoft.gxapi.GX3DN>` object to another. :param source: Source `GX3DN <geosoft.gxapi.GX3DN>` to Copy from :type source: GX3DN .. 
versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(source) @classmethod def create(cls): """ Creates a `GX3DN <geosoft.gxapi.GX3DN>`. :returns: `GX3DN <geosoft.gxapi.GX3DN>` Object :rtype: GX3DN .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.Wrap3DN._create(GXContext._get_tls_geo()) return GX3DN(ret_val) def get_point_of_view(self, distance, declination, inclination): """ Get location of the point we are looking from :param distance: Distance from center relative to longest grid dimension (which is 1.0) :param declination: Declination, 0 to 360 CW from Y :param inclination: Inclination, -90 to +90 :type distance: float_ref :type declination: float_ref :type inclination: float_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ distance.value, declination.value, inclination.value = self._get_point_of_view(distance.value, declination.value, inclination.value) def get_scale(self, x, y, z): """ Get the axis relative scales. :param x: X Scale :param y: Y Scale :param z: Z Scale :type x: float_ref :type y: float_ref :type z: float_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x.value, y.value, z.value = self._get_scale(x.value, y.value, z.value) def get_axis_color(self): """ Get the Axis draw color :returns: Axis Color :rtype: int .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_axis_color() return ret_val def get_axis_font(self, font): """ Get the Axis font :param font: Font name :type font: str_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ font.value = self._get_axis_font(font.value.encode()) def get_background_color(self): """ Get the window background color :returns: Background Color value :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_background_color() return ret_val def get_render_controls(self, box, axis, label_x, label_y, label_z): """ Get the rendering controls :param box: Render Bounding Box (0 or 1) :param axis: Render Axis (0 or 1) :param label_x: Label for X axis :param label_y: Label for Y axis :param label_z: Label for Z axis :type box: int_ref :type axis: int_ref :type label_x: str_ref :type label_y: str_ref :type label_z: str_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ box.value, axis.value, label_x.value, label_y.value, label_z.value = self._get_render_controls(box.value, axis.value, label_x.value.encode(), label_y.value.encode(), label_z.value.encode()) def get_shading(self): """ Set the shading control on or off :returns: Shading On/Off :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_shading() return ret_val def set_axis_color(self, color): """ Set the Axis draw color :param color: Axis Color :type color: int .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_axis_color(color) def set_axis_font(self, font): """ Set the Axis font :param font: Font name :type font: str .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_axis_font(font.encode()) def set_background_color(self, color): """ Set the window background color :param color: Background Color :type color: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_background_color(color) def set_point_of_view(self, distance, declination, inclination): """ Set location of the point we are looking from :param distance: Distance from center relative to longest grid dimension (which is 1.0) :param declination: Declination, 0 to 360 CW from Y :param inclination: Inclination, -90 to +90 :type distance: float :type declination: float :type inclination: float .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_point_of_view(distance, declination, inclination) def set_render_controls(self, box, axis, label_x, label_y, label_z): """ Set the rendering controls :param box: Render Bounding Box (0 or 1) :param axis: Render Axis (0 or 1) :param label_x: Label for X axis :param label_y: Label for Y axis :param label_z: Label for Z axis :type box: int :type axis: int :type label_x: str :type label_y: str :type label_z: str .. 
versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_render_controls(box, axis, label_x.encode(), label_y.encode(), label_z.encode()) def set_scale(self, x, y, z): """ Set the axis relative scales. :param x: X Scale (default 1.0) :param y: Y Scale (default 1.0) :param z: Z Scale (default 1.0) :type x: float :type y: float :type z: float .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** By default all scales are equal (1.0). By setting these scales, relative adjustments to the overall view of the 3D objects can be made. Note that they are relative to each other. Thus, setting the scaling to 5,5,5 is the same as 1,1,1. This is typically used to exaggerate one scale such as Z (1,1,5). """ self._set_scale(x, y, z) def set_shading(self, shading): """ Set the shading control on or off :param shading: 0: Off, 1: On. :type shading: int .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_shading(shading) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/setup.py # coding = utf-8 import json import sys import shutil from os import path, remove, environ from glob import glob from setuptools import setup with open('geosoft/pkg_info.json') as fp: _info = json.load(fp) def read(fname): return open(path.join(path.dirname(__file__), fname)).read() version_tag = "{}{}".format(_info['version'], _info['pre-release']) if _info['pre-release'] == '': dev_status_classifier = "Development Status :: 5 - Production/Stable" else: dev_status_classifier = "Development Status :: 4 - Beta" for f in glob("geosoft/*.pyd"): try: remove(f) except PermissionError as e: raise Exception("An application is using a file we need to change: \n {}".format(str(e))) dependencies = ['numpy', 'pandas', 'requests'] if 'bdist_wheel' in sys.argv: # Have to specify python-tag to specify which module for arg in sys.argv: if arg.startswith('--python-tag='): pythontag = arg[13:] if pythontag == "cp36": shutil.copyfile('gxapi_cy.cp36-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp36-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') elif pythontag == "cp37": shutil.copyfile('gxapi_cy.cp37-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp37-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') elif pythontag == "cp38": shutil.copyfile('gxapi_cy.cp38-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp38-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') elif pythontag == "cp39": shutil.copyfile('gxapi_cy.cp39-win_amd64.pyd', 
'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp39-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') elif pythontag == "cp310": shutil.copyfile('gxapi_cy.cp310-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp310-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') break else: # Copy the version we are building for py_ver_major_minor = sys.version_info[:2] if py_ver_major_minor == (3, 6): shutil.copyfile('gxapi_cy.cp36-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp36-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') elif py_ver_major_minor == (3, 7): shutil.copyfile('gxapi_cy.cp37-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp37-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') elif py_ver_major_minor == (3, 8): shutil.copyfile('gxapi_cy.cp38-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp38-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') elif py_ver_major_minor == (3, 9): shutil.copyfile('gxapi_cy.cp39-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp39-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') elif py_ver_major_minor == (3, 10): shutil.copyfile('gxapi_cy.cp310-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy.pyd') shutil.copyfile('gxapi_cy_extend.cp310-win_amd64.pyd', 'geosoft/gxapi/gxapi_cy_extend.pyd') packages=[ 'geosoft', 'geosoft.gxapi', 'geosoft.gxpy', 'geosoft.gxpy._jdcal', 'geosoft.gxpy._xmltodict', 'geosoft.gxpy.user_input' ] package_data={ 'geosoft': ['*.json'], 'geosoft.gxapi': ['gxapi_cy.pyd', 'gxapi_cy_extend.pyd', '*.dll'], 'geosoft.gxpy._jdcal': ['*.txt', '*.rst'], 'geosoft.gxpy._xmltodict': ['LICENSE', '*.md'], 'geosoft.gxpy.user_input': ['*.gx'] } setup( name='geosoft', version=version_tag, description='Geosoft GX API module for Python', long_description=read('README.md'), author='<NAME>.', author_email='<EMAIL>', platforms=["win_amd64"], 
url='https://github.com/GeosoftInc/gxpy', license='BSD', install_requires=dependencies, packages=packages, package_data=package_data, test_suite="geosoft.gxpy.tests", classifiers=[ dev_status_classifier, "Topic :: Scientific/Engineering", "Intended Audience :: Science/Research", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: Microsoft :: Windows", "Programming Language :: Python :: 3 :: Only", ], ) <file_sep>/geosoft/gxpy/_jdcal/jdcal.py # -*- coding:utf-8 -*- """Functions for converting between Julian dates and calendar dates. A function for converting Gregorian calendar dates to Julian dates, and another function for converting Julian calendar dates to Julian dates are defined. Two functions for the reverse calculations are also defined. Different regions of the world switched to Gregorian calendar from Julian calendar on different dates. Having separate functions for Julian and Gregorian calendars allow maximum flexibility in choosing the relevant calendar. All the above functions are "proleptic". This means that they work for dates on which the concerned calendar is not valid. For example, Gregorian calendar was not used prior to around October 1582. Julian dates are stored in two floating point numbers (double). Julian dates, and Modified Julian dates, are large numbers. If only one number is used, then the precision of the time stored is limited. Using two numbers, time can be split in a manner that will allow maximum precision. For example, the first number could be the Julian date for the beginning of a day and the second number could be the fractional day. Calculations that need the latter part can now work with maximum precision. A function to test if a given Gregorian calendar year is a leap year is defined. Zero point of Modified Julian Date (MJD) and the MJD of 2000/1/1 12:00:00 are also given. This module is based on the TPM C library, by <NAME>. 
The idea for splitting Julian date into two floating point numbers was inspired by the IAU SOFA C library. :author: <NAME> :contact: <EMAIL> :license: BSD (http://www.opensource.org/licenses/bsd-license.php) """ from __future__ import division from __future__ import print_function import math __version__ = "1.3" MJD_0 = 2400000.5 MJD_JD2000 = 51544.5 def fpart(x): """Return fractional part of given number.""" return math.modf(x)[0] def ipart(x): """Return integer part of given number.""" return math.modf(x)[1] def is_leap(year): """Leap year or not in the Gregorian calendar.""" x = math.fmod(year, 4) y = math.fmod(year, 100) z = math.fmod(year, 400) # Divisible by 4 and, # either not divisible by 100 or divisible by 400. return not x and (y or not z) def gcal2jd(year, month, day): """Gregorian calendar date to Julian date. The input and output are for the proleptic Gregorian calendar, i.e., no consideration of historical usage of the calendar is made. Parameters ---------- year : int Year as an integer. month : int Month as an integer. day : int Day as an integer. Returns ------- jd1, jd2: 2-element tuple of floats When added together, the numbers give the Julian date for the given Gregorian calendar date. The first number is always MJD_0 i.e., 2451545.5. So the second is the MJD. Examples -------- >>> gcal2jd(2000,1,1) (2400000.5, 51544.0) >>> 2400000.5 + 51544.0 + 0.5 2451545.0 >>> year = [-4699, -2114, -1050, -123, -1, 0, 1, 123, 1678.0, 2000, ....: 2012, 2245] >>> month = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] >>> day = [1, 12, 23, 14, 25, 16, 27, 8, 9, 10, 11, 31] >>> x = [gcal2jd(y, m, d) for y, m, d in zip(year, month, day)] >>> for i in x: print i (2400000.5, -2395215.0) (2400000.5, -1451021.0) (2400000.5, -1062364.0) (2400000.5, -723762.0) (2400000.5, -679162.0) (2400000.5, -678774.0) (2400000.5, -678368.0) (2400000.5, -633797.0) (2400000.5, -65812.0) (2400000.5, 51827.0) (2400000.5, 56242.0) (2400000.5, 141393.0) Negative months and days are valid. 
For example, 2000/-2/-4 => 1999/+12-2/-4 => 1999/10/-4 => 1999/9/30-4 => 1999/9/26. >>> gcal2jd(2000, -2, -4) (2400000.5, 51447.0) >>> gcal2jd(1999, 9, 26) (2400000.5, 51447.0) >>> gcal2jd(2000, 2, -1) (2400000.5, 51573.0) >>> gcal2jd(2000, 1, 30) (2400000.5, 51573.0) >>> gcal2jd(2000, 3, -1) (2400000.5, 51602.0) >>> gcal2jd(2000, 2, 28) (2400000.5, 51602.0) Month 0 becomes previous month. >>> gcal2jd(2000, 0, 1) (2400000.5, 51513.0) >>> gcal2jd(1999, 12, 1) (2400000.5, 51513.0) Day number 0 becomes last day of previous month. >>> gcal2jd(2000, 3, 0) (2400000.5, 51603.0) >>> gcal2jd(2000, 2, 29) (2400000.5, 51603.0) If `day` is greater than the number of days in `month`, then it gets carried over to the next month. >>> gcal2jd(2000,2,30) (2400000.5, 51604.0) >>> gcal2jd(2000,3,1) (2400000.5, 51604.0) >>> gcal2jd(2001,2,30) (2400000.5, 51970.0) >>> gcal2jd(2001,3,2) (2400000.5, 51970.0) Notes ----- The returned Julian date is for mid-night of the given date. To find the Julian date for any time of the day, simply add time as a fraction of a day. For example Julian date for mid-day can be obtained by adding 0.5 to either the first part or the second part. The latter is preferable, since it will give the MJD for the date and time. BC dates should be given as -(BC - 1) where BC is the year. For example 1 BC == 0, 2 BC == -1, and so on. Negative numbers can be used for `month` and `day`. For example 2000, -1, 1 is the same as 1999, 11, 1. The Julian dates are proleptic Julian dates, i.e., values are returned without considering if Gregorian dates are valid for the given date. The input values are truncated to integers. """ year = int(year) month = int(month) day = int(day) a = ipart((month - 14) / 12.0) jd = ipart((1461 * (year + 4800 + a)) / 4.0) jd += ipart((367 * (month - 2 - 12 * a)) / 12.0) x = ipart((year + 4900 + a) / 100.0) jd -= ipart((3 * x) / 4.0) jd += day - 2432075.5 # was 32075; add 2400000.5 jd -= 0.5 # 0 hours; above JD is for midday, switch to midnight. 
return MJD_0, jd def jd2gcal(jd1, jd2): """Julian date to Gregorian calendar date and time of day. The input and output are for the proleptic Gregorian calendar, i.e., no consideration of historical usage of the calendar is made. Parameters ---------- jd1, jd2: int Sum of the two numbers is taken as the given Julian date. For example `jd1` can be the zero point of MJD (MJD_0) and `jd2` can be the MJD of the date and time. But any combination will work. Returns ------- y, m, d, f : int, int, int, float Four element tuple containing year, month, day and the fractional part of the day in the Gregorian calendar. The first three are integers, and the last part is a float. Examples -------- >>> jd2gcal(*gcal2jd(2000,1,1)) (2000, 1, 1, 0.0) >>> jd2gcal(*gcal2jd(1950,1,1)) (1950, 1, 1, 0.0) Out of range months and days are carried over to the next/previous year or next/previous month. See gcal2jd for more examples. >>> jd2gcal(*gcal2jd(1999,10,12)) (1999, 10, 12, 0.0) >>> jd2gcal(*gcal2jd(2000,2,30)) (2000, 3, 1, 0.0) >>> jd2gcal(*gcal2jd(-1999,10,12)) (-1999, 10, 12, 0.0) >>> jd2gcal(*gcal2jd(2000, -2, -4)) (1999, 9, 26, 0.0) >>> gcal2jd(2000,1,1) (2400000.5, 51544.0) >>> jd2gcal(2400000.5, 51544.0) (2000, 1, 1, 0.0) >>> jd2gcal(2400000.5, 51544.5) (2000, 1, 1, 0.5) >>> jd2gcal(2400000.5, 51544.245) (2000, 1, 1, 0.24500000000261934) >>> jd2gcal(2400000.5, 51544.1) (2000, 1, 1, 0.099999999998544808) >>> jd2gcal(2400000.5, 51544.75) (2000, 1, 1, 0.75) Notes ----- The last element of the tuple is the same as (hh + mm / 60.0 + ss / 3600.0) / 24.0 where hh, mm, and ss are the hour, minute and second of the day. See Also -------- gcal2jd """ from math import modf jd1_f, jd1_i = modf(jd1) jd2_f, jd2_i = modf(jd2) jd_i = jd1_i + jd2_i f = jd1_f + jd2_f # Set JD to noon of the current date. Fractional part is the # fraction from midnight of the current date. 
if -0.5 < f < 0.5: f += 0.5 elif f >= 0.5: jd_i += 1 f -= 0.5 elif f <= -0.5: jd_i -= 1 f += 1.5 l = jd_i + 68569 n = ipart((4 * l) / 146097.0) l -= ipart(((146097 * n) + 3) / 4.0) i = ipart((4000 * (l + 1)) / 1461001) l -= ipart((1461 * i) / 4.0) - 31 j = ipart((80 * l) / 2447.0) day = l - ipart((2447 * j) / 80.0) l = ipart(j / 11.0) month = j + 2 - (12 * l) year = 100 * (n - 49) + i + l return int(year), int(month), int(day), f def jcal2jd(year, month, day): """Julian calendar date to Julian date. The input and output are for the proleptic Julian calendar, i.e., no consideration of historical usage of the calendar is made. Parameters ---------- year : int Year as an integer. month : int Month as an integer. day : int Day as an integer. Returns ------- jd1, jd2: 2-element tuple of floats When added together, the numbers give the Julian date for the given Julian calendar date. The first number is always MJD_0 i.e., 2451545.5. So the second is the MJD. Examples -------- >>> jcal2jd(2000, 1, 1) (2400000.5, 51557.0) >>> year = [-4699, -2114, -1050, -123, -1, 0, 1, 123, 1678, 2000, ...: 2012, 2245] >>> month = [1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 12] >>> day = [1, 12, 23, 14, 25, 16, 27, 8, 9, 10, 11, 31] >>> x = [jcal2jd(y, m, d) for y, m, d in zip(year, month, day)] >>> for i in x: print i (2400000.5, -2395252.0) (2400000.5, -1451039.0) (2400000.5, -1062374.0) (2400000.5, -723765.0) (2400000.5, -679164.0) (2400000.5, -678776.0) (2400000.5, -678370.0) (2400000.5, -633798.0) (2400000.5, -65772.0) (2400000.5, 51871.0) (2400000.5, 56285.0) Notes ----- Unlike `gcal2jd`, negative months and days can result in incorrect Julian dates. """ year = int(year) month = int(month) day = int(day) jd = 367 * year x = ipart((month - 9) / 7.0) jd -= ipart((7 * (year + 5001 + x)) / 4.0) jd += ipart((275 * month) / 9.0) jd += day jd += 1729777 - 2400000.5 # Return 240000.5 as first part of JD. jd -= 0.5 # Convert midday to midnight. 
return MJD_0, jd def jd2jcal(jd1, jd2): """Julian calendar date for the given Julian date. The input and output are for the proleptic Julian calendar, i.e., no consideration of historical usage of the calendar is made. Parameters ---------- jd1, jd2: int Sum of the two numbers is taken as the given Julian date. For example `jd1` can be the zero point of MJD (MJD_0) and `jd2` can be the MJD of the date and time. But any combination will work. Returns ------- y, m, d, f : int, int, int, float Four element tuple containing year, month, day and the fractional part of the day in the Julian calendar. The first three are integers, and the last part is a float. Examples -------- >>> jd2jcal(*jcal2jd(2000, 1, 1)) (2000, 1, 1, 0.0) >>> jd2jcal(*jcal2jd(-4000, 10, 11)) (-4000, 10, 11, 0.0) >>> jcal2jd(2000, 1, 1) (2400000.5, 51557.0) >>> jd2jcal(2400000.5, 51557.0) (2000, 1, 1, 0.0) >>> jd2jcal(2400000.5, 51557.5) (2000, 1, 1, 0.5) >>> jd2jcal(2400000.5, 51557.245) (2000, 1, 1, 0.24500000000261934) >>> jd2jcal(2400000.5, 51557.1) (2000, 1, 1, 0.099999999998544808) >>> jd2jcal(2400000.5, 51557.75) (2000, 1, 1, 0.75) """ from math import modf jd1_f, jd1_i = modf(jd1) jd2_f, jd2_i = modf(jd2) jd_i = jd1_i + jd2_i f = jd1_f + jd2_f # Set JD to noon of the current date. Fractional part is the # fraction from midnight of the current date. if -0.5 < f < 0.5: f += 0.5 elif f >= 0.5: jd_i += 1 f -= 0.5 elif f <= -0.5: jd_i -= 1 f += 1.5 j = jd_i + 1402.0 k = ipart((j - 1) / 1461.0) l = j - (1461.0 * k) n = ipart((l - 1) / 365.0) - ipart(l / 1461.0) i = l - (365.0 * n) + 30.0 j = ipart((80.0 * i) / 2447.0) day = i - ipart((2447.0 * j) / 80.0) i = ipart(j / 11.0) month = j + 2 - (12.0 * i) year = (4 * k) + n + i - 4716.0 return int(year), int(month), int(day), f <file_sep>/docs/GXMPLY.rst .. _GXMPLY: GXMPLY class ================================== .. 
autoclass:: geosoft.gxapi.GXMPLY
   :members:
<file_sep>/geosoft/gxapi/GXMAPTEMPLATE.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref

### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXMAPTEMPLATE(gxapi_cy.WrapMAPTEMPLATE):
    """
    GXMAPTEMPLATE class.

    A `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` wraps and provides manipulation and usage for the XML content in map template files.
    See the annotated schema file maptemplate.xsd in the <GEOSOFT>\\maptemplate folder and the accompanying documentation
    in that folder for documentation on the file format.
    """

    def __init__(self, handle=0):
        super(GXMAPTEMPLATE, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`

        :returns: A null `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>`
        :rtype:   GXMAPTEMPLATE
        """
        return GXMAPTEMPLATE()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0



# Content Manipulation Methods



    def get_tmp_copy(self, tmp):
        """
        Get a temporary XML file for manipulation of the map template.

        :param tmp:  Returned temporary map template file name
        :type  tmp:  str_ref

        .. versionadded:: 6.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** After manipulating contents the object may be updated by a call to
        the UpdateFromTmpCopy method.
        """
        tmp.value = self._get_tmp_copy(tmp.value.encode())

    def update_from_tmp_copy(self, tmp):
        """
        Update the object contents from a temporary XML file
        that may have been manipulated externally.

        :param tmp:  Temporary map template file name
        :type  tmp:  str

        .. versionadded:: 6.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** This method will not modify the original contents of the file until
        a call to the the Commit method is made or the object is destroyed.
        A call to the Discard method will restore the contents to that of the original file.
        The temporary file is not deleted and should be deleted by the caller to not leak file resources.
        """
        self._update_from_tmp_copy(tmp.encode())



# File Methods



    def commit(self):
        """
        Commit any changes to the map template to disk

        .. versionadded:: 6.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._commit()

    @classmethod
    def create(cls, name, base, mode):
        """
        Create a `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` from an existing file.

        :param name:  Map Template file name
        :param base:  Map Template base template to create from
        :param mode:  :ref:`MAPTEMPLATE_OPEN`
        :type  name:  str
        :type  base:  str
        :type  mode:  int

        :returns:     `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` Object
        :rtype:       GXMAPTEMPLATE

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The base template name should be the file name part of a geosoft_maptemplate
        file in the <geosoft>\\maptemplate or <geosoftuser>\\maptemplate folders.
        A base file in the user folder will override any in the Geosoft install dir.
        """
        ret_val = gxapi_cy.WrapMAPTEMPLATE._create(GXContext._get_tls_geo(), name.encode(), base.encode(), mode)
        return GXMAPTEMPLATE(ret_val)

    def discard(self):
        """
        Discard all changes made to the map template and reload from disk.

        .. versionadded:: 6.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._discard()

    def get_file_name(self, name):
        """
        Get the file name of the map template.

        :param name:  Returned map template file name
        :type  name:  str_ref

        .. versionadded:: 6.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        name.value = self._get_file_name(name.value.encode())



# Map Making



    def create_map(self, map, group):
        """
        Create a map from the map template

        :param map:    New map file name (if it exists it will be overwritten)
        :param group:  Group name to use for settings
        :type  map:    str
        :type  group:  str

        .. versionadded:: 6.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._create_map(map.encode(), group.encode())



# Render/Preview



    def refresh(self):
        """
        Refresh the map template with any newly saved items

        .. versionadded:: 7.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._refresh()

    def render_preview(self, hdc, left, bottom, right, top):
        """
        Create a preview of the map template onto a
        Windows DC handle

        :param hdc:     DC Handle
        :param left:    Left value of the render rect in Windows coordinates (bottom>top)
        :param bottom:  Bottom value
        :param right:   Right value
        :param top:     Top value
        :type  hdc:     int
        :type  left:    int
        :type  bottom:  int
        :type  right:   int
        :type  top:     int

        .. versionadded:: 6.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._render_preview(hdc, left, bottom, right, top)

    def render_preview_map_production(self, hdc, left, bottom, right, top):
        """
        Render a preview for map sheet production purposes

        :param hdc:     DC Handle (pass 0 to just query the Data view pixel location)
        :param left:    Left value of the render rect in Windows coordinates (bottom>top)
        :param bottom:  Bottom value
        :param right:   Right value
        :param top:     Top value
        :type  hdc:     int
        :type  left:    int_ref
        :type  bottom:  int_ref
        :type  right:   int_ref
        :type  top:     int_ref

        .. versionadded:: 6.4

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** This method can also be used to get the data view pixel location
        by passing a null DC handle. This helps to plot the view contents
        preview from another location.
        """
        left.value, bottom.value, right.value, top.value = self._render_preview_map_production(hdc, left.value, bottom.value, right.value, top.value)




### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/geosoft/gxpy/tests/test_geometry.py
import unittest
import os
import numpy as np
import json

import geosoft
import geosoft.gxpy.map as gxmap
import geosoft.gxpy.geometry as gxgeo
import geosoft.gxpy.vv as gxvv
import geosoft.gxpy.coordinate_system as gxcs

from base import GXPYTest


class Test(GXPYTest):
    def test_version(self):
        self.start()
        self.assertEqual(gxmap.__version__, geosoft.__version__)

    def test_exception(self):
        self.start()
        self.assertRaises(ValueError, gxgeo.Point, [1, 'yada', 2])

    def test_base(self):
        self.start()

        g = gxgeo.Geometry()
        self.assertEqual(g.name, '_geometry_')
self.assertEqual(g.coordinate_system, None) self.assertEqual(g.extent, None) self.assertEqual(g.extent_xyz, (None, None, None, None, None, None)) self.assertEqual(g.extent_xy, (None, None, None, None)) self.assertEqual(g.dimension, (None, None, None)) self.assertEqual(g.dimension_xy, (None, None)) self.assertEqual(g.centroid, None) self.assertEqual(g.centroid_xyz, (None, None, None)) self.assertEqual(g.centroid_xy, (None, None)) def test_new_point(self): self.start() p = gxgeo.Point((5, 10)) self.assertEqual(p.xyz, (5, 10, 0)) self.assertEqual(len(p), 1) self.assertEqual(tuple(p.pp[0]), (5, 10, 0)) p = gxgeo.Point((5, 10), z=2) self.assertEqual(p.xyz, (5, 10, 2)) p = gxgeo.Point((5, 10, 2)) self.assertEqual(p.xyz, (5, 10, 2)) p = gxgeo.Point(np.array((5, 10), dtype=np.float64)) self.assertEqual(p.xyz, (5, 10, 0)) p = gxgeo.Point(np.array((5, 10), dtype=np.float64), z=2) self.assertEqual(p.xyz, (5, 10, 2)) p = gxgeo.Point(np.array((5, 10, 2), dtype=np.float64)) self.assertEqual(p.xyz, (5, 10, 2)) self.assertEqual(p.x, 5) self.assertEqual(p.y, 10) self.assertEqual(p.z, 2) p.x = 2.5 self.assertEqual(p.x, 2.5) self.assertEqual(p.y, 10) self.assertEqual(p.z, 2) p.y = 7.5 self.assertEqual(p.x, 2.5) self.assertEqual(p.y, 7.5) self.assertEqual(p.z, 2) p.z = 3.3 self.assertEqual(p.x, 2.5) self.assertEqual(p.y, 7.5) self.assertEqual(p.z, 3.3) def test_cs(self): self.start() p = gxgeo.Point((5, 10)) self.assertTrue(p == p) self.assertTrue(gxgeo.Point((1,2), coordinate_system="WGS 84") == gxgeo.Point((1,2), coordinate_system="WGS 84")) self.assertTrue(gxgeo.Point((1, 2), coordinate_system="WGS 84") == gxgeo.Point((1, 2))) self.assertTrue(gxgeo.Point((1, 2)) == gxgeo.Point((1, 2))) s = "WGS 84 / UTM zone 32N <0, 0, 0, 10, 15, 32>" p = gxgeo.Point((5,10), coordinate_system=s) hcsd = p.coordinate_system.coordinate_dict() self.assertEqual(hcsd['name'], "WGS 84 / UTM zone 32N <0,0,0,10,15,32>") self.assertTrue(p == p) s = s + ' [geoid]' pp = gxgeo.PPoint(((8, 12), (5, 10)), 
coordinate_system=s) hcsd = p.coordinate_system.coordinate_dict() self.assertEqual(hcsd['name'], "WGS 84 / UTM zone 32N <0,0,0,10,15,32>") self.assertEqual(pp.coordinate_system.vcs, "geoid") self.assertTrue(pp == pp) self.assertTrue(pp == gxgeo.PPoint(((8, 12), (5, 10)))) self.assertFalse(pp == gxgeo.PPoint(((8, 12), (5, 10)), coordinate_system='WGS 84 [geoid]')) self.assertFalse(gxgeo.PPoint(((8, 12), (5, 10)), coordinate_system='WGS 84 [geoid]') == pp) def test_point(self): self.start() p = gxgeo.Point((5,10)) self.assertEqual(p.p.tolist(), [5.0, 10.0, 0.0]) self.assertEqual(p.xy, (5.0,10.0)) self.assertEqual(p.xyz, (5.0, 10.0, 0.0)) self.assertEqual(p.x, 5.0) self.assertEqual(p.y, 10.0) self.assertEqual(p.z, 0.0) p -= (0, 0, 15) self.assertEqual(p.xyz, (5.0, 10.0, -15.0)) p = gxgeo.Point((5,10,3.5)) self.assertEqual(p.p.tolist(), [5.0, 10.0, 3.5]) self.assertEqual(p.xyz, (5.0, 10.0, 3.5)) self.assertEqual(p.x, 5.0) self.assertEqual(p.y, 10.0) self.assertEqual(p.z, 3.5) p = gxgeo.Point(4) self.assertEqual(p.xyz, (4.0, 4.0, 4.0)) p += (1, 2, 3) self.assertEqual(p.xyz, (5., 6., 7.)) p *= 2 self.assertEqual(p.xyz, (10., 12., 14.)) p /= 2 self.assertEqual(p.xyz, (5., 6., 7.)) p = -p self.assertEqual(p.xyz, (-5., -6., -7.)) p = p + 1 self.assertEqual(p.xyz, (-4., -5., -6.)) p = p + (1, 2) self.assertEqual(p.xyz, (-3., -3., -6.)) p = p + (5, 2, 6) self.assertEqual(p.xyz, (2., -1., 0.)) p.x = p.x + 2 p.y -= 2 p.z = 5 self.assertEqual(p.xyz, (4., -3., 5.)) p.xy = (99, '88') self.assertEqual(p.xyz, (99., 88., 5.)) p.xyz = [0, 1, 45] self.assertEqual(p.xyz, (0., 1., 45.)) def test_ppoint(self): self.start() points = [(5, 10), (6, 11), (7, 12)] pp = gxgeo.PPoint(points) self.assertEqual(len(pp), 3) i = 0 for p in pp: self.assertEqual(p.xy, points[i]) i += 1 i = 0 for p in pp: self.assertEqual(p.xyz, (points[i][0], points[i][1], 0.0)) i += 1 p = pp[1] self.assertEqual(p.xy, points[1]) pp -= (0, 0, 15) self.assertEqual(pp[0].xyz, (5.0, 10.0, -15.0)) 
self.assertEqual(pp[2].xyz, (7.0, 12.0, -15.0)) pp += gxgeo.Point((0, 0, 15)) self.assertEqual(pp[0].xyz, (5.0, 10.0, 0.0)) self.assertEqual(pp[2].xyz, (7.0, 12.0, 0.0)) px = pp + gxgeo.PPoint(((0, 0, 15), (-1, -1, -10), (1, 2, 3))) self.assertEqual(px[0].xyz, (5.0, 10.0, 15.0)) self.assertEqual(px[2].xyz, (8.0, 14.0, 3.0)) pp -= gxgeo.PPoint(((0, 0, 15), (-1, -1, -10), (0, 0, 0))) self.assertEqual(pp[0].xyz, (5.0, 10.0, -15.0)) self.assertEqual(pp[1].xyz, (7, 12, 10)) self.assertEqual(pp[2].xyz, (7., 12., 0.)) pp -= gxgeo.Point((1, 2, 3)) self.assertEqual(pp[0].xyz, (4.0, 8.0, -18.0)) self.assertEqual(pp[1].xyz, (6, 10, 7)) self.assertEqual(pp[2].xyz, (6., 10., -3.)) pp = gxgeo.PPoint([(5, 10, 3.5)]) self.assertEqual(pp[0].xyz, (5.0, 10.0, 3.5)) pp = gxgeo.PPoint(((1, 2, 3), (4, 5, 6), (7, 8, 9))) pp += (1, 2, 3) self.assertEqual(pp[0].xyz, (2., 4., 6.)) self.assertEqual(pp[2].xyz, (8., 10., 12.)) pp *= (1, 2, 3) pp /= (1, 2, 3) self.assertEqual(pp[0].xyz, (2., 4., 6.)) self.assertEqual(pp[2].xyz, (8., 10., 12.)) pp *= (1, 2, 3) pp /= gxgeo.Point((1, 2, 3)) self.assertEqual(pp[0].xyz, (2., 4., 6.)) self.assertEqual(pp[2].xyz, (8., 10., 12.)) pp *= (1, 2, 3) p = gxgeo.Point((1, 2, 3)) pp /= gxgeo.PPoint((p, p, p)) self.assertEqual(pp[0].xyz, (2., 4., 6.)) self.assertEqual(pp[2].xyz, (8., 10., 12.)) pp *= gxgeo.Point((-1, -1, -1)) self.assertEqual(pp[0].xyz, (-2., -4., -6.)) self.assertEqual(pp[2].xyz, (-8., -10., -12.)) pp = -pp pp *= 2 self.assertEqual(pp[1].xyz, (10., 14., 18.)) pp /= 2 self.assertEqual(pp[1].xyz, (5., 7., 9.)) pp = -pp self.assertEqual(pp[1].xyz, (-5., -7., -9.)) pp = pp + (1, 2) self.assertEqual(pp[1].xyz, (-4., -5., -9.)) pp = pp + (5, 2, 6) self.assertEqual(pp[1].xyz, (1., -3., -3.)) pp = pp + ((1, 2, 3), (4, 5, 6), (7, 8, 9)) self.assertEqual(pp[1].xyz, (5., 2., 3.)) pp = pp * gxgeo.PPoint(((1, 2, 3), (4, 5, 6), (7, 8, 9))) self.assertEqual(pp[1].xyz, (20., 10., 18.)) self.assertEqual(len(pp), 3) self.assertEqual(tuple(pp.x), (5., 20., 35.)) 
self.assertEqual(tuple(pp.y), (4., 10., 16.)) self.assertEqual(tuple(pp.z), (9., 18., 27.)) points = [(5, 10), (6, 11), (7, 12)] pp = gxgeo.PPoint(points, z=3.5) self.assertEqual(pp.x.tolist(), [5., 6., 7.]) self.assertEqual(pp.z.tolist(), [3.5, 3.5, 3.5]) self.assertEqual(pp.xy.tolist(), np.array(points).tolist()) pp.x = 4.8 pp.y = (8., 5., 3.) pp.z = (1., 2., "-3") self.assertEqual(pp.x.tolist(), [4.8, 4.8, 4.8]) self.assertEqual(pp.y.tolist(), [8., 5., 3.]) self.assertEqual(pp.z.tolist(), [1., 2., -3.]) pp.xy = [(1, 2), (3,4), (5,6)] self.assertEqual(pp.xy.tolist(), [[1., 2.], [3., 4.], [5., 6.]]) pp = gxgeo.PPoint(((500000, 6000000), (500001, 6000001)), coordinate_system='NAD83 / UTM zone 15N') pp27 = gxgeo.PPoint(pp, coordinate_system='NAD27 / UTM zone 15N') self.assertEqual(pp27[0].xyz, (500016.35614845896, 5999777.5863711238, 0.0)) self.assertEqual(pp27[1].xyz, (500017.35614260647, 5999778.5863652565, 0.0)) self.assertEqual(pp27.length, 2) def test_pp_cs(self): self.start() p0 = gxgeo.Point((500000, 6000000)) p1 = gxgeo.Point((500000, 6000000), coordinate_system='NAD83 / UTM zone 15N') p2 = gxgeo.Point((500000, 6000000), coordinate_system='NAD27 / UTM zone 15N') pp = gxgeo.PPoint((p0, p1, p2)) self.assertEqual(pp.coordinate_system, 'NAD83 / UTM zone 15N') self.assertEqual(pp[0].xyz, (500000, 6000000, 0)) self.assertEqual(pp[1].xyz, (500000, 6000000, 0)) self.assertEqual(pp[2].xyz, (499983.64366013405, 6000222.4158355873, 0.0)) pp = gxgeo.PPoint((p0, p2, p1)) self.assertEqual(pp.coordinate_system, 'NAD27 / UTM zone 15N') self.assertEqual(pp[0].xyz, (500000, 6000000, 0)) self.assertEqual(pp[1].xyz, (500000, 6000000, 0)) self.assertEqual(pp[2].xyz, (500016.35614845896, 5999777.5863711238, 0.0)) pp = gxgeo.PPoint((p0, p1, p2), coordinate_system='NAD27 / UTM zone 15N') self.assertEqual(pp.coordinate_system, 'NAD27 / UTM zone 15N') self.assertEqual(pp[0].xyz, (500000, 6000000, 0)) self.assertEqual(pp[1].xyz, (500016.35614845896, 5999777.5863711238, 0.0)) 
self.assertEqual(pp[2].xyz, (500000, 6000000, 0)) def test_ppoint_constructors(self): self.start() def verify(): self.assertEqual(pp.x.tolist(), [1., 4., 7., 10., 13.]) self.assertEqual(pp.z.tolist(), [3., 6., 9., 12., 15.]) self.assertEqual(pp.xy.tolist(), nppp[:, :2].tolist()) lpp = ((1, 2, 3), (4, 5, 6), (7, 8, 9), (10, 11, 12), (13, 14, 15)) nppp = np.array(lpp) pp = gxgeo.PPoint(lpp) verify() nppp = np.array(lpp) pp = gxgeo.PPoint(nppp) verify() vvx = gxvv.GXvv(nppp[:, 0]) vvy = gxvv.GXvv(nppp[:, 1]) vvz = gxvv.GXvv(nppp[:, 2]) pp = gxgeo.PPoint((vvx, vvy, vvz)) verify() pp = gxgeo.PPoint((vvx, vvy), z=5) self.assertEqual(pp.x.tolist(), [1., 4., 7., 10., 13.]) self.assertEqual(pp.z.tolist(), [5, 5, 5, 5, 5]) vvx, vvy, vvz = pp. make_xyz_vv() self.assertEqual(tuple(vvx), ((1, 0.0), (4, 1.0), (7, 2.0), (10, 3.0), (13, 4.0))) self.assertEqual(tuple(vvy), ((2, 0.0), (5, 1.0), (8, 2.0), (11, 3.0), (14, 4.0))) self.assertEqual(tuple(vvz), ((5.0, 0.0), (5.0, 1.0), (5.0, 2.0), (5.0, 3.0), (5.0, 4.0))) pps = [] for xyz in lpp: pps.append(gxgeo.Point(xyz)) pp = gxgeo.PPoint(pps) verify() e = pp.extent self.assertTrue(e[0] == gxgeo.Point((1, 2, 3))) self.assertTrue(e[1] == gxgeo.Point((13, 14, 15))) def test_copy_geometry(self): self.start() p1 = gxgeo.Point((1,2)) p2 = p1 self.assertTrue(p1 is p2) p2 = gxgeo.Point(p2) self.assertFalse(p1 is p2) self.assertTrue(p1 == p2) p2.cs = "WGS 84" self.assertTrue(p1 == p2) p1.cs = "WGS 84" self.assertTrue(p1 == p2) p1.cs = gxcs.Coordinate_system("WGS 84 [geoid]") self.assertTrue(p1 == p2) def test_p2(self): self.start() b1 = gxgeo.Point2((gxgeo.Point((0, 1)), (10, 20, -1))) self.assertEqual(len(b1), 2) self.assertEqual(tuple(b1.pp[0]), (0, 1, 0)) self.assertEqual(tuple(b1.pp[1]), (10, 20, -1)) self.assertEqual(b1.centroid.xyz, (5.0, 10.5, -0.5)) self.assertEqual(len(b1), 2) self.assertEqual('_point2_[(0.0, 1.0, 0.0) (10.0, 20.0, -1.0)]', str(b1)) self.assertEqual(b1.x2, (0., 10.)) self.assertEqual(b1.y2, (1., 20.)) 
self.assertEqual(b1.z2, (0, -1.)) b2 = gxgeo.Point2(((0, 1), (10, 20, -1))) self.assertTrue(b1 == b2) b1 = gxgeo.Point2((gxgeo.Point((0, 1, -20)),(10, 20, -1))) self.assertEqual('_point2_[(0.0, 1.0, -20.0) (10.0, 20.0, -1.0)]', str(b1)) self.assertEqual(b1.x2, (0., 10.)) self.assertEqual(b1.y2, (1., 20.)) self.assertEqual(b1.z2, (-20., -1.)) b2 = gxgeo.Point2((gxgeo.Point((b1.x2[0], b1.y2[0], b1.z2[0])), gxgeo.Point((b1.x2[1], b1.y2[1], b1.z2[1])))) self.assertTrue(b1 == b2) b2 = gxgeo.Point2((gxgeo.Point((b1.x2[1], b1.y2[1], b1.z2[1])), gxgeo.Point((b1.x2[0], b1.y2[0], b1.z2[0]))), coordinate_system="WGS 84") self.assertTrue(b1 == b2) b2 = gxgeo.Point2(((b1.x2[1], b1.y2[1], b1.z2[1]), (b1.x2[0], b1.y2[0], b1.z2[0])), coordinate_system="WGS 84") self.assertTrue(b1 == b2) b2 = gxgeo.Point2((b1.x2[1], b1.y2[1], b1.z2[1], b1.x2[0], b1.y2[0], b1.z2[0]), coordinate_system="WGS 84") self.assertTrue(b1 == b2) c = gxgeo.Point(((b2.p0.x + b2.p1.x) * 0.5, (b2.p0.y + b2.p1.y) * 0.5, (b2.p0.z + b2.p1.z) * 0.5)) self.assertEqual(b2.centroid, c) self.assertEqual(b2.dimension, (abs(b2.p1.x - b2.p0.x), abs(b2.p1.y - b2.p0.y), abs(b2.p1.z - b2.p0.z))) self.assertEqual(b2.dimension_xy, (abs(b2.p1.x - b2.p0.x), abs(b2.p1.y - b2.p0.y))) self.assertEqual(b2.extent_xyz, (0.0, 1.0, -20.0, 10.0, 20.0, -1.0)) self.assertEqual(b2.extent_xy, (0.0, 1.0, 10.0, 20.0)) b = gxgeo.Point2(5) self.assertEqual(b[0].xyz, (5, 5, 5)) self.assertEqual(b[1].xyz, (5, 5, 5)) b = gxgeo.Point2((5, 6)) self.assertEqual(b[0].xyz, (5, 6, 0)) self.assertEqual(b[1].xyz, (5, 6, 0)) b = gxgeo.Point2((5, 6, 7)) self.assertEqual(b[0].xyz, (5, 6, 7)) self.assertEqual(b[1].xyz, (5, 6, 7)) b = gxgeo.Point2((5, 6, 7, 8)) self.assertEqual(b[0].xyz, (5, 6, 0)) self.assertEqual(b[1].xyz, (7, 8, 0)) b = gxgeo.Point2((5, 6, 7, 8, 9, 0)) self.assertEqual(b[0].xyz, (5, 6, 7)) self.assertEqual(b[1].xyz, (8, 9, 0)) self.assertRaises(gxgeo.GeometryException, gxgeo.Point2, (2, 3, 4, 5, 6)) def test_cs_math(self): self.start() p = 
gxgeo.Point((5, 10)) self.assertTrue(p == p) self.assertTrue(gxgeo.Point((1,2), coordinate_system="WGS 84") == gxgeo.Point((1,2), coordinate_system="WGS 84")) self.assertTrue(gxgeo.Point((1, 2), coordinate_system="WGS 84") == gxgeo.Point((1, 2))) self.assertTrue(gxgeo.Point((1, 2)) == gxgeo.Point((1, 2))) cs = "NAD83 / UTM zone 32N>" p = gxgeo.Point((500000,6000000), coordinate_system=cs) self.assertEqual(str(p.coordinate_system), "NAD83 / UTM zone 32N") p27 = gxgeo.Point(p, "NAD27 / UTM zone 32N") self.assertEqual(str(p27.coordinate_system), "NAD27 / UTM zone 32N") self.assertAlmostEqual(p27.x, 499840.780459, 3) self.assertAlmostEqual(p27.y, 5999920.58165, 3) self.assertFalse(p == p27) p27 = gxgeo.Point(p, "NAD27 / UTM zone 32N") self.assertEqual(str(p27.coordinate_system), "NAD27 / UTM zone 32N") self.assertAlmostEqual(p27.x, 499840.780459, 3) self.assertAlmostEqual(p27.y, 5999920.58165, 3) self.assertFalse(p == p27) pd = p - p27 self.assertEqual(str(pd.coordinate_system), "NAD83 / UTM zone 32N") self.assertAlmostEqual(pd.x, 0., 2) self.assertAlmostEqual(pd.y, 0., 2) pd = p27 - p self.assertEqual(str(pd.coordinate_system), "NAD27 / UTM zone 32N") self.assertAlmostEqual(pd.x, 0., 2) self.assertAlmostEqual(pd.y, 0., 2) pp = p + (10, 5) self.assertEqual(str(pp.coordinate_system), "NAD83 / UTM zone 32N") self.assertEqual(pp.xy, (500010, 6000005)) p = gxgeo.Point2(((500000, 6000000), (500001, 6000001)), coordinate_system=cs) self.assertEqual(str(p.coordinate_system), "NAD83 / UTM zone 32N") p27 = gxgeo.Point2(p, "NAD27 / UTM zone 32N") self.assertEqual(str(p27.coordinate_system), "NAD27 / UTM zone 32N") self.assertAlmostEqual(p27.p0.x, 499840.780459, 3) self.assertAlmostEqual(p27.p0.y, 5999920.58165, 3) self.assertAlmostEqual(p27.p1.x, 499841.780459, 3) self.assertAlmostEqual(p27.p1.y, 5999921.58165, 3) self.assertFalse(p == p27) pp = p / 2 self.assertEqual(pp[1].xyz, (250000.5, 3000000.5, 0.0)) pp = p / gxgeo.Point(2) self.assertEqual(pp[1].xyz, (250000.5, 3000000.5, 
0.0)) pp = p / gxgeo.Point2((gxgeo.Point(2), gxgeo.Point(3))) self.assertEqual(pp[0].xyz, (250000.0, 3000000.0, 0.0)) self.assertEqual(pp[1].xyz, (166667.0, 2000000.3333333333, 0.0)) p27 = gxgeo.Point2(p, "NAD27 / UTM zone 32N") self.assertEqual(str(p27.coordinate_system), "NAD27 / UTM zone 32N") self.assertAlmostEqual(p27.p0.x, 499840.780459, 3) self.assertAlmostEqual(p27.p0.y, 5999920.58165, 3) self.assertAlmostEqual(p27.p1.x, 499841.780459, 3) self.assertAlmostEqual(p27.p1.y, 5999921.58165, 3) self.assertEqual(tuple(p27[0]), (499840.78045944084, 5999920.5816528751, 0.0)) self.assertEqual(tuple(p27[1]), (499841.7804697603, 5999921.5816632193, 0.0)) for pp in p27: self.assertTrue(isinstance(pp, gxgeo.Point)) self.assertFalse(p == p27) pp = p + p27 self.assertEqual(tuple(pp[0]), (999999.99835706223, 12000000.002281997, 0.0)) self.assertEqual(tuple(pp[1]), (1000001.9983570619, 12000002.002281997, 0.0)) pp = pp - p27[0] self.assertEqual(tuple(pp[0]), (500000.0, 6000000, 0.0)) self.assertEqual(tuple(pp[1]), (500001.99999999965, 6000002, 0.0)) pd = p - p27 self.assertEqual(str(pd.coordinate_system), "NAD83 / UTM zone 32N") self.assertAlmostEqual(pd.p0.x, 0., 2) self.assertAlmostEqual(pd.p0.y, 0., 2) self.assertAlmostEqual(pd.p1.x, 0., 2) self.assertAlmostEqual(pd.p1.y, 0., 2) pp = pd + 1 self.assertAlmostEqual(pp.p0.x, 1., 2) self.assertAlmostEqual(pp.p0.y, 1., 2) self.assertAlmostEqual(pp.p1.x, 1., 2) self.assertAlmostEqual(pp.p1.y, 1., 2) pp = pd + gxgeo.Point(1) self.assertAlmostEqual(pp.p0.x, 1., 2) self.assertAlmostEqual(pp.p0.y, 1., 2) self.assertAlmostEqual(pp.p1.x, 1., 2) self.assertAlmostEqual(pp.p1.y, 1., 2) pp = -pp self.assertAlmostEqual(pp.p0.x, -1., 2) self.assertAlmostEqual(pp.p0.y, -1., 2) self.assertAlmostEqual(pp.p1.x, -1., 2) self.assertAlmostEqual(pp.p1.y, -1., 2) pp = -pp - 1 self.assertAlmostEqual(pd.p0.x, 0., 2) self.assertAlmostEqual(pd.p0.y, 0., 2) self.assertAlmostEqual(pd.p1.x, 0., 2) self.assertAlmostEqual(pd.p1.y, 0., 2) pz = 
gxgeo.Point2(((0, 1, 2), (1, 2, 3))) pp = (pz + 1) * 5 self.assertEqual(tuple(pp[0]), (5., 10., 15.)) self.assertEqual(tuple(pp[1]), (10., 15., 20.)) pp = (pz + 1) * gxgeo.Point((2, 5, 10)) self.assertEqual(tuple(pp[0]), (2., 10., 30.)) self.assertEqual(tuple(pp[1]), (4., 15., 40.)) pp = (pz + 1) * gxgeo.Point2(((2, 5, 10), (1, 2, 3))) self.assertEqual(tuple(pp[0]), (2., 10., 30.)) self.assertEqual(tuple(pp[1]), (2.0, 6.0, 12.0)) pd = p27 - p self.assertEqual(str(pd.coordinate_system), "NAD27 / UTM zone 32N") self.assertAlmostEqual(pd.p0.x, 0., 2) self.assertAlmostEqual(pd.p0.y, 0., 2) self.assertAlmostEqual(pd.p1.x, 0., 2) self.assertAlmostEqual(pd.p1.y, 0., 2) pp = p + (10, 5) self.assertEqual(str(pp.coordinate_system), "NAD83 / UTM zone 32N") self.assertEqual(pp.p0.xy, (500010, 6000005)) self.assertEqual(pp.p1.xy, (500011, 6000006)) pp.x2 = (1, 2) pp.y2 = (3, 4) pp.z2 = (5, 6) self.assertEqual(pp.p0.xyz, (1, 3, 5)) self.assertEqual(pp.p1.xyz, (2, 4, 6)) def test_names(self): self.start() self.assertEqual(gxgeo.Point((1, 2)).name, '_point_') self.assertEqual(gxgeo.Point((1, 2), name='maki').name, 'maki') self.assertTrue(gxgeo.Point((1, 2)) == gxgeo.Point((1,2), name='maki')) self.assertEqual(gxgeo.Point(gxgeo.Point((1, 2))).name, '_point_') self.assertEqual(gxgeo.Point(gxgeo.Point((1, 2)), name='maki').name, 'maki') p1 = (1, 2) p2 = (2, 3) self.assertEqual(gxgeo.Point2((p1, p2)).name, '_point2_') self.assertEqual(gxgeo.Point2((p1, p2), name='maki').name, 'maki') self.assertTrue(gxgeo.Point2((p1, p2)) == gxgeo.Point2((p1, p2), name='maki')) pp = ((1, 2), (3, 2), (4, 5)) self.assertEqual(gxgeo.PPoint(pp).name, '_ppoint_') self.assertEqual(gxgeo.PPoint(pp, name='maki').name, 'maki') self.assertTrue(gxgeo.PPoint(pp) == gxgeo.PPoint(pp, name='maki')) def test_mesh(self): self.start() v = list(np.array(range(27), dtype=np.float).reshape(-1, 3)) f = list(np.array(range(len(v))).reshape(-1, 3)) m = gxgeo.Mesh((f, v)) self.assertEqual(len(m.faces), len(f)) 
self.assertEqual(len(m.verticies), len(v)) self.assertEqual(tuple(m[2][2]), (24., 25., 26.)) self.assertEqual(tuple(m.extent_minimum), (0, 1, 2)) m2 = m + 1.5 self.assertEqual(tuple(m2[2][2]), (25.5, 26.5, 27.5)) self.assertEqual(tuple(m2.extent_minimum), (1.5, 2.5, 3.5)) m2 = m2 - 1.5 self.assertEqual(tuple(m2[2][2]), (24., 25., 26.)) self.assertEqual(tuple(m2.extent_minimum), (0, 1, 2)) m2 = m + (1, 10, 50) self.assertEqual(tuple(m2[2][2]), (25., 35., 76.)) self.assertEqual(tuple(m2.extent_minimum), (1, 11, 52)) m2 = m2 - (1, 10, 50) self.assertEqual(tuple(m2[2][2]), (24., 25., 26.)) self.assertEqual(tuple(m2.extent_minimum), (0, 1, 2)) m2 = m + (m + (1, 10, 50)) self.assertEqual(tuple(m2.extent_maximum), (25., 35., 76.)) self.assertEqual(m2.extent_maximum_xyz, (25., 35., 76.)) self.assertEqual(m2.extent_minimum_xyz, (0, 1, 2)) self.assertEqual(m2.extent_maximum_xy, (25., 35.)) self.assertEqual(m2.extent_minimum_xy, (0, 1)) self.assertEqual(m2.centroid_xyz, (12.5, 18.0, 39.0)) self.assertEqual(m2.centroid_xy, (12.5, 18.0)) self.assertEqual(len(m2), 2 * len(m)) mm = gxgeo.Mesh((np.append(m2.faces, m2.faces, axis=0), m2.verticies)) self.assertEqual(len(mm), 12) self.assertEqual(len(mm.verticies), 18) mp = mm.point_array() self.assertEqual(len(mp), 18) mp = mm.point_array(unique=False) self.assertEqual(len(mp), 36) v = np.array(v) v[:, 0] += 500000 v[:, 1] += 6000000 m = gxgeo.Mesh((f, v), coordinate_system="NAD83 / UTM zone 17N") m = gxgeo.Mesh(m, coordinate_system="NAD27 / UTM zone 17N") self.assertEqual(str(m.coordinate_system), "NAD27 / UTM zone 17N") self.assertEqual(tuple(m[2][2]), (500006.87887296296, 5999802.6514122421, 26.0)) m83 = gxgeo.Mesh(m, coordinate_system="NAD83 / UTM zone 17N") self.assertEqual(m83.coordinate_system, "NAD83 / UTM zone 17N") self.assertEqual(tuple(m[2][2]), (500006.87887296296, 5999802.6514122421, 26.0)) self.assertEqual(tuple(m83[2][2]), (500024.00010005565, 6000025.0009018015, 26.0)) f = np.array(f) f1vv = gxvv.GXvv(f[:, 0]) f2vv 
= gxvv.GXvv(f[:, 1]) f3vv = gxvv.GXvv(f[:, 2]) xvv = gxvv.GXvv(v[:, 0]) yvv = gxvv.GXvv(v[:, 1]) zvv = gxvv.GXvv(v[:, 2]) m = gxgeo.Mesh(((f1vv, f2vv, f3vv), (xvv, yvv, zvv)), coordinate_system="NAD83 / UTM zone 17N", name='vv') self.assertEqual(m.name, 'vv') mm = gxgeo.Mesh(m, coordinate_system="NAD27 / UTM zone 17N") self.assertEqual(mm.name, '_mesh_') m2 = gxgeo.Mesh(mm, coordinate_system="NAD27 / UTM zone 17N", name='vv_copy') self.assertEqual(m2.name, 'vv_copy') self.assertEqual(str(m2.coordinate_system), "NAD27 / UTM zone 17N") self.assertEqual(tuple(m2[2][2]), (500006.87887296296, 5999802.6514122421, 26.0)) self.assertEqual(str(m2), 'vv_copy(3 faces)') self.assertTrue(mm == m2) self.assertEqual(len(list(m2)), 3) self.assertFalse(m == m2) m3 = m + m2 self.assertEqual(len(m3), 6) self.assertEqual(m3.length, 6) f1vv, f2vv, f3vv = m3.faces_vv() self.assertEqual(tuple(f1vv.np), (0, 3, 6, 9, 12, 15)) self.assertEqual(tuple(f2vv.np), (1, 4, 7, 10, 13, 16)) self.assertEqual(tuple(f3vv.np), (2, 5, 8, 11, 14, 17)) xvv, yvv, zvv = m3.verticies_vv() self.assertEqual(tuple(xvv.np[:3]), (500000.0, 500003.0, 500006.0)) self.assertEqual(tuple(yvv.np[:3]), (6000001.0, 6000004.0, 6000007.0)) self.assertEqual(tuple(zvv.np[:3]), (2.0, 5.0, 8.0)) def test_pp_from_list(self): self.start() plinelist = [[110, 5], [120, 20], [130, 15], [150, 50], [160, 70], [175, 35], [190, 65], [220, 50], [235, 18.5]] pp = gxgeo.PPoint.from_list(plinelist) self.assertEqual(len(pp), len(plinelist)) self.assertEqual(pp.extent_minimum_xyz, (110, 5, 0)) self.assertEqual(pp.extent_maximum_xyz, (235, 70., 0)) def test_pp_merge(self): self.start() plinelist = [[110, 5], [120, 20], [130, 15]] plinelst2 = [[150, 50], [160, 70]] plinelst3 = (190, 65, 4) pm = gxgeo.PPoint.merge((plinelist, plinelst2, [plinelst3])) self.assertEqual(pm.length, 6) self.assertEqual(tuple(pm[0]), (110, 5, 0)) self.assertEqual(tuple(pm[4]), (160, 70, 0)) self.assertEqual(tuple(pm[5]), (190, 65, 4)) pm = 
gxgeo.PPoint.merge((gxgeo.PPoint(plinelist), gxgeo.Point2(plinelst2), gxgeo.Point(plinelst3))) self.assertEqual(pm.length, 6) self.assertEqual(tuple(pm[0]), (110, 5, 0)) self.assertEqual(tuple(pm[4]), (160, 70, 0)) self.assertEqual(tuple(pm[5]), (190, 65, 4)) center = gxgeo.Point((550000, 6000000)) a = gxgeo.PPoint(plinelist) + center b = gxgeo.Point2(plinelst2, coordinate_system="NAD83 / UTM zone 15N") + center c = gxgeo.Point(plinelst3, coordinate_system="NAD27 / UTM zone 15N") + center pm = gxgeo.PPoint.merge((a, b, c)) self.assertEqual(pm.length, 6) self.assertTrue(pm.coordinate_system == b.coordinate_system) self.assertEqual(tuple(pm[0]), (550110., 6000005., 0.)) self.assertEqual(tuple(pm[4]), (550160., 6000070., 0.)) self.assertEqual(str(pm[5]), '_point_(550173.9373550161, 6000287.416398498, 4.0)') def test_union(self): self.start() cs = "NAD83 / UTM zone 32N>" p = gxgeo.Point((500000,6000000), cs) ext = gxgeo.extent_union(p, p) self.assertEqual(ext.p0.xyz, p.xyz) self.assertEqual(ext.p1.xyz, p.xyz) p2 = gxgeo.Point((500010,6000100), cs) ext = gxgeo.extent_union(p, p2) self.assertEqual(ext.p0.xyz, p.xyz) self.assertEqual(ext.p1.xyz, p2.xyz) p3 = (490000,5900000) ext = gxgeo.extent_union(p, p3) self.assertEqual(ext.p0.xyz, (490000,5900000, 0)) self.assertEqual(ext.p1.xyz, p.xyz) pp = gxgeo.PPoint((p2, gxgeo.Point(p3))) ext = gxgeo.extent_union(p, pp) self.assertEqual(ext.p0.xyz, (490000,5900000, 0)) self.assertEqual(ext.p1.xyz, p2.xyz) p2.coordinate_system = "NAD27 / UTM zone 32N" p2.z = -88 p3 = gxgeo.Point(p3, "NAD27 / UTM zone 32N") p3.z = 51 pp = gxgeo.PPoint((p2, p3)) ext = gxgeo.extent_union(p, pp) self.assertEqual(ext.p0.xyz, (490159.31817756349, 5900080.4689568691, -88.0)) self.assertEqual(ext.p1.xyz, (500169.21834333451, 6000179.4198679868, 51.0)) if __name__ == '__main__': unittest.main() <file_sep>/docs/GX3DV.rst .. _GX3DV: GX3DV class ================================== .. autoclass:: geosoft.gxapi.GX3DV :members: .. 
_GEO3DV_OPEN: GEO3DV_OPEN constants ----------------------------------------------------------------------- Open Modes .. autodata:: geosoft.gxapi.GEO3DV_MVIEW_READ :annotation: .. autoattribute:: geosoft.gxapi.GEO3DV_MVIEW_READ .. autodata:: geosoft.gxapi.GEO3DV_MVIEW_WRITEOLD :annotation: .. autoattribute:: geosoft.gxapi.GEO3DV_MVIEW_WRITEOLD <file_sep>/docs/generate_rsts.py import os import inspect from pkg_resources import parse_version import geosoft.gxpy as gxpy import geosoft.gxapi as gxa import re from docstring_info import parse_docstring min_version = parse_version("8.5") ver_add_re = re.compile(".*versionadded\:\:\s+([0-9\.]+)", re.MULTILINE) def _is_some_method(obj): return (inspect.isfunction(obj) or inspect.ismethod(obj) or inspect.isbuiltin(obj) or inspect.ismethoddescriptor(obj)) def get_short_desc(str): lines = [str.strip() for str in str.splitlines()] for line in lines: if line and not ( (line.endswith(':') and '->' in line) or 'parsed-literal' in line): return line return '' def get_short_desc_from_object(object): if not object.__doc__: return '' info = parse_docstring(object.__doc__) short_desc = get_short_desc(info['short_description']) if not short_desc: short_desc = get_short_desc(info['long_description']) return short_desc def add_object_to_history(mod, version_history, object, name, collection, parent_name=None): if not name.startswith('_') and hasattr(object, '__doc__'): doc = str(object.__doc__) match = ver_add_re.search(doc) if match: ver_string = match.group(1) version = parse_version(ver_string) if version >= min_version: version_history[version] = version_history.get(version, {'classes': [], 'functions': []}) short_desc = get_short_desc_from_object(object) if parent_name: version_history[version][collection].append(":func:`{}.{}.{}` {}"\ .format(mod.__name__, parent_name, name, short_desc)) else: if _is_some_method(object): version_history[version][collection].append(":func:`{}.{}` {}" .format(mod.__name__, name, short_desc)) elif 
inspect.isclass(object) and issubclass(object, BaseException): version_history[version][collection].append(":exc:`{}.{}` {}" .format(mod.__name__, name, short_desc)) else: version_history[version][collection].append(":class:`{}.{}` {}"\ .format(mod.__name__, name, short_desc)) def sort_version_history(version_history): for key, val in version_history.items(): val['classes'].sort() val['functions'].sort() # get_class_that_defined_method implementation from here: # http://stackoverflow.com/questions/3589311/get-defining-class-of-unbound-method-object-in-python-3/25959545#25959545 def get_class_that_defined_method(meth): if inspect.ismethod(meth): for cls in inspect.getmro(meth.__self__.__class__): if cls.__dict__.get(meth.__name__) is meth: return cls meth = meth.__func__ # fallback to __qualname__ parsing if inspect.isfunction(meth): mod = inspect.getmodule(meth) if mod is None: return None cls = getattr(mod, meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0], None) if cls is not None and isinstance(cls, type): return cls return None # not required since None would have been implicitly returned anyway def does_class_implement_method(cls, meth): cls_from_meth = get_class_that_defined_method(meth) return cls_from_meth is None or cls is cls_from_meth def parse_module_history(mod, version_history): functions = inspect.getmembers(mod, _is_some_method) for fn_key, fn_value in functions: if does_class_implement_method(mod, fn_value): add_object_to_history(mod, version_history, fn_value, fn_key, 'functions') classes = inspect.getmembers(mod, inspect.isclass) for cl_key, cl_value in classes: if not cl_key.startswith('_'): add_object_to_history(mod, version_history, cl_value, cl_key, 'classes') cl_functions = inspect.getmembers(cl_value, _is_some_method) for fn_key, fn_value in cl_functions: if does_class_implement_method(cl_value, fn_value): add_object_to_history(mod, version_history, fn_value, fn_key, 'functions', parent_name=cl_key) 
sort_version_history(version_history) def collect_gxa_version_history(): gxa._version_history = {} parse_module_history(gxa, gxa._version_history) gxa._versions = reversed(sorted(gxa._version_history.keys())) def collect_gxpy_version_history(): gxpy._version_history = {} for attr, value in gxpy.__dict__.items(): if not attr.startswith("_") and inspect.ismodule(value): parse_module_history(value, gxpy._version_history) gxpy._versions = reversed(sorted(gxpy._version_history.keys())) def gen_version_history(j2env, output_dir): collect_gxpy_version_history() collect_gxa_version_history() modules = [gxpy, gxa] template = j2env.get_template('version_history.rst') output_file = os.path.join(output_dir, 'version_history.rst') with open(output_file, 'w+') as f: f.write(template.render(modules=modules)) def gen_version_history(j2env, output_dir): collect_gxpy_version_history() collect_gxa_version_history() modules = [gxpy, gxa] template = j2env.get_template('version_history.rst') output_file = os.path.join(output_dir, 'version_history.rst') with open(output_file, 'w+') as f: f.write(template.render(modules=modules)) def gen_gxapi_rsts(j2env, output_dir): template = j2env.get_template('gxapi_class.rst') classes = inspect.getmembers(gxa, inspect.isclass) for cl_key, cl_value in classes: if (cl_key.startswith("GX") and not cl_key == "GXContext" and not cl_key == "GXCancel" and not cl_key == "GXExit" and not cl_key == "GXError" and not cl_key == "GXAPIError"): with open(cl_key + '.rst', 'w+') as f: f.write(template.render(class_name=cl_key, definitions = {})) def gen_gxapi_toc(j2env, output_dir): template = j2env.get_template('geosoft.gxapi.classes.rst') gxa_classes = inspect.getmembers(gxa, inspect.isclass) classes = [c for c, _ in gxa_classes if not c.startswith('_') and not c.startswith('ref_') and not c.endswith('_ref') and c != 'GXCancel' and c != 'GXAPIError' and c != 'GXError' and c != 'GXExit' and c != 'GXContext'] with open('geosoft.gxapi.classes.rst', 'w+') as f: 
f.write(template.render(classes=['geosoft.gxapi', 'GXGEOSOFT'] + classes)) def gen_gxpy_rsts(j2env, output_dir): # remove stale rst files for fn in list(os.listdir()): if not os.path.isdir(fn): if fn.startswith('geosoft.gxpy.') and fn.endswith('.rst'): os.remove(fn) modules = sorted([(k, get_short_desc_from_object(v)) for k, v in gxpy.__dict__.items() if not k.startswith("_") and inspect.ismodule(v)]) with open('geosoft.gxpy.rst', 'w+') as f: template = j2env.get_template('geosoft.gxpy.rst') f.write(template.render(modules=modules)) template = j2env.get_template('geosoft.gxpy.mod.rst') for module in modules: with open('geosoft.gxpy.' + module[0] + '.rst', 'w+') as f: f.write(template.render(module=module)) def generate(): from jinja2 import Environment, FileSystemLoader global __global_collection dir = os.path.split(__file__)[0] templates_dir = os.path.join(dir,'templates') j2env = Environment(loader=FileSystemLoader(templates_dir), trim_blocks = True, lstrip_blocks = True) gen_gxpy_rsts(j2env, dir) #gen_gxapi_rsts(j2env, dir) gen_gxapi_toc(j2env, dir) gen_version_history(j2env, dir) if __name__ == "__main__": generate() <file_sep>/geosoft/gxapi/GXBF.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXBF(gxapi_cy.WrapBF): """ GXBF class. The `GXBF <geosoft.gxapi.GXBF>` class is used to access (or create) Binary files and remove (or destroy) files from use. You can also perform a variety of additional tasks, such as positioning within files, reading from files and writing to files. 
""" def __init__(self, handle=0): super(GXBF, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXBF <geosoft.gxapi.GXBF>` :returns: A null `GXBF <geosoft.gxapi.GXBF>` :rtype: GXBF """ return GXBF() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def ch_size(self, size): """ Changes the size of a file :param size: New length in bytes :type size: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._ch_size(size) def seek(self, offset, ref): """ Moves file position :param offset: Number of bytes from reference point :param ref: :ref:`BF_SEEK` :type offset: int :type ref: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Terminates if attempt to move past the end of a read-only file. """ self._seek(offset, ref) def copy(self, b_fw): """ Copy entire contents of a source `GXBF <geosoft.gxapi.GXBF>` to a destination `GXBF <geosoft.gxapi.GXBF>` :param b_fw: Destination `GXBF <geosoft.gxapi.GXBF>` :type b_fw: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(b_fw) def crc(self, size, crc): """ Compute CRC of a file. :param size: Number of bytes to CRC :param crc: CRC start (use `CRC_INIT_VALUE <geosoft.gxapi.CRC_INIT_VALUE>` for new) :type size: int :type crc: int :returns: CRC Value :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._crc(size, crc) return ret_val @classmethod def create(cls, file, status): """ Create `GXBF <geosoft.gxapi.GXBF>` object. :param file: File name to open ("" is a temporary file) :param status: :ref:`BF_OPEN_MODE` :type file: str :type status: int :returns: `GXBF <geosoft.gxapi.GXBF>` Object :rtype: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Run-time specific directory paths may be added the the front of file names as follows: <geosoft> the main Geosoft installation directory <geosoft2> the secondary Geosoft installation directory <geotemp> the Geosoft temporary file directory <windows> the operating system Windows directory <system> the operating system system directory <other> other environment variables For example "<geosoft>/user/csv/datum.csv" """ ret_val = gxapi_cy.WrapBF._create(GXContext._get_tls_geo(), file.encode(), status) return GXBF(ret_val) @classmethod def create_sbf(cls, sbf, file, status): """ Create `GXBF <geosoft.gxapi.GXBF>` object inside an `GXSBF <geosoft.gxapi.GXSBF>`. :param sbf: Storage :param file: File name to open ("" is a temporary file) :param status: :ref:`BF_OPEN_MODE` :type sbf: GXSBF :type file: str :type status: int :returns: `GXBF <geosoft.gxapi.GXBF>` Object :rtype: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** see sbf.gxh """ ret_val = gxapi_cy.WrapBF._create_sbf(GXContext._get_tls_geo(), sbf, file.encode(), status) return GXBF(ret_val) def eof(self): """ Returns 1 if at the end of the file :returns: 1 if at the end of the file, 0 if not at the end of the file :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._eof() return ret_val def query_write(self): """ Check if you can write to the `GXBF <geosoft.gxapi.GXBF>`. :returns: 0 - No 1 - Yes :rtype: int .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._query_write() return ret_val def read_binary_string(self, bytes, encoding, data): """ Reads string data from current position in `GXBF <geosoft.gxapi.GXBF>` :param bytes: Number of bytes to read :param encoding: :ref:`BF_ENCODE` :param data: Data :type bytes: int :type encoding: int :type data: str_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ data.value = self._read_binary_string(bytes, encoding, data.value.encode()) def size(self): """ Returns the file length :returns: File size in bytes. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._size() return ret_val def tell(self): """ Returns current position of file pointer in bytes :returns: Current file pointer location :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._tell() return ret_val def read_int(self, type, data): """ Reads int data from current position in `GXBF <geosoft.gxapi.GXBF>` :param type: :ref:`GS_TYPES` and :ref:`BF_BYTEORDER` :param data: Data :type type: int :type data: int_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the data source may be in byte order different from that required by the reader, you can add the source byte-order to the `GXBF <geosoft.gxapi.GXBF>` elelment type. The byte order will be swapped if required. For example, to write out a real number 3.5 with Most-Significant_Byte first (Mortorola) convention: `write_double <geosoft.gxapi.GXBF.write_double>`(hBF,`BF_BYTEORDER_MSB <geosoft.gxapi.BF_BYTEORDER_MSB>`+`GS_REAL <geosoft.gxapi.GS_REAL>`,3.5). If a byte order is not specified, the source is assumed to be in the native byte order of the reading/writing computer. """ data.value = self._read_int(type, data.value) def read_double(self, type, data): """ Reads real data from current position in `GXBF <geosoft.gxapi.GXBF>` :param type: :ref:`GS_TYPES` and :ref:`BF_BYTEORDER` :param data: Data :type type: int :type data: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the data source may be in byte order different from that required by the reader, you can add the source byte-order to the `GXBF <geosoft.gxapi.GXBF>` elelment type. The byte order will be swapped if required. For example, to write out a real number 3.5 with Most-Significant_Byte first (Mortorola) convention: `write_double <geosoft.gxapi.GXBF.write_double>`(hBF,`BF_BYTEORDER_MSB <geosoft.gxapi.BF_BYTEORDER_MSB>`+`GS_REAL <geosoft.gxapi.GS_REAL>`,3.5). If a byte order is not specified, the source is assumed to be in the native byte order of the reading/writing computer. 
""" data.value = self._read_double(type, data.value) def read_vv(self, type, vv): """ Read data to a `GXVV <geosoft.gxapi.GXVV>` from current position in `GXBF <geosoft.gxapi.GXBF>` :param type: :ref:`GS_TYPES` and :ref:`BF_BYTEORDER` :param vv: `GXVV <geosoft.gxapi.GXVV>` data to read, `GXVV <geosoft.gxapi.GXVV>` length is read :type type: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the data source may be in byte order different from that required by the reader, you can add the source byte-order to the `GXBF <geosoft.gxapi.GXBF>` elelment type. The byte order will be swapped if required. For example, to write out a real number 3.5 with Most-Significant_Byte first (Mortorola) convention: `write_double <geosoft.gxapi.GXBF.write_double>`(hBF,`BF_BYTEORDER_MSB <geosoft.gxapi.BF_BYTEORDER_MSB>`+`GS_REAL <geosoft.gxapi.GS_REAL>`,3.5). If a byte order is not specified, the source is assumed to be in the native byte order of the reading/writing computer. """ self._read_vv(type, vv) def set_destroy_status(self, status): """ Set the flag to delete the file on close :param status: :ref:`BF_CLOSE` :type status: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_destroy_status(status) def write_binary_string(self, encoding, data): """ Write a binary string to a `GXBF <geosoft.gxapi.GXBF>` :param encoding: :ref:`BF_ENCODE` :param data: String to write out :type encoding: int :type data: str .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_binary_string(encoding, data.encode()) def write_data_null(self): """ Writes a null byte (0) to `GXBF <geosoft.gxapi.GXBF>` .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_data_null() def write_int(self, type, data): """ Writes int to the `GXBF <geosoft.gxapi.GXBF>` :param type: :ref:`GS_TYPES` and :ref:`BF_BYTEORDER` :param data: Data :type type: int :type data: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See comments on byte order for the Read.. functions if you want to enforce a certain byte order. If a byte order is not specified, the data is written in the native byte order of the writing computer. """ self._write_int(type, data) def write_double(self, type, data): """ Writes real to the `GXBF <geosoft.gxapi.GXBF>` :param type: :ref:`GS_TYPES` and :ref:`BF_BYTEORDER` :param data: Data :type type: int :type data: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See comments on byte order for the Read.. functions if you want to enforce a certain byte order. If a byte order is not specified, the data is written in the native byte order of the writing computer. """ self._write_double(type, data) def write_vv(self, type, vv): """ Writes `GXVV <geosoft.gxapi.GXVV>` to the `GXBF <geosoft.gxapi.GXBF>` :param type: :ref:`GS_TYPES` and :ref:`BF_BYTEORDER` :param vv: Data :type type: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See comments on byte order for the Read.. functions if you want to enforce a certain byte order. If a byte order is not specified, the data is written in the native byte order of the writing computer. 
""" self._write_vv(type, vv) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/base.py import os import shutil import glob import unittest import inspect import subprocess import numpy as np np.seterr(all='raise') os.environ['GEOSOFT_FORCE_MESA_3D'] = '1' os.environ['GEOSOFT_TEST_MODE'] = '1' os.environ['GEOSOFT_TESTSYSTEM_MODE'] = '1' import geosoft.gxpy.gx as gx import geosoft.gxapi as gxapi import geosoft.gxpy.map as gxmap import geosoft.gxpy.viewer as gxvwr import geosoft.gxpy.utility as gxu import geosoft.gxpy.system as gxsys # set to True to show viewer for each CRC call SHOW_TEST_VIEWERS = False _external_result_base_dir = os.environ.get('GEOSOFT_GXPY_DEV_RESULT_DIR', None) _dev_test_run = os.environ.get('GEOSOFT_GXPY_DEV_TEST_RUN', 0) == '1' if _dev_test_run: SHOW_TEST_VIEWERS = False import win32gui import win32con if win32gui.SystemParametersInfo(win32con.SPI_GETFONTSMOOTHING): import atexit def restore_font_smoothing(): win32gui.SystemParametersInfo(win32con.SPI_SETFONTSMOOTHING, True) atexit.register(restore_font_smoothing) win32gui.SystemParametersInfo(win32con.SPI_SETFONTSMOOTHING, False) # do not accidentally block automated runs if these variables are set _prevent_interactive = _dev_test_run or os.environ.get('GEOSOFT_PREVENT_INTERACTIVE', 0) == '1' if _prevent_interactive: SHOW_TEST_VIEWERS = False # Make root window for UI methods root_window = None if SHOW_TEST_VIEWERS: from tkinter import Tk, messagebox # We import these here to properly initialize common controls import win32gui import win32con root_window = Tk() root_window.overrideredirect(1) root_window.withdraw() def _t(s): return s def _verify_no_gx_context(): loc_gx = None try: loc_gx = gxapi.GXContext.current() except: loc_gx = None pass if loc_gx is not 
None: raise Exception(_t("We have a GXContext but should not!")) class GXPYTest(unittest.TestCase): maxDiff = None _test_case_py = None _test_case_filename = None _result_base_dir = None _cls_unique_id_count = 0 _gx = None @classmethod def _cls_uuid(cls): cls._cls_unique_id_count = cls._cls_unique_id_count + 1 return 'uuid_{}_{}'.format(cls._test_case_filename, cls._cls_unique_id_count) @classmethod def setUpGXPYTest(cls, res_stack=6, context_name=__name__, parent_window=0, per_user_key=False): _verify_no_gx_context() cls._cls_unique_id_count = 0 cls._test_case_py = os.path.join(os.getcwd(), inspect.getfile(cls)) cls._test_case_filename = os.path.split(cls._test_case_py)[1] if cls._test_case_filename == os.path.split(__file__)[1]: raise Exception(_t("GXPYTest base class incorrectly detected as test case!")) cur_dir = os.path.dirname(cls._test_case_py) cls._result_base_dir = os.path.join(cur_dir, 'results', cls._test_case_filename) if _external_result_base_dir is None else _external_result_base_dir os.makedirs(cls._result_base_dir, exist_ok=True) os.chdir(cls._result_base_dir) gxu._temp_folder_override = os.path.join(cls._result_base_dir, '__tmp__') if os.path.exists(gxu._temp_folder_override): shutil.rmtree(gxu._temp_folder_override) os.makedirs(gxu._temp_folder_override, exist_ok=True) gxu._uuid_callable = cls._cls_uuid if not _dev_test_run: # This ensures clean global and other settings for consistent test runs _, user_dir, _ = gxapi.GXContext.get_key_based_product_dirs(per_user_key=per_user_key) if os.path.exists(user_dir): shutil.rmtree(user_dir) os.makedirs(user_dir, exist_ok=True) cls._gx = gx.GXpy(name=context_name, log=print, res_stack=res_stack, max_warnings=12, suppress_progress=True, parent_window=parent_window, per_user_key=per_user_key) @classmethod def tearDownGXPYTest(cls): cls._gx.__del__() cls._gx = None gxu._temp_folder_override = None gxu._uuid_callable = None cls._test_case_py = None cls._test_case_filename = None cls._result_base_dir = None 
cls._cls_unique_id_count = 0 _verify_no_gx_context() @classmethod def setUpClass(cls, res_stack=6): if cls is GXPYTest: raise unittest.SkipTest("Skip GXPYTest tests, it's a base class") cls.setUpGXPYTest() @classmethod def tearDownClass(cls): cls.tearDownGXPYTest() def _uuid(self): self._unique_id_count = self._unique_id_count + 1 return 'uuid_{}_{}'.format(self._func, self._unique_id_count) def start(self): self._func = self.id().split('.')[-1] gx.gx().log('\n' + self._func) self._result_dir = os.path.join(self._result_base_dir, self._func) result_run_dir = os.path.join(self._result_dir, 'result') if os.path.exists(result_run_dir): shutil.rmtree(result_run_dir) gxu._uuid_callable = self._uuid self._unique_id_count = 0 @property def gx(self): return self.__class__._gx @property def result_dir(self): return self._result_dir @result_dir.setter def result_dir(self, value): self._result_dir = value @classmethod def _remove_time_chunk_from_png(cls, png_file): file_length = os.stat(png_file).st_size with open(png_file, 'rb') as f: bytes = f.read(file_length) with open(png_file, 'wb') as f: f.write(bytes[:8]) pos = 8 while pos < len(bytes): buf_length = bytes[pos:pos + 4] length = buf_length[0] * 256 * 256 * 256 + \ buf_length[1] * 256 * 256 + \ buf_length[2] * 256 + buf_length[3] buf_type = bytes[pos + 4:pos + 12] chunk_type = buf_type.decode('ascii', 'ignore') if not (chunk_type.startswith('tIME') or chunk_type.startswith('tEXtdate')): f.write(bytes[pos:pos + length + 12]) pos = pos + length + 12 def _map_to_results(self, map_file, xml_file, image_file, map_result, format, pix_width): m = gxapi.GXMAP.create(map_file, gxapi.MAP_WRITEOLD) m_res = gxapi.GXMAP.create(map_result, gxapi.MAP_WRITENEW) m.dup_map(m_res, gxapi.DUPMAP_COPY) #m_res.pack_files() m_res = None os.remove(map_result + '.xml') m.export_all_raster(image_file, '', pix_width, 0, gxapi.rDUMMY, gxapi.MAP_EXPORT_BITS_24, gxapi.MAP_EXPORT_METHOD_NONE, format, '') if format == 'PNG': 
GXPYTest._remove_time_chunk_from_png(image_file) crc = gxapi.int_ref() m.crc_map(crc, xml_file) m = None try: os.remove(image_file + '.gi') os.remove(image_file + '.xml') except FileNotFoundError: pass @classmethod def _agnosticize_and_ensure_consistent_line_endings(cls, xml_file, file_name_part, alt_crc_name): with open(xml_file) as f: lines = f.read().splitlines() with open(xml_file, 'wb') as f: for line in lines: line = line.replace(file_name_part, alt_crc_name) f.write('{}\r\n'.format(line).encode('UTF-8')) def crc_map(self, map_file, *, format='PNG', pix_width=2048, alt_crc_name=None): """ Run Geosoft crc testing protocol on Geosoft maps. :param pix_width: pixel width, increase if achieve higher fidelity in the bitmap test :param alt_crc_name: test name. The default is the name of the calling function. The name must be unique within this test suite, which it will be if there is only one test per test function. If you have more than one test in a single testing function use this parameter to create unique names. 
""" result_dir = os.path.join(self.result_dir, 'result') if _external_result_base_dir is None else self.result_dir if not os.path.exists(result_dir): os.makedirs(result_dir) master_dir = os.path.join(self.result_dir, 'master') if not os.path.exists(master_dir): os.makedirs(master_dir) file_part = os.path.split(map_file)[1] image_result_file = os.path.join(result_dir, "{}.png".format(file_part)) xml_result_file = os.path.join(result_dir, "{}.xml".format(file_part)) map_result_file = os.path.join(result_dir, "{}".format(file_part)) self._map_to_results(map_file, xml_result_file, image_result_file, map_result_file, format, pix_width) xml_result_file_catalog = xml_result_file + '.catalog.xml' if os.path.exists(xml_result_file_catalog): os.remove(xml_result_file_catalog) file_name_part = file_part.split('.')[0] if alt_crc_name is None: alt_crc_name = gxsys.func_name(1) result_files = glob.glob(xml_result_file + '*') for result in result_files: self._agnosticize_and_ensure_consistent_line_endings(result, file_name_part, alt_crc_name) if alt_crc_name: alt_file_part = file_part.replace(file_name_part, alt_crc_name) alt_image_result_file = os.path.join(result_dir, "{}.png".format(alt_file_part)) alt_xml_result_file = os.path.join(result_dir, "{}.xml".format(alt_file_part)) alt_map_result_file = os.path.join(result_dir, "{}".format(alt_file_part)) shutil.move(image_result_file, alt_image_result_file) shutil.move(map_result_file, alt_map_result_file) result_files = glob.glob(xml_result_file + '*') for result in result_files: result_file_part = os.path.split(result)[1] alt_result = os.path.join(result_dir, result_file_part.replace(file_name_part, alt_crc_name)) shutil.move(result, alt_result) image_result_file = alt_image_result_file map_result_file = alt_map_result_file xml_result_file = alt_xml_result_file image_master_file = os.path.join(master_dir, "{}.png".format(alt_file_part)) map_master_file = os.path.join(master_dir, "{}".format(alt_file_part)) xml_master_file = 
os.path.join(master_dir, "{}.xml".format(alt_file_part)) else: image_master_file = os.path.join(master_dir, "{}.png".format(file_part)) map_master_file = os.path.join(master_dir, "{}".format(file_part)) xml_master_file = os.path.join(master_dir, "{}.xml".format(file_part)) if _external_result_base_dir is not None: return xml_result_part = os.path.join('result', os.path.split(xml_result_file)[1]) xml_master_part = os.path.join('master', os.path.split(xml_master_file)[1]) xml_result_files = glob.glob(map_result_file + '*') xml_master_files = glob.glob(map_master_file + '*') if SHOW_TEST_VIEWERS: gxvwr.view_document(map_file, env={'GEOSOFT_FORCE_MESA_3D': '0'}) @classmethod def pause(cls): if not _prevent_interactive: print("\n\nHit Return key to continue...") input() def assertSpatialRefWKT(self, expected_wkt, spatial_ref): wkt_actual = spatial_ref.exportToString() if spatial_ref is not None else "[*unknown]" # strip precision strings expected_wkt = expected_wkt[:expected_wkt.rindex(']') + 1] wkt_actual = wkt_actual[:wkt_actual.rindex(']')+1] self.assertEqual(expected_wkt, wkt_actual) def npAssertAlmostEqual(self, expected, actual, decimal=7, err_msg='', verbose=True): np.testing.assert_almost_equal(actual, expected, decimal, err_msg, verbose) def npAssertEqual(self, expected, actual, err_msg='', verbose=True): np.testing.assert_equal(actual, expected, err_msg, verbose) <file_sep>/docs/GXSTRINGS.rst .. _GXSTRINGS: GXSTRINGS class ================================== .. autoclass:: geosoft.gxapi.GXSTRINGS :members: <file_sep>/docs/GXMVG.rst .. _GXMVG: GXMVG class ================================== .. autoclass:: geosoft.gxapi.GXMVG :members: .. _MVG_DRAW: MVG_DRAW constants ----------------------------------------------------------------------- `GXMVG <geosoft.gxapi.GXMVG>` draw define .. autodata:: geosoft.gxapi.MVG_DRAW_POLYLINE :annotation: .. autoattribute:: geosoft.gxapi.MVG_DRAW_POLYLINE .. autodata:: geosoft.gxapi.MVG_DRAW_POLYGON :annotation: .. 
autoattribute:: geosoft.gxapi.MVG_DRAW_POLYGON .. _MVG_GRID: MVG_GRID constants ----------------------------------------------------------------------- `GXMVG <geosoft.gxapi.GXMVG>` grid define .. autodata:: geosoft.gxapi.MVG_GRID_DOT :annotation: .. autoattribute:: geosoft.gxapi.MVG_GRID_DOT .. autodata:: geosoft.gxapi.MVG_GRID_LINE :annotation: .. autoattribute:: geosoft.gxapi.MVG_GRID_LINE .. autodata:: geosoft.gxapi.MVG_GRID_CROSS :annotation: .. autoattribute:: geosoft.gxapi.MVG_GRID_CROSS .. _MVG_LABEL_BOUND: MVG_LABEL_BOUND constants ----------------------------------------------------------------------- `GXMVG <geosoft.gxapi.GXMVG>` label bound define .. autodata:: geosoft.gxapi.MVG_LABEL_BOUND_NO :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_BOUND_NO .. autodata:: geosoft.gxapi.MVG_LABEL_BOUND_YES :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_BOUND_YES .. _MVG_LABEL_JUST: MVG_LABEL_JUST constants ----------------------------------------------------------------------- `GXMVG <geosoft.gxapi.GXMVG>` label justification define .. autodata:: geosoft.gxapi.MVG_LABEL_JUST_TOP :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_JUST_TOP .. autodata:: geosoft.gxapi.MVG_LABEL_JUST_BOTTOM :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_JUST_BOTTOM .. autodata:: geosoft.gxapi.MVG_LABEL_JUST_LEFT :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_JUST_LEFT .. autodata:: geosoft.gxapi.MVG_LABEL_JUST_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_JUST_RIGHT .. _MVG_LABEL_ORIENT: MVG_LABEL_ORIENT constants ----------------------------------------------------------------------- `GXMVG <geosoft.gxapi.GXMVG>` label orientation .. autodata:: geosoft.gxapi.MVG_LABEL_ORIENT_HORIZONTAL :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_ORIENT_HORIZONTAL .. autodata:: geosoft.gxapi.MVG_LABEL_ORIENT_TOP_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_ORIENT_TOP_RIGHT .. 
autodata:: geosoft.gxapi.MVG_LABEL_ORIENT_TOP_LEFT :annotation: .. autoattribute:: geosoft.gxapi.MVG_LABEL_ORIENT_TOP_LEFT .. _MVG_SCALE: MVG_SCALE constants ----------------------------------------------------------------------- `GXMVG <geosoft.gxapi.GXMVG>` scale define .. autodata:: geosoft.gxapi.MVG_SCALE_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.MVG_SCALE_LINEAR .. autodata:: geosoft.gxapi.MVG_SCALE_LOG :annotation: .. autoattribute:: geosoft.gxapi.MVG_SCALE_LOG .. autodata:: geosoft.gxapi.MVG_SCALE_LOGLINEAR :annotation: .. autoattribute:: geosoft.gxapi.MVG_SCALE_LOGLINEAR .. _MVG_WRAP: MVG_WRAP constants ----------------------------------------------------------------------- `GXMVG <geosoft.gxapi.GXMVG>` wrap define .. autodata:: geosoft.gxapi.MVG_WRAP_NO :annotation: .. autoattribute:: geosoft.gxapi.MVG_WRAP_NO .. autodata:: geosoft.gxapi.MVG_WRAP_YES :annotation: .. autoattribute:: geosoft.gxapi.MVG_WRAP_YES <file_sep>/geosoft/gxpy/tests/test_system.py """ @author: Ian """ import unittest import os import time import numpy as np import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.system as gsys from base import GXPYTest class Test(GXPYTest): @classmethod def tf(cls, f): return os.path.join(os.path.dirname(cls._test_case_py), f) def test_system(self): self.start() self.assertEqual(gsys.__version__, geosoft.__version__) def test_call_location(self): self.start() f = gsys.call_location() p = gsys.call_location(1) self.assertIn("test_call_location", f) self.assertIn("test_system.py", f) self.assertIn("line 28", f) self.assertIn("run", p) def test_func(self): self.start() f = gsys.func_name() p = gsys.func_name(2) # This may change with Python upgrades if the unittest.case code changes self.assertEqual(f, "test_func") self.assertEqual(p, "run") def test_statics(self): self.start() app = gsys.app_name() self.assertTrue(len(app) > 0) func = gsys.func_name() self.assertEqual(func, 'test_statics') def test_unzip(self): self.start() folder, 
files = gsys.unzip(Test.tf('little.zip'), Test.tf('_test'), checkready=1) self.assertEqual(len(files), 3) self.assertTrue(os.path.isfile(folder + '\\little.png')) self.assertTrue(os.path.isfile(folder + '\\little - Copy.png')) self.assertTrue(os.path.isfile(folder + '\\little - Copy (2).png')) gsys.remove_dir(folder, tries=0) def test_task_range(self): self.start() nrecords = 200000000 nfields = 1 bufsize = 10000000 # this needs to be big enough to properly test the parallel implementation data = np.arange(nrecords*nfields*3).reshape(nrecords, nfields, 3) def get_record_count(): return data.shape[0] def read_records(i,j): return data[i:(i+j), :, :] def validate(range_min,range_max): self.assertEqual(range_min[0],data[0,0,0]) self.assertEqual(range_min[1],data[0,0,1]) self.assertEqual(range_min[2],data[0,0,2]) self.assertEqual(range_max[0],data[-1,0,0]) self.assertEqual(range_max[1],data[-1,0,1]) self.assertEqual(range_max[2],data[-1,0,2]) def reference_range(): # Roger's reference implementation record_count = get_record_count() if record_count < 1: return #first_point = kv.Project.read_records(src_name, 0, 1) first_point = read_records(0, 1) range_min = [first_point[0][0][0], first_point[0][0][1], first_point[0][0][2]] range_max = range_min[:] record_offset = 1 buf_size = min(bufsize, record_count) while record_offset < record_count: records_to_read = min(buf_size, record_count - record_offset) # buf = kv.Project.read_records(src_name, record_offset, records_to_read) buf = read_records(record_offset, records_to_read) for i in range(records_to_read): # kv.Thread.task_progress(record_offset / record_count * 100) record_offset += 1 # if kv.Thread.is_cancelling(): # return range_min[0] = min(buf[i][0][0], range_min[0]) range_min[1] = min(buf[i][0][1], range_min[1]) range_min[2] = min(buf[i][0][2], range_min[2]) range_max[0] = max(buf[i][0][0], range_max[0]) range_max[1] = max(buf[i][0][1], range_max[1]) range_max[2] = max(buf[i][0][2], range_max[2]) return range_min, 
range_max def numpy_range(): # Minimum change numpy implementation, only replace inner loop record_count = get_record_count() if record_count < 1: return first_point = read_records(0, 1) range_min = [first_point[0][0][0], first_point[0][0][1], first_point[0][0][2]] range_max = range_min[:] record_offset = 1 buf_size = min(bufsize, record_count) while record_offset < record_count: records_to_read = min(buf_size, record_count - record_offset) buf = read_records(record_offset, records_to_read) bx = buf[:,0,0] by = buf[:,0,1] bz = buf[:,0,2] range_min = [min(range_min[0], np.nanmin(bx)), min(range_min[1], np.nanmin(by)), min(range_min[2], np.nanmin(bz))] range_max = [max(range_max[0], np.nanmax(bx)), max(range_max[1], np.nanmax(by)), max(range_max[2], np.nanmax(bz))] record_offset += buf.shape[0] return range_min, range_max def numpy_parallel_range(): # parallel implementation record_count = get_record_count() if record_count < 1: return first_point = read_records(0, 1) range_min = [first_point[0][0][0], first_point[0][0][1], first_point[0][0][2]] range_max = range_min[:] record_offset = 1 buf_size = min(bufsize, record_count) while record_offset < record_count: records_to_read = min(buf_size, record_count - record_offset) buf = read_records(record_offset, records_to_read) # arrange the problem for three parallel threads parallel = [(range_min[0], range_max[0], buf[:,0,0]), (range_min[1], range_max[1], buf[:,0,1]), (range_min[2], range_max[2], buf[:,0,2])] # run in parallel results = gsys.parallel_map(lambda a: (min(a[0], np.nanmin(a[2])), max(a[1], np.nanmax(a[2]))), parallel) # update the ranges from the results, which are in a list of (min,max) zip1,zip2 = zip(*results) range_min = list(zip1) range_max = list(zip2) record_offset += buf.shape[0] return range_min, range_max def time_test(func, reference_time=None): start = time.time() range_min, range_max = func() end = time.time() # make sure result is correct validate(range_min,range_max) elapsed = end-start if 
reference_time is None: speed_improvement = 1 else: speed_improvement = reference_time / elapsed geosoft.gxpy.gx.gx().log("{}: time: {} seconds, {} times faster than reference".format(func.__name__, elapsed, speed_improvement)) return elapsed #ref = time_test(reference_range) ref = 0 ref = time_test(numpy_range, ref) time_test(numpy_parallel_range, ref) # testing documented parallel example data = [(1+i, 2+i) for i in range(20)] geosoft.gxpy.gx.gx().log('parallel:',gsys.parallel_map(lambda ab: ab[0] + ab[1], data)) ############################################################################################### if __name__ == '__main__': unittest.main() <file_sep>/examples/stand-alone/authenticate.py import geosoft.gxapi as gxapi ctx = gxapi.GXContext.create('ian', '0.0', 0, 0) user = gxapi.str_ref() company = gxapi.str_ref() gxapi.GXSYS.get_licensed_user(user, company) try: # this function requires a licence gxapi.GXST2.create() print(user.value, 'is licenced.') except gxapi.GXAPIError: print(user.value, 'is NOT licenced.') <file_sep>/geosoft/gxpy/tests/test_vv.py import numpy as np import os import unittest import geosoft import geosoft.gxpy import geosoft.gxapi as gxapi import geosoft.gxpy.vv as gxvv import geosoft.gxpy.utility as gxu from base import GXPYTest class Test(GXPYTest): def test_vv(self): self.start() self.assertEqual(gxvv.__version__, geosoft.__version__) with gxvv.GXvv(dtype=np.float) as vv: self.assertTrue(vv.gxvv, gxapi.GXVV) self.assertEqual(vv.fid, (0.0, 1.0)) fid = (10.1,0.99) with gxvv.GXvv(dtype=np.float, fid=fid) as vv: self.assertTrue(vv.is_float) self.assertFalse(vv.is_int) self.assertFalse(vv.is_string) self.assertEqual(vv.fid, fid) self.assertEqual(vv.length, 0) fid = (-45,7) vv.fid = fid self.assertEqual(vv.fid, fid) vv.refid((-40, 8), 4) self.assertEqual(vv.fid, (-40,8)) self.assertEqual(vv.length, 4) self.assertEqual(len(vv.np), 4) vv.length = 16 self.assertEqual(vv.fid, (-40,8)) self.assertEqual(vv.length, 16) 
self.assertEqual(len(vv.np), 16) self.assertEqual(vv.dim, 1) vv.refid((-38, 1.5)) self.assertEqual(vv.fid, (-38, 1.5)) self.assertEqual(vv.length, 80) vv.refid((1000, 50)) self.assertEqual(vv.fid, (1000., 50.)) self.assertEqual(vv.length, 0) self.assertRaises(gxvv.VVException, vv.refid, (1, 0)) self.assertRaises(gxvv.VVException, vv.refid, (1, -1)) with gxvv.GXvv([1, 2, 3, 4, 5, 6]) as vv: self.assertTrue(vv.is_int) self.assertFalse(vv.is_float) self.assertFalse(vv.is_string) self.assertEqual(vv.fid, (0.0, 1.0)) self.assertEqual(vv.length, 6) self.assertEqual(vv.dtype, np.int32) self.assertEqual(vv.gxtype, gxu.gx_dtype(np.int32)) with gxvv.GXvv([1, 2, 3, 4, 5, 6], dtype=np.int64) as vv: self.assertEqual(vv.fid, (0.0, 1.0)) self.assertEqual(vv.length, 6) self.assertEqual(vv.dtype, np.int64) self.assertEqual(vv.gxtype, gxu.gx_dtype(np.int64)) def test_init_len(self): self.start() with gxvv.GXvv(dtype=np.float, len=10) as vv: self.assertEqual(vv.length, 10) def test_np(self): self.start() fid = (99,0.1) npdata = np.array([1,2,3,4,5,6,7]) with gxvv.GXvv(npdata, fid=fid) as vv: self.assertEqual(vv.fid, fid) self.assertEqual(vv.length, len(npdata)) np2 = vv.get_data(vv.dtype) self.assertEqual(np2[0].shape,(7,)) np2,fid2 = vv.get_data(vv.dtype,start=1) self.assertEqual(fid2,(99.1,.1)) self.assertEqual(np2.shape,(6,)) self.assertEqual(vv.get_data(vv.dtype,start=6)[0].shape,(1,)) self.assertRaises(gxvv.VVException, vv.get_data, vv.dtype, start=7) np3,fid3 = vv.get_data(np.int) self.assertEqual(fid3,fid) self.assertEqual(np3[0], 1) self.assertEqual(np3[6], 7) self.assertEqual(float(vv.np[6]), 7.0) self.assertEqual(int(vv.np[6]), 7) self.assertEqual(str(vv.np[6]), "7") npdata = np.array([1,2,3,4,5,6,7],dtype=np.int) with gxvv.GXvv(npdata, fid=fid) as vv: np3= vv.get_data(dtype=np.int64) self.assertEqual(np3[0][0],1) self.assertEqual(np3[0][6],7) np3 = vv.get_data(np.float) self.assertEqual(np3[0][0],1.) self.assertEqual(np3[0][6],7.) 
vv.set_data(np.array([4,5,6,7], dtype=np.int)) np3,fid = vv.get_data(dtype=np.int64) self.assertEqual(len(np3), 4) self.assertEqual(np3[0], 4) self.assertEqual(np3[3], 7) np3,fid = vv.get_data(np.float) self.assertEqual(np3[0], 4.) self.assertEqual(np3[3], 7.) npdata = np.array(['4', '5', '6', '7']) with gxvv.GXvv(npdata, fid=fid) as vv3: np3, fid = vv3.get_data() self.assertEqual(len(np3), 4) self.assertEqual(np3[0], '4') self.assertEqual(np3[3], '7') np3, fid = vv3.get_data(np.float) self.assertEqual(np3[0], 4.) self.assertEqual(np3[3], 7.) npdata = np.array(['4000', '50', '60', '-70']) with gxvv.GXvv(npdata, fid=fid) as vv3: self.assertTrue(vv3.is_string) self.assertFalse(vv3.is_int) self.assertFalse(vv3.is_float) npf = vv3.np self.assertEqual(len(npf), 4) self.assertEqual(npf[0], '4000') self.assertEqual(npf[3], '-70') npf, fid = vv3.get_data(np.float) self.assertEqual(npf[0], 4000.) self.assertEqual(npf[3], -70.) self.assertEqual(vv3.min_max(), (-70., 4000.)) with gxvv.GXvv(['maki', 'bob']) as vv: self.assertEqual(vv.min_max(), (None, None)) self.assertEqual(gxvv.GXvv(['*', 1]).min_max(), (1., 1.)) def test_strings(self): self.start() fidvv = (99,0.1) npdata = np.array(["name", "maki", "neil", "rider"]) with gxvv.GXvv(npdata, fid=fidvv) as vv: self.assertTrue(vv.is_string) self.assertFalse(vv.is_int) self.assertFalse(vv.is_float) self.assertEqual(vv.fid,fidvv) self.assertEqual(vv.length,len(npdata)) self.assertEqual(vv.gxtype,-20) self.assertTrue(vv.dtype.type is np.str_) self.assertEqual(str(vv.dtype),'<U5') npd,fid = vv.get_data(vv.dtype) self.assertEqual(npd[0],"name") self.assertEqual(npd[1],"maki") self.assertEqual(npd[2],"neil") self.assertEqual(npd[3],"rider") npd,fid = vv.get_data(vv.dtype,start=2,n=2) self.assertEqual(npd[0],"neil") self.assertEqual(npd[1],"rider") self.assertEqual(fid,(99.2,0.1)) vv.fill(2.5) self.assertEqual(vv[0][0], '2.5') self.assertEqual(vv[len(vv) - 1][0], '2.5') npdata = np.array(["1","2","3","4000","*"]) with 
gxvv.GXvv(npdata, fid=fid) as vv: npd,fid = vv.get_data(np.float) self.assertEqual(npd[0],1.0) self.assertEqual(npd[1],2.0) self.assertEqual(npd[2],3.0) self.assertEqual(npd[3],4000.0) self.assertTrue(np.isnan(npd[4])) npdata = np.array(["1","2","3","4000","40000","*"]) with gxvv.GXvv(npdata, fid=fid) as vv: npd,fid = vv.get_data(np.int) self.assertEqual(npd[0],1) self.assertEqual(npd[1],2) self.assertEqual(npd[2],3) self.assertEqual(npd[3],4000) self.assertEqual(npd[4],40000) self.assertEqual(npd[5],gxapi.iDUMMY) with gxvv.GXvv(npdata, fid=fid, dtype=np.float) as vv: npd, fid = vv.get_data(np.int) self.assertEqual(npd[0],1) self.assertEqual(npd[1],2) self.assertEqual(npd[2],3) self.assertEqual(npd[3],4000) self.assertEqual(npd[4],40000) self.assertEqual(npd[5], gxapi.iDUMMY) npd, fid = vv.get_data(np.int32) self.assertEqual(npd[5], gxapi.GS_S4DM) npd, fid = vv.get_data(np.int64) self.assertEqual(npd[5], gxapi.GS_S8DM) npd, fid = vv.get_data(np.float) self.assertTrue(np.isnan(npd[5])) npd, fid = vv.get_data(np.float32) self.assertTrue(np.isnan(npd[5])) npd, fid = vv.get_data(np.float64) self.assertTrue(np.isnan(npd[5])) with gxvv.GXvv(npdata, fid=fid, dtype=np.float32) as vv: npd, fid = vv.get_data(np.float) self.assertTrue(np.isnan(npd[5])) npd, fid = vv.get_data(np.float32) self.assertTrue(np.isnan(npd[5])) npd, fid = vv.get_data(np.float64) self.assertTrue(np.isnan(npd[5])) with gxvv.GXvv(npdata, fid=fid, dtype=np.int64) as vv: npd, fid = vv.get_data(np.float) self.assertTrue(np.isnan(npd[5])) npdata = np.array(["1","2","3","4000","40000","*"]) with gxvv.GXvv(npdata, fid=fid) as vv: npd,fid = vv.get_data(np.int,start=2, n=3) self.assertEqual(npd[0],3) self.assertEqual(npd[1],4000) self.assertEqual(npd[2],40000) npdata = np.array(["make_it_big enough"]) with gxvv.GXvv(npdata, fid=fid) as vv: npd, fid = vv.get_data(np.int, start=0, n=1) self.assertEqual(npd[0], gxapi.iDUMMY) npdata = np.array([1.,2.,-30.,-87.66662]) vv.set_data(npdata) npd, fid = 
vv.get_data(start=0, n=4) self.assertEqual(npd[0], "1.0") self.assertEqual(npd[2], "-30.0") self.assertEqual(npd[3], "-87.66662") npdata = np.array([1, 2, 3]) with gxvv.GXvv(npdata, fid=fid) as vv: npd = vv.np self.assertEqual(npd[1], 2) npdata = np.array([1.,2.,-30.,-87.66662]) vv.set_data(npdata) npd, fid = vv.get_data(start=0, n=4) self.assertEqual(npd[0], 1) self.assertEqual(npd[2], -30) self.assertEqual(npd[3], -88) def test_string(self): self.start() l = [1, 2, 3] with gxvv.GXvv(l, dtype='U2') as vv: self.assertEqual(list(vv.np), ['1', '2', '3']) # Since we are using UTF-8 internally characters can take anywhere between 1 and 4 bytes. # Specifying a numpy dtype to instantiate VV will ensure the internal space is enough to allocate up to # that 4 times the Unicode characters, however any Numpy arrays will limit the characters to the passed dtype. l = [1, 2, "abcdefghijklmnopqrstuvxyz"] with gxvv.GXvv(l, dtype='U4') as vv: self.assertEqual(list(vv.np), ['1', '2', 'abcd']) # The following 4-byte UTF-8 characters can be correctly extracted (to limits of what is specified). # Characters from http://www.i18nguy.com/unicode/supplementary-test.html l = [1, 2, "𠜎𠜱𠝹𠱓𠱸𠲖"] with gxvv.GXvv(l, dtype='U4') as vv: self.assertEqual(list(vv.np), ['1', '2', '𠜎𠜱𠝹𠱓']) with gxvv.GXvv(l, dtype='U2') as vv: self.assertEqual(list(vv.np), ['1', '2', '𠜎𠜱']) def test_iterator(self): self.start() with gxvv.GXvv(range(1000)) as vv: l2 = [v for v in vv] self.assertEqual(len(l2), 1000) self.assertEqual(l2[0], (0., 0)) self.assertEqual(l2[999], (999., 999)) vvlist = vv.list() self.assertEqual(len(vvlist), 1000) self.assertEqual(vvlist[0], 0.0) self.assertEqual(vvlist[999], 999.) 
def test_uom(self): self.start() with gxvv.GXvv(range(1000), unit_of_measure='maki') as vv: self.assertEqual(vv.unit_of_measure, 'maki') vv.unit_of_measure = 'nT' self.assertEqual(vv.unit_of_measure, 'nT') def test_dim(self): self.start() data = np.array(range(1000), dtype=np.float64).reshape((500, 2)) with gxvv.GXvv(data) as vv: self.assertEqual(vv.dim, 2) self.assertEqual(vv.np.shape, (500, 2)) self.assertEqual(tuple(vv.np[499,:]), (998., 999.)) data = np.array(range(300), dtype=np.float64).reshape((100, 3)) with gxvv.GXvv(data) as vv: self.assertEqual(vv.dim, 3) self.assertEqual(vv.np.shape, (100, 3)) self.assertEqual(tuple(vv.np[99,:]), (297., 298., 299.)) with gxvv.GXvv(dim=3) as vv: vv.set_data(np.array(range(300), dtype=np.float64)) self.assertEqual(vv.dim, 3) self.assertEqual(vv.np.shape, (100, 3)) self.assertEqual(tuple(vv.np[99, :]), (297., 298., 299.)) with gxvv.GXvv(dim=3) as vv: data = np.array(range(300), dtype=np.float64) data[1] = data[299] = np.nan vv.set_data(data) self.assertTrue(np.isnan(data[299])) self.assertEqual(vv.dim, 3) self.assertEqual(vv.np.shape, (100, 3)) self.assertEqual(tuple(vv.np[99, :2]), (297., 298.)) self.assertTrue(np.isnan(vv.np[99, 2])) def test_vvset(self): self.start() npd = np.array(range(12), dtype=np.float64).reshape(-1, 2) vv = gxvv.vvset_from_np(npd) self.assertEqual(len(vv), npd.shape[1]) self.assertEqual(len(vv[0]), npd.shape[0]) vv = gxvv.vvset_from_np(npd, axis=0) self.assertEqual(len(vv), npd.shape[0]) self.assertEqual(len(vv[0]), npd.shape[1]) npd = np.array(range(48), dtype=np.float64).reshape(6, 2, 4) vv = gxvv.vvset_from_np(npd) self.assertEqual(len(vv), 4) self.assertEqual(len(vv[0]), 12) vv = gxvv.vvset_from_np(npd, axis=0) self.assertEqual(len(vv), 12) self.assertEqual(len(vv[0]), 4) npd = gxvv.np_from_vvset(vv) self.assertEqual(npd.shape, (4, 12)) npd = gxvv.np_from_vvset(vv, axis=0) self.assertEqual(npd.shape, (12, 4)) def test_vv_creation(self): self.start() vv = gxvv.GXvv(range(6), fid=(-0.5, 2), 
dtype=np.float32, unit_of_measure='maki') vv2 = gxvv.GXvv(vv) self.assertTrue(vv == vv2) self.assertEqual(tuple(vv2.np), tuple(vv.np)) self.assertEqual(vv.fid, vv2.fid) self.assertEqual(vv.dim, vv2.dim) self.assertEqual(vv.unit_of_measure, vv2.unit_of_measure) self.assertEqual(vv.dtype, vv2.dtype) vv.fid = (0., 1.) self.assertFalse(vv == vv2) vv2 = gxvv.GXvv(vv, unit_of_measure='maki2') self.assertFalse(vv == vv2) vv2 = gxvv.GXvv(vv, dim=1) self.assertTrue(vv == vv2) vv2 = gxvv.GXvv(vv, dim=2) self.assertFalse(vv == vv2) vv2 = gxvv.GXvv(vv, dim=3) self.assertFalse(vv == vv2) vv3 = gxvv.GXvv(vv2, dim=1) self.assertTrue(vv == vv3) def test_empty(self): self.start() empty = np.array([]) vv = gxvv.GXvv(empty) self.assertEqual(len(vv), 0) self.assertEqual(vv.np.size, 0) ############################################################################################## if __name__ == '__main__': unittest.main()<file_sep>/docs/GXDAT.rst .. _GXDAT: GXDAT class ================================== .. autoclass:: geosoft.gxapi.GXDAT :members: .. _DAT_FILE: DAT_FILE constants ----------------------------------------------------------------------- Type of grid .. autodata:: geosoft.gxapi.DAT_FILE_GRID :annotation: .. autoattribute:: geosoft.gxapi.DAT_FILE_GRID .. autodata:: geosoft.gxapi.DAT_FILE_IMAGE :annotation: .. autoattribute:: geosoft.gxapi.DAT_FILE_IMAGE .. _DAT_FILE_FORM: DAT_FILE_FORM constants ----------------------------------------------------------------------- Type of form .. autodata:: geosoft.gxapi.DAT_FILE_FORM_OPEN :annotation: .. autoattribute:: geosoft.gxapi.DAT_FILE_FORM_OPEN .. autodata:: geosoft.gxapi.DAT_FILE_FORM_SAVE :annotation: .. autoattribute:: geosoft.gxapi.DAT_FILE_FORM_SAVE .. _DAT_XGD: DAT_XGD constants ----------------------------------------------------------------------- `GXDAT <geosoft.gxapi.GXDAT>` Open modes .. autodata:: geosoft.gxapi.DAT_XGD_READ :annotation: .. autoattribute:: geosoft.gxapi.DAT_XGD_READ .. 
autodata:: geosoft.gxapi.DAT_XGD_NEW :annotation: .. autoattribute:: geosoft.gxapi.DAT_XGD_NEW .. autodata:: geosoft.gxapi.DAT_XGD_WRITE :annotation: .. autoattribute:: geosoft.gxapi.DAT_XGD_WRITE <file_sep>/geosoft/gxapi/GXARCMAP.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXARCMAP(gxapi_cy.WrapARCMAP): """ GXARCMAP class. This library is not a class. It contains various utilities used in maps and layers by the Geosoft extensions for ArcGIS. """ def __init__(self, handle=0): super(GXARCMAP, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXARCMAP <geosoft.gxapi.GXARCMAP>` :returns: A null `GXARCMAP <geosoft.gxapi.GXARCMAP>` :rtype: GXARCMAP """ return GXARCMAP() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def change_size(cls, x, y): """ Changes the custom page size of the ArcGIS Map document. :param x: X Size (mm) :param y: Y Size (mm) :type x: float :type y: float .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapARCMAP._change_size(GXContext._get_tls_geo(), x, y) @classmethod def display_in_3d_view(cls, file): """ Display a file in 3D view :param file: File Name :type file: str .. 
versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapARCMAP._display_in_3d_view(GXContext._get_tls_geo(), file.encode()) @classmethod def export_feature_layer_by_name_to_3d_file(cls, mxd_file, dataframe_name, layer_name, output_file): """ Exports the shapes from a feature layer of the ArcMap document to a 3D File. :param mxd_file: `GXMXD <geosoft.gxapi.GXMXD>` filename :param dataframe_name: Dataframe name :param layer_name: Layer name :param output_file: Output file name :type mxd_file: str :type dataframe_name: str :type layer_name: str :type output_file: str .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapARCMAP._export_feature_layer_by_name_to_3d_file(GXContext._get_tls_geo(), mxd_file.encode(), dataframe_name.encode(), layer_name.encode(), output_file.encode()) @classmethod def export_selected_feature_layer_to_3d_file(cls, output_file): """ Exports the shapes from the currently selected feature layer (if any) in ArcMap to a 3D file (only on oriented frames i.e. sections). :param output_file: Output file name :type output_file: str .. 
versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapARCMAP._export_selected_feature_layer_to_3d_file(GXContext._get_tls_geo(), output_file.encode()) @classmethod def get_current_document_info(cls, mxd, layer, map): """ Get some info on the current `GXMXD <geosoft.gxapi.GXMXD>` in ArcMap and selected layer (if any) :param mxd: `GXMXD <geosoft.gxapi.GXMXD>` filename :param layer: Selected Layer name (If a layer is selected) :param map: Dataframe name containing selected layer (If a layer is selected) :type mxd: str_ref :type layer: str_ref :type map: str_ref .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ mxd.value, layer.value, map.value = gxapi_cy.WrapARCMAP._get_current_document_info(GXContext._get_tls_geo(), mxd.value.encode(), layer.value.encode(), map.value.encode()) @classmethod def get_selected_layer_info(cls, layer_number, layer, map): """ Get the name info on the specified selected layer :param layer_number: Selected layer number :param layer: Selected Layer name :param map: Dataframe name containing selected layer :type layer_number: int :type layer: str_ref :type map: str_ref .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ layer.value, map.value = gxapi_cy.WrapARCMAP._get_selected_layer_info(GXContext._get_tls_geo(), layer_number, layer.value.encode(), map.value.encode()) @classmethod def get_number_of_selected_layers(cls): """ Get the number of selected layers in the TOC Returns The number of layers selected. :rtype: int .. 
versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapARCMAP._get_number_of_selected_layers(GXContext._get_tls_geo()) return ret_val @classmethod def load_map(cls, map, extra_csv, layer_tag, flags): """ Loads a Geosoft map into the ArcMap document. :param map: Map File Name :param extra_csv: Optional Extra Datasets CSV Filename (Rasters and shape files to display with layers) :param layer_tag: Optional frame/layer tag (suffix) :param flags: Combination of :ref:`ARCMAP_LOAD_FLAGS` :type map: str :type extra_csv: str :type layer_tag: str :type flags: int :returns: 0 - OK 1 - Error -1 - Canceled :rtype: int .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The extra datasets CSV should contain the the following fields: ID - Unique identifier DATASOURCE - Filename TYPE - RASTER and SHAPE supported MAPMATCH - Map to associate with (used for grouping logic) VIEWMATCH - View to match with in associated map (used for grouping logic) ZONEFILE - Used for type RASTER """ ret_val = gxapi_cy.WrapARCMAP._load_map(GXContext._get_tls_geo(), map.encode(), extra_csv.encode(), layer_tag.encode(), flags) return ret_val @classmethod def load_map_ex(cls, map, view, extra_csv, layer_tag, flags): """ Loads a Geosoft map into the ArcMap document, specifying which View to use as Data view. :param map: Map File Name :param view: View Name :param extra_csv: Optional Extra Datasets CSV Filename (Rasters and shape files to display with layers) :param layer_tag: Optional frame/layer tag (suffix) :param flags: Combination of :ref:`ARCMAP_LOAD_FLAGS` :type map: str :type view: str :type extra_csv: str :type layer_tag: str :type flags: int :returns: 0 - OK 1 - Error -1 - Canceled :rtype: int .. 
versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The extra datasets CSV should contain the the following fields: ID - Unique identifier DATASOURCE - Filename TYPE - RASTER and SHAPE supported MAPMATCH - Map to associate with (used for grouping logic) VIEWMATCH - View to match with in associated map (used for grouping logic) ZONEFILE - Used for type RASTER """ ret_val = gxapi_cy.WrapARCMAP._load_map_ex(GXContext._get_tls_geo(), map.encode(), view.encode(), extra_csv.encode(), layer_tag.encode(), flags) return ret_val @classmethod def load_shape(cls, shp, delete_existing): """ Load a shape file into ArcMap. :param shp: Shape file to load :param delete_existing: Delete existing layers? :type shp: str :type delete_existing: int :returns: 0- OK, 1 - Error, -1 - Cancel :rtype: int .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapARCMAP._load_shape(GXContext._get_tls_geo(), shp.encode(), delete_existing) return ret_val @classmethod def load_spf(cls, shp, num_shp): """ Load all the shape files generated by importing a SPF into ArcMap. :param shp: List of shape files to load :param num_shp: Number of shape files :type shp: str :type num_shp: int :returns: 0- OK, 1 - Error, -1 - Cancel :rtype: int .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapARCMAP._load_spf(GXContext._get_tls_geo(), shp.encode(), num_shp) return ret_val @classmethod def load_lyr(cls, file): """ Load a LYR file to the current data frame :param file: File Name :type file: str .. 
versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapARCMAP._load_lyr(GXContext._get_tls_geo(), file.encode()) @classmethod def load_map(cls, map, view, extra_csv, layer_tag, fit, activate, prefix): """ Loads a Geosoft map into the current ArcMap document :param map: Map File Name :param view: View Name :param extra_csv: Optional Extra Datasets CSV Filename (Rasters and shape files to display with layers) :param layer_tag: Optional frame/layer tag (suffix) :param fit: Fit to map size :param activate: Activate view (3D) :param prefix: Layer name tag is prefix :type map: str :type view: str :type extra_csv: str :type layer_tag: str :type fit: int :type activate: bool :type prefix: bool .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The extra datasets CSV should contain the the following fields: ID - Unique identifier DATASOURCE - Filename TYPE - RASTER and SHAPE supported MAPMATCH - Map to associate with (used for grouping logic) VIEWMATCH - View to match with in associated map (used for grouping logic) ZONEFILE - Used for type RASTER """ gxapi_cy.WrapARCMAP._load_map(GXContext._get_tls_geo(), map.encode(), view.encode(), extra_csv.encode(), layer_tag.encode(), fit, activate, prefix) @classmethod def load_map_view(cls, map, view, layer, all): """ Load a Geosoft Map as a layer into the current data frame :param map: Map File Name :param view: View Name :param layer: Layer Name :param all: Pass TRUE to also render other views in map (Use second parameter view for location) :type map: str :type view: str :type layer: str :type all: int .. 
versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapARCMAP._load_map_view(GXContext._get_tls_geo(), map.encode(), view.encode(), layer.encode(), all) @classmethod def load_raster(cls, file): """ Load a raster file to the current data frame :param file: File Name :type file: str .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Loads any file type recognized as "raster" formats by ARC `GXGIS <geosoft.gxapi.GXGIS>`. This includes geosoft GRD files. """ gxapi_cy.WrapARCMAP._load_raster(GXContext._get_tls_geo(), file.encode()) @classmethod def load_raster_ex(cls, file): """ Load a raster file to the current data frame and create associated files :param file: File Name :type file: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Loads any file type recognized as "raster" formats by ARC `GXGIS <geosoft.gxapi.GXGIS>`. This includes geosoft GRD files. """ gxapi_cy.WrapARCMAP._load_raster_ex(GXContext._get_tls_geo(), file.encode()) @classmethod def load_shape(cls, file, layer_prefix, layer_suffix): """ Load a `GXSHP <geosoft.gxapi.GXSHP>` file to the current data frame :param file: File Name :param layer_prefix: Layer Name Prefix: An underscore is added automatically :param layer_suffix: Layer Name Suffix An underscore is added automatically :type file: str :type layer_prefix: str :type layer_suffix: str .. 
versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The input layer name is created using the (optional) prefix and suffix as follows:

        Prefix_NAME_Suffix
        """
        # Delegate to the Cython wrapper; GXContext._get_tls_geo() supplies the
        # thread-local (TLS) GX context and str arguments are passed as encoded bytes.
        gxapi_cy.WrapARCMAP._load_shape(GXContext._get_tls_geo(), file.encode(), layer_prefix.encode(), layer_suffix.encode())



    @classmethod
    def map_view_to_shape(cls, map, view, shp, lst):
        """
        Create `GXSHP <geosoft.gxapi.GXSHP>` file(s) from a Geosoft Map view.

        :param map: Map File Name
        :param view: View Name
        :param shp: `GXSHP <geosoft.gxapi.GXSHP>` File Name
        :param lst: List to fill with shape files created
        :type map: str
        :type view: str
        :type shp: str
        :type lst: GXLST

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The output `GXSHP <geosoft.gxapi.GXSHP>` file name(s) are made up as follows (where NAME is the input `GXSHP <geosoft.gxapi.GXSHP>` file name):

        NAME_pt.shp (point objects)
        NAME_ln.shp (line or arc objects)
        NAME_pg.shp (polygon objects)
        """
        # NOTE(review): the parameter name 'map' shadows the Python builtin; it is
        # emitted by the code generator and is part of the public API, so it must
        # not be renamed here.
        gxapi_cy.WrapARCMAP._map_view_to_shape(GXContext._get_tls_geo(), map.encode(), view.encode(), shp.encode(), lst)



    @classmethod
    def query_size(cls, x, y):
        """
        Query the page size in mm of the entire map page.

        :param x: X Size (mm)
        :param y: Y Size (mm)
        :type x: float_ref
        :type y: float_ref

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        # Output values are returned to the caller through the float_ref
        # holders' .value fields.
        x.value, y.value = gxapi_cy.WrapARCMAP._query_size(GXContext._get_tls_geo(), x.value, y.value)



    @classmethod
    def show_layer_by_name_in_3d(cls, mxd_file, dataframe_name, layer_name):
        """
        Shows a layer in ArcMap in a 3D view in an `GXMXD <geosoft.gxapi.GXMXD>`

        :param mxd_file: `GXMXD <geosoft.gxapi.GXMXD>` filename
        :param dataframe_name: Dataframe name
        :param layer_name: Layer name
        :type mxd_file: str
        :type dataframe_name: str
        :type layer_name: str

        .. versionadded:: 8.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapARCMAP._show_layer_by_name_in_3d(GXContext._get_tls_geo(), mxd_file.encode(), dataframe_name.encode(), layer_name.encode())



    @classmethod
    def show_selected_layers_in_3d(cls):
        """
        Shows the selected layers in ArcMap in a 3D view

        .. versionadded:: 8.0

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        gxapi_cy.WrapARCMAP._show_selected_layers_in_3d(GXContext._get_tls_geo())



    @classmethod
    def get_ipj_for_predefined_esri_gcs(cls, ipj, esri_gcs_code):
        """
        Fills an `GXIPJ <geosoft.gxapi.GXIPJ>` with a predefined ESRI GCS

        :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` to fill
        :param esri_gcs_code: Predefined ESRI GCS Code
        :type ipj: GXIPJ
        :type esri_gcs_code: int

        .. versionadded:: 8.0.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        # The GXIPJ object is modified in place; nothing is returned.
        gxapi_cy.WrapARCMAP._get_ipj_for_predefined_esri_gcs(GXContext._get_tls_geo(), ipj, esri_gcs_code)



    @classmethod
    def get_ipj_for_predefined_esri_pcs(cls, ipj, esri_pcs_code):
        """
        Fills an `GXIPJ <geosoft.gxapi.GXIPJ>` with a predefined ESRI PCS

        :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` to fill
        :param esri_pcs_code: Predefined ESRI PCS Code
        :type ipj: GXIPJ
        :type esri_pcs_code: int

        .. versionadded:: 8.0.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
        """
        # The GXIPJ object is modified in place; nothing is returned.
        gxapi_cy.WrapARCMAP._get_ipj_for_predefined_esri_pcs(GXContext._get_tls_geo(), ipj, esri_pcs_code)



### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/docs/GXSTR.rst
.. _GXSTR:

GXSTR class
==================================

.. autoclass:: geosoft.gxapi.GXSTR
   :members:


.. _FILE_EXT:

FILE_EXT constants
-----------------------------------------------------------------------

Extension option

.. autodata:: geosoft.gxapi.FILE_EXT_ADD_IF_NONE
   :annotation:
.. autoattribute:: geosoft.gxapi.FILE_EXT_ADD_IF_NONE

.. autodata:: geosoft.gxapi.FILE_EXT_FORCE
   :annotation:
.. autoattribute:: geosoft.gxapi.FILE_EXT_FORCE


.. _STR_CASE:

STR_CASE constants
-----------------------------------------------------------------------

Case sensitivity

.. autodata:: geosoft.gxapi.STR_CASE_TOLERANT
   :annotation:
.. autoattribute:: geosoft.gxapi.STR_CASE_TOLERANT

.. autodata:: geosoft.gxapi.STR_CASE_SENSITIVE
   :annotation:
.. autoattribute:: geosoft.gxapi.STR_CASE_SENSITIVE

..
_STR_ESCAPE: STR_ESCAPE constants ----------------------------------------------------------------------- How to handle escape .. autodata:: geosoft.gxapi.ESCAPE_CONVERT :annotation: .. autoattribute:: geosoft.gxapi.ESCAPE_CONVERT .. autodata:: geosoft.gxapi.ESCAPE_REPLACE :annotation: .. autoattribute:: geosoft.gxapi.ESCAPE_REPLACE .. _STR_FILE_PART: STR_FILE_PART constants ----------------------------------------------------------------------- Parts of a path string .. autodata:: geosoft.gxapi.STR_FILE_PART_NAME :annotation: .. autoattribute:: geosoft.gxapi.STR_FILE_PART_NAME .. autodata:: geosoft.gxapi.STR_FILE_PART_EXTENSION :annotation: .. autoattribute:: geosoft.gxapi.STR_FILE_PART_EXTENSION .. autodata:: geosoft.gxapi.STR_FILE_PART_DIRECTORY :annotation: .. autoattribute:: geosoft.gxapi.STR_FILE_PART_DIRECTORY .. autodata:: geosoft.gxapi.STR_FILE_PART_VOLUME :annotation: .. autoattribute:: geosoft.gxapi.STR_FILE_PART_VOLUME .. autodata:: geosoft.gxapi.STR_FILE_PART_QUALIFIERS :annotation: .. autoattribute:: geosoft.gxapi.STR_FILE_PART_QUALIFIERS .. autodata:: geosoft.gxapi.STR_FILE_PART_NAME_EXTENSION :annotation: .. autoattribute:: geosoft.gxapi.STR_FILE_PART_NAME_EXTENSION .. autodata:: geosoft.gxapi.STR_FILE_PART_FULLPATH_NO_QUALIFIERS :annotation: .. autoattribute:: geosoft.gxapi.STR_FILE_PART_FULLPATH_NO_QUALIFIERS .. _STR_JUSTIFY: STR_JUSTIFY constants ----------------------------------------------------------------------- String justification style .. autodata:: geosoft.gxapi.STR_JUSTIFY_LEFT :annotation: .. autoattribute:: geosoft.gxapi.STR_JUSTIFY_LEFT .. autodata:: geosoft.gxapi.STR_JUSTIFY_CENTER :annotation: .. autoattribute:: geosoft.gxapi.STR_JUSTIFY_CENTER .. autodata:: geosoft.gxapi.STR_JUSTIFY_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.STR_JUSTIFY_RIGHT .. _STR_TRIM: STR_TRIM constants ----------------------------------------------------------------------- What to trim .. autodata:: geosoft.gxapi.STR_TRIMRIGHT :annotation: .. 
autoattribute:: geosoft.gxapi.STR_TRIMRIGHT .. autodata:: geosoft.gxapi.STR_TRIMLEFT :annotation: .. autoattribute:: geosoft.gxapi.STR_TRIMLEFT .. autodata:: geosoft.gxapi.STR_TRIMBOTH :annotation: .. autoattribute:: geosoft.gxapi.STR_TRIMBOTH <file_sep>/docs/GXIEXP.rst .. _GXIEXP: GXIEXP class ================================== .. autoclass:: geosoft.gxapi.GXIEXP :members: <file_sep>/docs/GXDMPPLY.rst .. _GXDMPPLY: GXDMPPLY class ================================== .. autoclass:: geosoft.gxapi.GXDMPPLY :members: <file_sep>/docs/GXLST.rst .. _GXLST: GXLST class ================================== .. autoclass:: geosoft.gxapi.GXLST :members: .. _LST_ITEM: LST_ITEM constants ----------------------------------------------------------------------- `GXLST <geosoft.gxapi.GXLST>` data access .. autodata:: geosoft.gxapi.LST_ITEM_NAME :annotation: .. autoattribute:: geosoft.gxapi.LST_ITEM_NAME .. autodata:: geosoft.gxapi.LST_ITEM_VALUE :annotation: .. autoattribute:: geosoft.gxapi.LST_ITEM_VALUE <file_sep>/docs/GXTC.rst .. _GXTC: GXTC class ================================== .. autoclass:: geosoft.gxapi.GXTC :members: .. _TC_OPT: TC_OPT constants ----------------------------------------------------------------------- Optimization .. autodata:: geosoft.gxapi.TC_OPT_NONE :annotation: .. autoattribute:: geosoft.gxapi.TC_OPT_NONE .. autodata:: geosoft.gxapi.TC_OPT_MAX :annotation: .. autoattribute:: geosoft.gxapi.TC_OPT_MAX .. _TC_SURVEYTYPE: TC_SURVEYTYPE constants ----------------------------------------------------------------------- Survey Type .. autodata:: geosoft.gxapi.TC_SURVEYTYPE_GROUND :annotation: .. autoattribute:: geosoft.gxapi.TC_SURVEYTYPE_GROUND .. autodata:: geosoft.gxapi.TC_SURVEYTYPE_SHIPBORNE :annotation: .. autoattribute:: geosoft.gxapi.TC_SURVEYTYPE_SHIPBORNE .. autodata:: geosoft.gxapi.TC_SURVEYTYPE_AIRBORNE :annotation: .. autoattribute:: geosoft.gxapi.TC_SURVEYTYPE_AIRBORNE .. 
_GG_ELEMENT: GG_ELEMENT constants ----------------------------------------------------------------------- GG element .. autodata:: geosoft.gxapi.GG_ELEMENT_XX :annotation: .. autoattribute:: geosoft.gxapi.GG_ELEMENT_XX .. autodata:: geosoft.gxapi.GG_ELEMENT_YY :annotation: .. autoattribute:: geosoft.gxapi.GG_ELEMENT_YY .. autodata:: geosoft.gxapi.GG_ELEMENT_XY :annotation: .. autoattribute:: geosoft.gxapi.GG_ELEMENT_XY .. autodata:: geosoft.gxapi.GG_ELEMENT_XZ :annotation: .. autoattribute:: geosoft.gxapi.GG_ELEMENT_XZ .. autodata:: geosoft.gxapi.GG_ELEMENT_YZ :annotation: .. autoattribute:: geosoft.gxapi.GG_ELEMENT_YZ <file_sep>/docs/GXDU.rst .. _GXDU: GXDU class ================================== .. autoclass:: geosoft.gxapi.GXDU :members: .. _DB_DUP: DB_DUP constants ----------------------------------------------------------------------- Duplicate Types .. autodata:: geosoft.gxapi.DB_DUP_FIRST :annotation: .. autoattribute:: geosoft.gxapi.DB_DUP_FIRST .. autodata:: geosoft.gxapi.DB_DUP_AVERAGE :annotation: .. autoattribute:: geosoft.gxapi.DB_DUP_AVERAGE .. autodata:: geosoft.gxapi.DB_DUP_MINIMUM :annotation: .. autoattribute:: geosoft.gxapi.DB_DUP_MINIMUM .. autodata:: geosoft.gxapi.DB_DUP_MAXIMUM :annotation: .. autoattribute:: geosoft.gxapi.DB_DUP_MAXIMUM .. autodata:: geosoft.gxapi.DB_DUP_MEDIAN :annotation: .. autoattribute:: geosoft.gxapi.DB_DUP_MEDIAN .. autodata:: geosoft.gxapi.DB_DUP_LAST :annotation: .. autoattribute:: geosoft.gxapi.DB_DUP_LAST .. _DB_DUPEDIT: DB_DUPEDIT constants ----------------------------------------------------------------------- Duplicate Edit Flags .. autodata:: geosoft.gxapi.DB_DUPEDIT_SINGLE :annotation: .. autoattribute:: geosoft.gxapi.DB_DUPEDIT_SINGLE .. autodata:: geosoft.gxapi.DB_DUPEDIT_ALL :annotation: .. autoattribute:: geosoft.gxapi.DB_DUPEDIT_ALL .. _DU_CHANNELS: DU_CHANNELS constants ----------------------------------------------------------------------- Channels to Display .. 
autodata:: geosoft.gxapi.DU_CHANNELS_DISPLAYED :annotation: .. autoattribute:: geosoft.gxapi.DU_CHANNELS_DISPLAYED .. autodata:: geosoft.gxapi.DU_CHANNELS_ALL :annotation: .. autoattribute:: geosoft.gxapi.DU_CHANNELS_ALL .. _DU_EXPORT: DU_EXPORT constants ----------------------------------------------------------------------- Export Type .. autodata:: geosoft.gxapi.DU_EXPORT_CSV :annotation: .. autoattribute:: geosoft.gxapi.DU_EXPORT_CSV .. autodata:: geosoft.gxapi.DU_EXPORT_ODDF :annotation: .. autoattribute:: geosoft.gxapi.DU_EXPORT_ODDF .. autodata:: geosoft.gxapi.DU_EXPORT_POST_PC :annotation: .. autoattribute:: geosoft.gxapi.DU_EXPORT_POST_PC .. autodata:: geosoft.gxapi.DU_EXPORT_POST_UNIX :annotation: .. autoattribute:: geosoft.gxapi.DU_EXPORT_POST_UNIX .. _DU_FILL: DU_FILL constants ----------------------------------------------------------------------- Filling Options .. autodata:: geosoft.gxapi.DU_FILL_INSIDE :annotation: .. autoattribute:: geosoft.gxapi.DU_FILL_INSIDE .. autodata:: geosoft.gxapi.DU_FILL_OUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.DU_FILL_OUTSIDE .. _DU_IMPORT: DU_IMPORT constants ----------------------------------------------------------------------- Import Mode .. autodata:: geosoft.gxapi.DU_IMPORT_APPEND :annotation: .. autoattribute:: geosoft.gxapi.DU_IMPORT_APPEND .. autodata:: geosoft.gxapi.DU_IMPORT_REPLACE :annotation: .. autoattribute:: geosoft.gxapi.DU_IMPORT_REPLACE .. autodata:: geosoft.gxapi.DU_IMPORT_MERGE :annotation: .. autoattribute:: geosoft.gxapi.DU_IMPORT_MERGE .. autodata:: geosoft.gxapi.DU_IMPORT_MERGE_APPEND :annotation: .. autoattribute:: geosoft.gxapi.DU_IMPORT_MERGE_APPEND .. _DU_INTERP: DU_INTERP constants ----------------------------------------------------------------------- Inside Interpolation Method .. autodata:: geosoft.gxapi.DU_INTERP_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.DU_INTERP_NEAREST .. autodata:: geosoft.gxapi.DU_INTERP_LINEAR :annotation: .. 
autoattribute:: geosoft.gxapi.DU_INTERP_LINEAR .. autodata:: geosoft.gxapi.DU_INTERP_CUBIC :annotation: .. autoattribute:: geosoft.gxapi.DU_INTERP_CUBIC .. autodata:: geosoft.gxapi.DU_INTERP_AKIMA :annotation: .. autoattribute:: geosoft.gxapi.DU_INTERP_AKIMA .. autodata:: geosoft.gxapi.DU_INTERP_PREDICT :annotation: .. autoattribute:: geosoft.gxapi.DU_INTERP_PREDICT .. _DU_INTERP_EDGE: DU_INTERP_EDGE constants ----------------------------------------------------------------------- Edge Interpolation Method .. autodata:: geosoft.gxapi.DU_INTERP_EDGE_NONE :annotation: .. autoattribute:: geosoft.gxapi.DU_INTERP_EDGE_NONE .. autodata:: geosoft.gxapi.DU_INTERP_EDGE_SAME :annotation: .. autoattribute:: geosoft.gxapi.DU_INTERP_EDGE_SAME .. autodata:: geosoft.gxapi.DU_INTERP_EDGE_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.DU_INTERP_EDGE_NEAREST .. autodata:: geosoft.gxapi.DU_INTERP_EDGE_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.DU_INTERP_EDGE_LINEAR .. _DU_LAB_TYPE: DU_LAB_TYPE constants ----------------------------------------------------------------------- File Types .. autodata:: geosoft.gxapi.DU_LAB_TYPE_FREE :annotation: .. autoattribute:: geosoft.gxapi.DU_LAB_TYPE_FREE .. autodata:: geosoft.gxapi.DU_LAB_TYPE_COMMA :annotation: .. autoattribute:: geosoft.gxapi.DU_LAB_TYPE_COMMA .. _DU_LEVEL: DU_LEVEL constants ----------------------------------------------------------------------- Leveling Options .. autodata:: geosoft.gxapi.DU_LEVEL_LINES :annotation: .. autoattribute:: geosoft.gxapi.DU_LEVEL_LINES .. autodata:: geosoft.gxapi.DU_LEVEL_TIES :annotation: .. autoattribute:: geosoft.gxapi.DU_LEVEL_TIES .. autodata:: geosoft.gxapi.DU_LEVEL_ALL :annotation: .. autoattribute:: geosoft.gxapi.DU_LEVEL_ALL .. _DU_LINEOUT: DU_LINEOUT constants ----------------------------------------------------------------------- Lineout Options (du.h) .. autodata:: geosoft.gxapi.DU_LINEOUT_SINGLE :annotation: .. autoattribute:: geosoft.gxapi.DU_LINEOUT_SINGLE .. 
autodata:: geosoft.gxapi.DU_LINEOUT_MULTIPLE :annotation: .. autoattribute:: geosoft.gxapi.DU_LINEOUT_MULTIPLE .. _DU_FEATURE_TYPE_OUTPUT: DU_FEATURE_TYPE_OUTPUT constants ----------------------------------------------------------------------- Export to geodatabase feature type (du.h) .. autodata:: geosoft.gxapi.DU_FEATURE_TYPE_OUTPUT_POINT :annotation: .. autoattribute:: geosoft.gxapi.DU_FEATURE_TYPE_OUTPUT_POINT .. autodata:: geosoft.gxapi.DU_FEATURE_TYPE_OUTPUT_LINE :annotation: .. autoattribute:: geosoft.gxapi.DU_FEATURE_TYPE_OUTPUT_LINE .. _DU_GEODATABASE_EXPORT_TYPE: DU_GEODATABASE_EXPORT_TYPE constants ----------------------------------------------------------------------- Export to geodatabase overwrite mode(du.h) .. autodata:: geosoft.gxapi.DU_GEODATABASE_EXPORT_TYPE_OVERWRITE_GEODATABASE :annotation: .. autoattribute:: geosoft.gxapi.DU_GEODATABASE_EXPORT_TYPE_OVERWRITE_GEODATABASE .. autodata:: geosoft.gxapi.DU_GEODATABASE_EXPORT_TYPE_OVERWRITE_FEATURECLASS :annotation: .. autoattribute:: geosoft.gxapi.DU_GEODATABASE_EXPORT_TYPE_OVERWRITE_FEATURECLASS .. autodata:: geosoft.gxapi.DU_GEODATABASE_EXPORT_TYPE_APPEND :annotation: .. autoattribute:: geosoft.gxapi.DU_GEODATABASE_EXPORT_TYPE_APPEND .. _DU_LINES: DU_LINES constants ----------------------------------------------------------------------- Lines to display .. autodata:: geosoft.gxapi.DU_LINES_DISPLAYED :annotation: .. autoattribute:: geosoft.gxapi.DU_LINES_DISPLAYED .. autodata:: geosoft.gxapi.DU_LINES_SELECTED :annotation: .. autoattribute:: geosoft.gxapi.DU_LINES_SELECTED .. autodata:: geosoft.gxapi.DU_LINES_ALL :annotation: .. autoattribute:: geosoft.gxapi.DU_LINES_ALL .. _DU_LOADLTB: DU_LOADLTB constants ----------------------------------------------------------------------- Load table options .. autodata:: geosoft.gxapi.DU_LOADLTB_REPLACE :annotation: .. autoattribute:: geosoft.gxapi.DU_LOADLTB_REPLACE .. autodata:: geosoft.gxapi.DU_LOADLTB_APPEND :annotation: .. 
autoattribute:: geosoft.gxapi.DU_LOADLTB_APPEND .. _DU_LOOKUP: DU_LOOKUP constants ----------------------------------------------------------------------- Lookup Mode .. autodata:: geosoft.gxapi.DU_LOOKUP_EXACT :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_EXACT .. autodata:: geosoft.gxapi.DU_LOOKUP_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_NEAREST .. autodata:: geosoft.gxapi.DU_LOOKUP_INTERPOLATE :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_INTERPOLATE .. autodata:: geosoft.gxapi.DU_LOOKUP_NEARESTCLOSE :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_NEARESTCLOSE .. autodata:: geosoft.gxapi.DU_LOOKUP_INTERPCLOSE :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_INTERPCLOSE .. autodata:: geosoft.gxapi.DU_LOOKUP_INTERPOLATE_DUMMYOUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_INTERPOLATE_DUMMYOUTSIDE .. autodata:: geosoft.gxapi.DU_LOOKUP_INTERPOLATE_CONSTOUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_INTERPOLATE_CONSTOUTSIDE .. autodata:: geosoft.gxapi.DU_LOOKUP_INTERPOLATE_EXTPLOUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_INTERPOLATE_EXTPLOUTSIDE .. autodata:: geosoft.gxapi.DU_LOOKUP_MAXOPTION :annotation: .. autoattribute:: geosoft.gxapi.DU_LOOKUP_MAXOPTION .. _DU_MASK: DU_MASK constants ----------------------------------------------------------------------- Masking Options .. autodata:: geosoft.gxapi.DU_MASK_INSIDE :annotation: .. autoattribute:: geosoft.gxapi.DU_MASK_INSIDE .. autodata:: geosoft.gxapi.DU_MASK_OUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.DU_MASK_OUTSIDE .. _DU_MERGE: DU_MERGE constants ----------------------------------------------------------------------- Merge flags .. autodata:: geosoft.gxapi.DU_MERGE_APPEND :annotation: .. autoattribute:: geosoft.gxapi.DU_MERGE_APPEND .. _DU_MODFID: DU_MODFID constants ----------------------------------------------------------------------- Fid Update Options .. 
autodata:: geosoft.gxapi.DU_MODFID_INSERT :annotation: .. autoattribute:: geosoft.gxapi.DU_MODFID_INSERT .. autodata:: geosoft.gxapi.DU_MODFID_DELETE :annotation: .. autoattribute:: geosoft.gxapi.DU_MODFID_DELETE .. autodata:: geosoft.gxapi.DU_MODFID_APPEND :annotation: .. autoattribute:: geosoft.gxapi.DU_MODFID_APPEND .. _DU_MOVE: DU_MOVE constants ----------------------------------------------------------------------- Move Style .. autodata:: geosoft.gxapi.DU_MOVE_ABSOLUTE :annotation: .. autoattribute:: geosoft.gxapi.DU_MOVE_ABSOLUTE .. autodata:: geosoft.gxapi.DU_MOVE_MINUS :annotation: .. autoattribute:: geosoft.gxapi.DU_MOVE_MINUS .. autodata:: geosoft.gxapi.DU_MOVE_PLUS :annotation: .. autoattribute:: geosoft.gxapi.DU_MOVE_PLUS .. autodata:: geosoft.gxapi.DU_MOVE_INTERP :annotation: .. autoattribute:: geosoft.gxapi.DU_MOVE_INTERP .. _DU_REFID: DU_REFID constants ----------------------------------------------------------------------- Interpolation mode .. autodata:: geosoft.gxapi.DU_REFID_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.DU_REFID_LINEAR .. autodata:: geosoft.gxapi.DU_REFID_MINCUR :annotation: .. autoattribute:: geosoft.gxapi.DU_REFID_MINCUR .. autodata:: geosoft.gxapi.DU_REFID_AKIMA :annotation: .. autoattribute:: geosoft.gxapi.DU_REFID_AKIMA .. autodata:: geosoft.gxapi.DU_REFID_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.DU_REFID_NEAREST .. _DU_SORT: DU_SORT constants ----------------------------------------------------------------------- Sort Direction .. autodata:: geosoft.gxapi.DU_SORT_ASCENDING :annotation: .. autoattribute:: geosoft.gxapi.DU_SORT_ASCENDING .. autodata:: geosoft.gxapi.DU_SORT_DESCENDING :annotation: .. autoattribute:: geosoft.gxapi.DU_SORT_DESCENDING .. _DU_SPLITLINE: DU_SPLITLINE constants ----------------------------------------------------------------------- Sort Direction .. autodata:: geosoft.gxapi.DU_SPLITLINE_XYPOSITION :annotation: .. autoattribute:: geosoft.gxapi.DU_SPLITLINE_XYPOSITION .. 
autodata:: geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL :annotation: .. autoattribute:: geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL .. autodata:: geosoft.gxapi.DU_SPLITLINE_TOVERSIONS :annotation: .. autoattribute:: geosoft.gxapi.DU_SPLITLINE_TOVERSIONS .. _DU_STORAGE: DU_STORAGE constants ----------------------------------------------------------------------- Storage Type .. autodata:: geosoft.gxapi.DU_STORAGE_LINE :annotation: .. autoattribute:: geosoft.gxapi.DU_STORAGE_LINE .. autodata:: geosoft.gxapi.DU_STORAGE_GROUP :annotation: .. autoattribute:: geosoft.gxapi.DU_STORAGE_GROUP .. _QC_PLAN_TYPE: QC_PLAN_TYPE constants ----------------------------------------------------------------------- Type Plan .. autodata:: geosoft.gxapi.QC_PLAN_SURVEYLINE :annotation: .. autoattribute:: geosoft.gxapi.QC_PLAN_SURVEYLINE .. autodata:: geosoft.gxapi.QC_PLAN_TIELINE :annotation: .. autoattribute:: geosoft.gxapi.QC_PLAN_TIELINE .. autodata:: geosoft.gxapi.QC_PLAN_BOTHLINES :annotation: .. autoattribute:: geosoft.gxapi.QC_PLAN_BOTHLINES .. _DU_DISTANCE_CHANNEL_TYPE: DU_DISTANCE_CHANNEL_TYPE constants ----------------------------------------------------------------------- Distance channel direction type .. autodata:: geosoft.gxapi.DU_DISTANCE_CHANNEL_MAINTAIN_DIRECTION :annotation: .. autoattribute:: geosoft.gxapi.DU_DISTANCE_CHANNEL_MAINTAIN_DIRECTION .. autodata:: geosoft.gxapi.DU_DISTANCE_CHANNEL_CARTESIAN_COORDINATES :annotation: .. autoattribute:: geosoft.gxapi.DU_DISTANCE_CHANNEL_CARTESIAN_COORDINATES .. _DU_DIRECTGRID_METHOD: DU_DIRECTGRID_METHOD constants ----------------------------------------------------------------------- How to calculate the cell values for direct gridding. .. autodata:: geosoft.gxapi.DU_DIRECTGRID_MIN :annotation: .. autoattribute:: geosoft.gxapi.DU_DIRECTGRID_MIN .. autodata:: geosoft.gxapi.DU_DIRECTGRID_MAX :annotation: .. autoattribute:: geosoft.gxapi.DU_DIRECTGRID_MAX .. autodata:: geosoft.gxapi.DU_DIRECTGRID_MEAN :annotation: .. 
autoattribute:: geosoft.gxapi.DU_DIRECTGRID_MEAN <file_sep>/docs/GXGEOSOFT.rst .. _GXGEOSOFT: GXGEOSOFT class ================================== This is not a class but a collection of global defines. It is used by all functions. .. _CRC_INIT_VALUE: CRC_INIT_VALUE constants ----------------------------------------------------------------------- Initial value for starting a CRC .. autodata:: geosoft.gxapi.CRC_INIT_VALUE :annotation: .. autoattribute:: geosoft.gxapi.CRC_INIT_VALUE .. _DATE_FORMAT: DATE_FORMAT constants ----------------------------------------------------------------------- Old Date formats .. autodata:: geosoft.gxapi.DATE_FORMAT_YYYYMMDD :annotation: .. autoattribute:: geosoft.gxapi.DATE_FORMAT_YYYYMMDD .. autodata:: geosoft.gxapi.DATE_FORMAT_DDMMYYYY :annotation: .. autoattribute:: geosoft.gxapi.DATE_FORMAT_DDMMYYYY .. autodata:: geosoft.gxapi.DATE_FORMAT_MMDDYYYY :annotation: .. autoattribute:: geosoft.gxapi.DATE_FORMAT_MMDDYYYY .. _GEO_DUMMY: GEO_DUMMY constants ----------------------------------------------------------------------- Special numbers indicating NULLL .. autodata:: geosoft.gxapi.iDUMMY :annotation: .. autoattribute:: geosoft.gxapi.iDUMMY .. autodata:: geosoft.gxapi.rDUMMY :annotation: .. autoattribute:: geosoft.gxapi.rDUMMY .. _GEO_FULL_LIMITS: GEO_FULL_LIMITS constants ----------------------------------------------------------------------- Data ranges of all Geosoft types .. autodata:: geosoft.gxapi.GS_S1MX :annotation: .. autoattribute:: geosoft.gxapi.GS_S1MX .. autodata:: geosoft.gxapi.GS_S1MN :annotation: .. autoattribute:: geosoft.gxapi.GS_S1MN .. autodata:: geosoft.gxapi.GS_S1DM :annotation: .. autoattribute:: geosoft.gxapi.GS_S1DM .. autodata:: geosoft.gxapi.GS_U1MX :annotation: .. autoattribute:: geosoft.gxapi.GS_U1MX .. autodata:: geosoft.gxapi.GS_U1MN :annotation: .. autoattribute:: geosoft.gxapi.GS_U1MN .. autodata:: geosoft.gxapi.GS_U1DM :annotation: .. autoattribute:: geosoft.gxapi.GS_U1DM .. 
autodata:: geosoft.gxapi.GS_S2MX :annotation: .. autoattribute:: geosoft.gxapi.GS_S2MX .. autodata:: geosoft.gxapi.GS_S2MN :annotation: .. autoattribute:: geosoft.gxapi.GS_S2MN .. autodata:: geosoft.gxapi.GS_S2DM :annotation: .. autoattribute:: geosoft.gxapi.GS_S2DM .. autodata:: geosoft.gxapi.GS_U2MX :annotation: .. autoattribute:: geosoft.gxapi.GS_U2MX .. autodata:: geosoft.gxapi.GS_U2MN :annotation: .. autoattribute:: geosoft.gxapi.GS_U2MN .. autodata:: geosoft.gxapi.GS_U2DM :annotation: .. autoattribute:: geosoft.gxapi.GS_U2DM .. autodata:: geosoft.gxapi.GS_S4MX :annotation: .. autoattribute:: geosoft.gxapi.GS_S4MX .. autodata:: geosoft.gxapi.GS_S4MN :annotation: .. autoattribute:: geosoft.gxapi.GS_S4MN .. autodata:: geosoft.gxapi.GS_S4DM :annotation: .. autoattribute:: geosoft.gxapi.GS_S4DM .. autodata:: geosoft.gxapi.GS_U4MX :annotation: .. autoattribute:: geosoft.gxapi.GS_U4MX .. autodata:: geosoft.gxapi.GS_U4MN :annotation: .. autoattribute:: geosoft.gxapi.GS_U4MN .. autodata:: geosoft.gxapi.GS_U4DM :annotation: .. autoattribute:: geosoft.gxapi.GS_U4DM .. autodata:: geosoft.gxapi.GS_S8MX :annotation: .. autoattribute:: geosoft.gxapi.GS_S8MX .. autodata:: geosoft.gxapi.GS_S8MN :annotation: .. autoattribute:: geosoft.gxapi.GS_S8MN .. autodata:: geosoft.gxapi.GS_S8DM :annotation: .. autoattribute:: geosoft.gxapi.GS_S8DM .. autodata:: geosoft.gxapi.GS_U8MX :annotation: .. autoattribute:: geosoft.gxapi.GS_U8MX .. autodata:: geosoft.gxapi.GS_U8MN :annotation: .. autoattribute:: geosoft.gxapi.GS_U8MN .. autodata:: geosoft.gxapi.GS_U8DM :annotation: .. autoattribute:: geosoft.gxapi.GS_U8DM .. autodata:: geosoft.gxapi.GS_R4MX :annotation: .. autoattribute:: geosoft.gxapi.GS_R4MX .. autodata:: geosoft.gxapi.GS_R4MN :annotation: .. autoattribute:: geosoft.gxapi.GS_R4MN .. autodata:: geosoft.gxapi.GS_R4DM :annotation: .. autoattribute:: geosoft.gxapi.GS_R4DM .. autodata:: geosoft.gxapi.GS_R8MX :annotation: .. autoattribute:: geosoft.gxapi.GS_R8MX .. 
autodata:: geosoft.gxapi.GS_R8MN :annotation: .. autoattribute:: geosoft.gxapi.GS_R8MN .. autodata:: geosoft.gxapi.GS_R8DM :annotation: .. autoattribute:: geosoft.gxapi.GS_R8DM .. autodata:: geosoft.gxapi.GS_R4EPSILON :annotation: .. autoattribute:: geosoft.gxapi.GS_R4EPSILON .. autodata:: geosoft.gxapi.GS_R8EPSILON :annotation: .. autoattribute:: geosoft.gxapi.GS_R8EPSILON .. _GEO_LIMITS: GEO_LIMITS constants ----------------------------------------------------------------------- Data ranges of numbers .. autodata:: geosoft.gxapi.iMIN :annotation: .. autoattribute:: geosoft.gxapi.iMIN .. autodata:: geosoft.gxapi.iMAX :annotation: .. autoattribute:: geosoft.gxapi.iMAX .. autodata:: geosoft.gxapi.rMIN :annotation: .. autoattribute:: geosoft.gxapi.rMIN .. autodata:: geosoft.gxapi.rMAX :annotation: .. autoattribute:: geosoft.gxapi.rMAX .. _GEO_STRING_SIZE: GEO_STRING_SIZE constants ----------------------------------------------------------------------- Default string sized for different uses GX's must use these unless there is a very good reason not to. The path strings here are generally larger than what is possible in the OS, but it is defined as such for Unicode conversion reasons. .. autodata:: geosoft.gxapi.STR_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.STR_DEFAULT .. autodata:: geosoft.gxapi.STR_DEFAULT_SHORT :annotation: .. autoattribute:: geosoft.gxapi.STR_DEFAULT_SHORT .. autodata:: geosoft.gxapi.STR_DEFAULT_LONG :annotation: .. autoattribute:: geosoft.gxapi.STR_DEFAULT_LONG .. autodata:: geosoft.gxapi.STR_ERROR :annotation: .. autoattribute:: geosoft.gxapi.STR_ERROR .. autodata:: geosoft.gxapi.STR_VERY_LONG :annotation: .. autoattribute:: geosoft.gxapi.STR_VERY_LONG .. autodata:: geosoft.gxapi.STR_VIEW :annotation: .. autoattribute:: geosoft.gxapi.STR_VIEW .. autodata:: geosoft.gxapi.STR_GROUP :annotation: .. autoattribute:: geosoft.gxapi.STR_GROUP .. autodata:: geosoft.gxapi.STR_VIEW_GROUP :annotation: .. 
autoattribute:: geosoft.gxapi.STR_VIEW_GROUP

.. autodata:: geosoft.gxapi.STR_FILE
   :annotation:
.. autoattribute:: geosoft.gxapi.STR_FILE

.. autodata:: geosoft.gxapi.STR_MULTI_FILE
   :annotation:
.. autoattribute:: geosoft.gxapi.STR_MULTI_FILE

.. autodata:: geosoft.gxapi.STR_DB_SYMBOL
   :annotation:
.. autoattribute:: geosoft.gxapi.STR_DB_SYMBOL

.. autodata:: geosoft.gxapi.STR_GXF
   :annotation:
.. autoattribute:: geosoft.gxapi.STR_GXF

.. autodata:: geosoft.gxapi.STR_MAX_PATH
   :annotation:
.. autoattribute:: geosoft.gxapi.STR_MAX_PATH

.. autodata:: geosoft.gxapi.STR_MULTI_PATH
   :annotation:
.. autoattribute:: geosoft.gxapi.STR_MULTI_PATH

.. autodata:: geosoft.gxapi.GS_MAX_PATH
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_MAX_PATH

.. autodata:: geosoft.gxapi.GS_MULTI_PATH
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_MULTI_PATH


.. _GEO_VAR:

GEO_VAR constants
-----------------------------------------------------------------------

Variable types. Use -X for strings of X length

.. autodata:: geosoft.gxapi.GS_INT
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_INT

.. autodata:: geosoft.gxapi.GS_REAL
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_REAL


.. _GS_FORMATS:

GS_FORMATS constants
-----------------------------------------------------------------------

Special use data types. Strings are indicated by a negative maximum string length (including NULL).

.. autodata:: geosoft.gxapi.FORMAT_DECIMAL
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_DECIMAL

.. autodata:: geosoft.gxapi.FORMAT_SIG_DIG
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_SIG_DIG

.. autodata:: geosoft.gxapi.FORMAT_EXP
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_EXP

.. autodata:: geosoft.gxapi.FORMAT_TIME_COLON
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_TIME_COLON

.. autodata:: geosoft.gxapi.FORMAT_TIME_HMS
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_TIME_HMS

.. autodata:: geosoft.gxapi.FORMAT_TIME_HHMMSS
   :annotation:
..
autoattribute:: geosoft.gxapi.FORMAT_TIME_HHMMSS

.. autodata:: geosoft.gxapi.FORMAT_DATE_YYYYMMDD
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_DATE_YYYYMMDD

.. autodata:: geosoft.gxapi.FORMAT_DATE_DDMMYYYY
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_DATE_DDMMYYYY

.. autodata:: geosoft.gxapi.FORMAT_DATE_MMDDYYYY
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_DATE_MMDDYYYY

.. autodata:: geosoft.gxapi.FORMAT_GEOGRAPHIC
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_GEOGRAPHIC

.. autodata:: geosoft.gxapi.FORMAT_GEOGRAPHIC_1
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_GEOGRAPHIC_1

.. autodata:: geosoft.gxapi.FORMAT_GEOGRAPHIC_2
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_GEOGRAPHIC_2

.. autodata:: geosoft.gxapi.FORMAT_GEOGRAPHIC_3
   :annotation:
.. autoattribute:: geosoft.gxapi.FORMAT_GEOGRAPHIC_3


.. _GS_TYPES:

GS_TYPES constants
-----------------------------------------------------------------------

Special use data types. Strings are indicated by a negative maximum string length (including NULL).

.. autodata:: geosoft.gxapi.GS_BYTE
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_BYTE

.. autodata:: geosoft.gxapi.GS_USHORT
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_USHORT

.. autodata:: geosoft.gxapi.GS_SHORT
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_SHORT

.. autodata:: geosoft.gxapi.GS_LONG
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_LONG

.. autodata:: geosoft.gxapi.GS_FLOAT
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_FLOAT

.. autodata:: geosoft.gxapi.GS_DOUBLE
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_DOUBLE

.. autodata:: geosoft.gxapi.GS_UBYTE
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_UBYTE

.. autodata:: geosoft.gxapi.GS_ULONG
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_ULONG

.. autodata:: geosoft.gxapi.GS_LONG64
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_LONG64

.. autodata:: geosoft.gxapi.GS_ULONG64
   :annotation:
.. autoattribute:: geosoft.gxapi.GS_ULONG64

..
autodata:: geosoft.gxapi.GS_FLOAT3D :annotation: .. autoattribute:: geosoft.gxapi.GS_FLOAT3D .. autodata:: geosoft.gxapi.GS_DOUBLE3D :annotation: .. autoattribute:: geosoft.gxapi.GS_DOUBLE3D .. autodata:: geosoft.gxapi.GS_FLOAT2D :annotation: .. autoattribute:: geosoft.gxapi.GS_FLOAT2D .. autodata:: geosoft.gxapi.GS_DOUBLE2D :annotation: .. autoattribute:: geosoft.gxapi.GS_DOUBLE2D .. autodata:: geosoft.gxapi.GS_MAXTYPE :annotation: .. autoattribute:: geosoft.gxapi.GS_MAXTYPE .. autodata:: geosoft.gxapi.GS_TYPE_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.GS_TYPE_DEFAULT .. _SYS_CRYPT_KEY: SYS_CRYPT_KEY constants ----------------------------------------------------------------------- Special Encryption Keys .. autodata:: geosoft.gxapi.SYS_CRYPT_LICENSE_KEY :annotation: .. autoattribute:: geosoft.gxapi.SYS_CRYPT_LICENSE_KEY .. autodata:: geosoft.gxapi.SYS_CRYPT_COMPUTER_ID :annotation: .. autoattribute:: geosoft.gxapi.SYS_CRYPT_COMPUTER_ID .. autodata:: geosoft.gxapi.SYS_CRYPT_GLOBAL_ID :annotation: .. autoattribute:: geosoft.gxapi.SYS_CRYPT_GLOBAL_ID .. _TIME_FORMAT: TIME_FORMAT constants ----------------------------------------------------------------------- Old Time formats .. autodata:: geosoft.gxapi.TIME_FORMAT_COLON :annotation: .. autoattribute:: geosoft.gxapi.TIME_FORMAT_COLON .. autodata:: geosoft.gxapi.TIME_FORMAT_HMS :annotation: .. autoattribute:: geosoft.gxapi.TIME_FORMAT_HMS <file_sep>/docs/GXSQLSRV.rst .. _GXSQLSRV: GXSQLSRV class ================================== .. autoclass:: geosoft.gxapi.GXSQLSRV :members: .. _MFCSQL_DRIVER: MFCSQL_DRIVER constants ----------------------------------------------------------------------- SQL Server Driver .. autodata:: geosoft.gxapi.MFCSQL_DRIVER_NOPROMPT :annotation: .. autoattribute:: geosoft.gxapi.MFCSQL_DRIVER_NOPROMPT .. autodata:: geosoft.gxapi.MFCSQL_DRIVER_COMPLETE :annotation: .. autoattribute:: geosoft.gxapi.MFCSQL_DRIVER_COMPLETE .. autodata:: geosoft.gxapi.MFCSQL_DRIVER_PROMPT :annotation: .. 
autoattribute:: geosoft.gxapi.MFCSQL_DRIVER_PROMPT

.. autodata:: geosoft.gxapi.MFCSQL_DRIVER_COMPLETE_REQUIRED
   :annotation:
.. autoattribute:: geosoft.gxapi.MFCSQL_DRIVER_COMPLETE_REQUIRED
<file_sep>/geosoft/gxpy/tests/test_gx.py
import unittest
import os
import time
import concurrent.futures

import geosoft
import geosoft.gxpy.gx as gx

from base import GXPYTest


class Test(GXPYTest):
    # Tests for GX context creation/teardown, including per-thread
    # thread-local (TLS) context behaviour.

    @classmethod
    def setUpClass(cls):
        cls.setUpGXPYTest(res_stack=4)

    # NOTE(review): despite the first parameter being named 'cls', this is a
    # plain instance method (it is called as self._get_thread_gx_context(...)
    # below); renaming the parameter to 'self' would be clearer.
    def _get_thread_gx_context(cls, use_gxpy_context: bool):
        # Return either the high-level gxpy context or the raw gxapi context;
        # both are usable as context managers.
        if use_gxpy_context:
            return gx.GXpy('ThreadGXContext', geosoft.__version__)
        else:
            return geosoft.gxapi.GXContext.create('ThreadGXContext', geosoft.__version__)

    def _thread_gx_func(self, use_gxpy_context: bool):
        # Worker body run on a pool thread: verifies that the TLS geo pointer
        # and gx._have_gx() flip on inside the context manager and off again
        # outside it.  Returns "" on success, or a numbered error description.
        tls_geo = geosoft.gxapi.GXContext._try_get_tls_geo()
        if tls_geo is not None:
            return "(1) We have a GX context geo pointer in TLS but should not!"
        if gx._have_gx():
            return "(2) geosoft.gxpy._have_gx returned True but should be False!"
        with self._get_thread_gx_context(use_gxpy_context) as gxc:
            # Brief sleep to interleave threads and exercise TLS isolation.
            time.sleep(0.1)
            tls_geo = geosoft.gxapi.GXContext._try_get_tls_geo()
            if tls_geo is None:
                return "(3) We should have a GX context geo pointer in TLS but do not!"
            # _have_gx() should be True only for the gxpy-level context.
            if use_gxpy_context:
                if not gx._have_gx():
                    return "(4) geosoft.gxpy._have_gx returned False but should be True!"
            else:
                if gx._have_gx():
                    return "(5) geosoft.gxpy._have_gx returned True but should be False!"
        # After the context exits, both indicators must be cleared again.
        if gx._have_gx():
            return "(6) geosoft.gxpy._have_gx returned True but should be False!"
        tls_geo = geosoft.gxapi.GXContext._try_get_tls_geo()
        if tls_geo is not None:
            return "(7) We have a GX context geo pointer in TLS but should not!"
        return ""

    NUM_THREADS_TO_TEST = 100

    def _run_threads_context(self, use_gxpy_context: bool):
        # Run _thread_gx_func on NUM_THREADS_TO_TEST pool threads and fail on
        # the first non-empty error string returned by any worker.
        with concurrent.futures.ThreadPoolExecutor(max_workers=self.NUM_THREADS_TO_TEST) as executor:
            futures = []
            # NOTE(review): loop index 'i' is unused; the lambda captures
            # use_gxpy_context, which is the same for every submission, so the
            # usual late-binding-in-a-loop pitfall does not apply here.
            for i in range(0, self.NUM_THREADS_TO_TEST):
                futures.append(executor.submit(lambda: self._thread_gx_func(use_gxpy_context)))
            for future in futures:
                result = future.result()
                if result:
                    self.fail(result)

    def test_threads_gxapi_context(self):
        self._run_threads_context(False)

    def test_threads_gxpy_context(self):
        self._run_threads_context(True)

    def test_gxpy(self):
        # Basic sanity of the shared test context created by the base class.
        self.start()
        gxc = self._gx
        self.assertTrue(gxc.gid.find('@') > 0)
        self.assertEqual(gxc.main_wind_id, 0)
        self.assertEqual(gxc.active_wind_id, 0)
        self.assertEqual(gx.__version__, geosoft.__version__)

    def test_env(self):
        # Every environment property must be populated (not None / truthy).
        self.start()

        gxc = self._gx
        self.assertFalse(gxc.gid is None)
        self.assertFalse(gxc.current_date is None)
        self.assertFalse(gxc.current_utc_date is None)
        self.assertFalse(gxc.current_time is None)
        self.assertFalse(gxc.current_utc_time is None)
        self.assertFalse(gxc.license_class is None)
        self.assertFalse(gxc.folder_workspace is None)
        self.assertFalse(gxc.folder_temp is None)
        self.assertFalse(gxc.folder_user is None)

        self.assertTrue(gxc.geosoft_version_label)
        self.assertTrue(gxc.geosoft_version_major >= 9)
        self.assertTrue(gxc.geosoft_version_minor >= 0)
        self.assertTrue(gxc.geosoft_version_micro >= 0)
        self.assertTrue(gxc.geosoft_build_label)
        self.assertTrue(gxc.geosoft_build_number)
        self.assertTrue(gxc.geosoft_name)

    @unittest.skip('WIP')
    def test_entitlements(self):
        with gx.GXpy() as gxc:
            ent = gxc.entitlements()
            self.assertTrue(ent['1000'], 'Oasis montaj™ Base')
            self.assertTrue(gxc.has_entitlement(1000))
            self.assertTrue(gxc.has_entitlement('Oasis montaj™ Base'))
            self.assertTrue(gxc.has_entitlement(2000))
            self.assertTrue(gxc.has_entitlement("ArcGIS"))
            self.assertTrue(gxc.has_entitlement(3000))
            self.assertTrue(gxc.has_entitlement("MapInfo"))
            self.assertFalse(gxc.has_entitlement("bogus"))

            #for e in ent:
            #
print('{}: "{}"'.format(e, ent[e])) if gxc.entitled: self.assertTrue(gxc.has_entitlement(10000) or gxc.has_entitlement(30000) or gxc.has_entitlement(30101) or gxc.has_entitlement(40000) or gxc.has_entitlement(41000)) else: self.assertTrue(gxc.has_entitlement(1000) and gxc.has_entitlement(2000) and gxc.has_entitlement(3000)) self.assertFalse(gxc.has_entitlement(10000)) self.assertFalse(gxc.has_entitlement(30000)) self.assertFalse(gxc.has_entitlement(30101)) self.assertFalse(gxc.has_entitlement(40000)) self.assertFalse(gxc.has_entitlement(41000)) def test_temp(self): self.start() gxc = self._gx tf = gxc.temp_folder() self.assertTrue(os.path.isdir(tf)) tf = gxc.temp_file() self.assertFalse(os.path.exists(tf)) tf = gxc.temp_file(ext=".dummy") self.assertFalse(os.path.exists(tf)) self.assertEqual(tf[-6:], ".dummy") try: with open(tf, 'x'): pass except: self.assertTrue(False) def test_elapsed_time(self): self.start() self.assertTrue(self._gx.elapsed_seconds("startup") > 0.0) time.sleep(0.25) self.assertTrue(self._gx.elapsed_seconds("0.25 seconds later") > 0.25) def test_remove_temp_files(self): self.start() gxc = self._gx def make_file(name): with open(name, 'w+') as f: f.write('ok') files = [] for i in range(3): fn = gxc.temp_file() make_file(fn) files.append(fn) gxc.remove_stale_temporary_files() for f in files: self.assertTrue(os.path.exists(f)) gxc.remove_stale_temporary_files(age=0) for f in files: self.assertFalse(os.path.exists(f)) def test_profile(self): self.start() gxc = self._gx self.assertTrue(len(gxc.profile_name)) self.assertTrue(len(gxc.profile_url)) ############################################################################################### if __name__ == '__main__': unittest.main() <file_sep>/geosoft/gxapi/GXUNC.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXUNC(gxapi_cy.WrapUNC): """ GXUNC class. This library is not a class. Use the `GXUNC <geosoft.gxapi.GXUNC>` library functions to work with Unicode characters and strings. Since version 6.2 all strings are represented internally in the the GX engine as UTF-8. The character set concept was discarded as a way to work with characters that does not fall within the normal ASCII range 0x01-0x7F. The utilities here aids with any new functionality that is now possible (e.g. an expanded symbol range with TrueType fonts). """ def __init__(self, handle=0): super(GXUNC, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXUNC <geosoft.gxapi.GXUNC>` :returns: A null `GXUNC <geosoft.gxapi.GXUNC>` :rtype: GXUNC """ return GXUNC() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # UTF @classmethod def is_valid_utf16_char(cls, ch): """ Check if the UTF-16 value is a valid Unicode character code point. :param ch: UTF-16 value (32-bit int, lower 16 bits used, upper bits reserved for future use) :type ch: int :rtype: bool .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapUNC._is_valid_utf16_char(GXContext._get_tls_geo(), ch) return ret_val @classmethod def valid_symbol(cls, face, geofont, number): """ See if a Symbol number is valid in a particular font. :param face: Face name (undecorated) :param geofont: Geosoft font? 
:param number: Symbol number :type face: str :type geofont: bool :type number: int :rtype: bool .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapUNC._valid_symbol(GXContext._get_tls_geo(), face.encode(), geofont, number) return ret_val @classmethod def utf16_val_to_str(cls, ch, str_val): """ Convert a UTF-16 value to a UTF-8 encoded string. :param ch: UTF-16 value (32-bit int, lower 16 bits used, upper bits reserved for future use) :param str_val: Converted string :type ch: int :type str_val: str_ref .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** An empty string will be returned for invalid symbols """ str_val.value = gxapi_cy.WrapUNC._utf16_val_to_str(GXContext._get_tls_geo(), ch, str_val.value.encode()) @classmethod def validate_symbols(cls, vv, face, geofont): """ High performance method to see if a set of symbols are valid in a particular font. :param vv: `GXVV <geosoft.gxapi.GXVV>` of symbols :param face: Face name (undecorated) :param geofont: Geosoft font? :type vv: GXVV :type face: str :type geofont: bool .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Invalid symbols in the `GXVV <geosoft.gxapi.GXVV>` will be set to -1 by this call. `GXVV <geosoft.gxapi.GXVV>` has to be of type `GS_LONG <geosoft.gxapi.GS_LONG>`. """ gxapi_cy.WrapUNC._validate_symbols(GXContext._get_tls_geo(), vv, face.encode(), geofont) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXFFT2.rst .. 
_GXFFT2: GXFFT2 class ================================== .. autoclass:: geosoft.gxapi.GXFFT2 :members: .. _FFT2_PG: FFT2_PG constants ----------------------------------------------------------------------- Pager Direction .. autodata:: geosoft.gxapi.FFT2_PG_FORWARD :annotation: .. autoattribute:: geosoft.gxapi.FFT2_PG_FORWARD .. autodata:: geosoft.gxapi.FFT2_PG_INVERSE :annotation: .. autoattribute:: geosoft.gxapi.FFT2_PG_INVERSE <file_sep>/docs/GXSEGYREADER.rst .. _GXSEGYREADER: GXSEGYREADER class ================================== .. autoclass:: geosoft.gxapi.GXSEGYREADER :members: <file_sep>/docs/GXVM.rst .. _GXVM: GXVM class ================================== .. autoclass:: geosoft.gxapi.GXVM :members: <file_sep>/examples/tutorial/Coordinate systems/translate_coordinates_between_systems.py import geosoft.gxpy.gx as gx import geosoft.gxpy.coordinate_system as gxcs import numpy as np # create context gxc = gx.GXpy() # define coordinate systems and a transformer cs_utm = gxcs.Coordinate_system('NAD83 / UTM zone 15N') cs_nad27 = gxcs.Coordinate_system('NAD27') cs_transform = gxcs.Coordinate_translate(cs_utm, cs_nad27) # example transform a single (x, y) coordinate lon_lat = cs_transform.convert((345000, 64250000)) print('(lon, lat): {}'.format(lon_lat)) # example transform a single (x, y, elevation) coordinate print('(lon, lat, elevation): {}'.format(cs_transform.convert((345000, 64250000, 50)))) # example translate a list of (x, y, z) tuples locations = [(345000, 64250000, 50), (345500, 64250000, 60), (346000, 64250000, 70)] nad27_locations = cs_transform.convert(locations) for xyz in nad27_locations: print(xyz) # example transform a numpy array in-place data = np.array([[345000, 64250000, 50, 55000], [345500, 64250000, 60, 55150], [346000, 64250000, 70, 56000]], dtype=float) nad27_locations = cs_transform.convert(data, in_place=True) for xyz in data: print(xyz) # compare coordinate systems print(cs_utm == cs_nad27) print(gxcs.Coordinate_system('WGS 84') == 
gxcs.Coordinate_system('WGS 84')) print(gxcs.Coordinate_system('GDA94 [geodetic]') == gxcs.Coordinate_system('GDA94 [geoid]')) <file_sep>/docs/GXPJ.rst .. _GXPJ: GXPJ class ================================== .. autoclass:: geosoft.gxapi.GXPJ :members: .. _PJ_ELEVATION: PJ_ELEVATION constants ----------------------------------------------------------------------- Elevation correction method .. autodata:: geosoft.gxapi.PJ_ELEVATION_NONE :annotation: .. autoattribute:: geosoft.gxapi.PJ_ELEVATION_NONE .. autodata:: geosoft.gxapi.PJ_ELEVATION_GEOCENTRIC :annotation: .. autoattribute:: geosoft.gxapi.PJ_ELEVATION_GEOCENTRIC .. autodata:: geosoft.gxapi.PJ_ELEVATION_GEOID :annotation: .. autoattribute:: geosoft.gxapi.PJ_ELEVATION_GEOID .. _PJ_RECT: PJ_RECT constants ----------------------------------------------------------------------- Conversion direction .. autodata:: geosoft.gxapi.PJ_RECT_XY2LL :annotation: .. autoattribute:: geosoft.gxapi.PJ_RECT_XY2LL .. autodata:: geosoft.gxapi.PJ_RECT_LL2XY :annotation: .. autoattribute:: geosoft.gxapi.PJ_RECT_LL2XY <file_sep>/geosoft/gxapi/GXIGRF.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXIGRF(gxapi_cy.WrapIGRF): """ GXIGRF class. International Geomagnetic Reference Field Methods to work with `GXIGRF <geosoft.gxapi.GXIGRF>` objects. The `GXIGRF <geosoft.gxapi.GXIGRF>` object contains data for the `GXIGRF <geosoft.gxapi.GXIGRF>` model of the geomagnetic reference field. 
""" def __init__(self, handle=0): super(GXIGRF, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXIGRF <geosoft.gxapi.GXIGRF>` :returns: A null `GXIGRF <geosoft.gxapi.GXIGRF>` :rtype: GXIGRF """ return GXIGRF() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def calc(self, el, lon, lat, str_val, inc, dec): """ Calculate `GXIGRF <geosoft.gxapi.GXIGRF>` data for a given `GXIGRF <geosoft.gxapi.GXIGRF>` model. :param el: Elevation (metres) :param lon: Longitude (-180 to 180) :param lat: Latitude (-90 to 90) Returns :param str_val: Field strength :param inc: Field inclination :param dec: Field declination :type el: float :type lon: float :type lat: float :type str_val: float_ref :type inc: float_ref :type dec: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Calculate `GXIGRF <geosoft.gxapi.GXIGRF>` data (total field, inclination, and declination) for a given `GXIGRF <geosoft.gxapi.GXIGRF>` model. The model used will be the same as that obtained with `create <geosoft.gxapi.GXIGRF.create>`. """ str_val.value, inc.value, dec.value = self._calc(el, lon, lat, str_val.value, inc.value, dec.value) def calc_vv(self, gv_vel, gv_vlon, gv_vlat, gv_vfs, gv_vinc, gv_vdec): """ Calculate `GXIGRF <geosoft.gxapi.GXIGRF>` data `GXVV <geosoft.gxapi.GXVV>`'s for a given `GXIGRF <geosoft.gxapi.GXIGRF>` model. 
:param gv_vel: Input elevation data (metres) :param gv_vlon: Input longitude data (-180 to 180) :param gv_vlat: Input latitude data (-90 to 90) :param gv_vfs: Output total field :param gv_vinc: Output inclination :param gv_vdec: Output declination :type gv_vel: GXVV :type gv_vlon: GXVV :type gv_vlat: GXVV :type gv_vfs: GXVV :type gv_vinc: GXVV :type gv_vdec: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Calculate `GXIGRF <geosoft.gxapi.GXIGRF>` data (total field, inclination, and declination) for a given `GXIGRF <geosoft.gxapi.GXIGRF>` model. The model used will be the same as that obtained with `create <geosoft.gxapi.GXIGRF.create>`. All of the `GXVV <geosoft.gxapi.GXVV>`'s should be the same length. The function will abort if they are not. No assumption is made on what data types are contained by any of the `GXVV <geosoft.gxapi.GXVV>`'s. However, all total field, inclination, and declination values are internally calculated as real data. These values will be converted to the types contained in the output `GXVV <geosoft.gxapi.GXVV>`'s. """ self._calc_vv(gv_vel, gv_vlon, gv_vlat, gv_vfs, gv_vinc, gv_vdec) @classmethod def create(cls, date, year, filename): """ Create an `GXIGRF <geosoft.gxapi.GXIGRF>`. :param date: Date required :param year: Year of the `GXIGRF <geosoft.gxapi.GXIGRF>` model to use :param filename: Name of the `GXIGRF <geosoft.gxapi.GXIGRF>` reference data file :type date: float :type year: int :type filename: str :returns: `GXIGRF <geosoft.gxapi.GXIGRF>` Object :rtype: GXIGRF .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the year of the `GXIGRF <geosoft.gxapi.GXIGRF>` model is dummy, then the `GXIGRF <geosoft.gxapi.GXIGRF>` year nearest to the line's date will be used. 
Otherwise, the specified year is used. """ ret_val = gxapi_cy.WrapIGRF._create(GXContext._get_tls_geo(), date, year, filename.encode()) return GXIGRF(ret_val) @classmethod def date_range(cls, file_name, min, max): """ Determine the range of years covered by an `GXIGRF <geosoft.gxapi.GXIGRF>` or DGRF file :param file_name: Model data file name :param min: Minimum year (`rMAX <geosoft.gxapi.rMAX>` if none found) :param max: Maximum year (`rMIN <geosoft.gxapi.rMIN>` if none found) :type file_name: str :type min: float_ref :type max: float_ref .. versionadded:: 6.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This is useful when using a DGRF file, because the system is set up only to calculate for years within the date range, and will return an error otherwise. """ min.value, max.value = gxapi_cy.WrapIGRF._date_range(GXContext._get_tls_geo(), file_name.encode(), min.value, max.value) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/test_gdb.py import unittest import os import shutil import tempfile import numpy as np from PIL import Image import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.gx as gx import geosoft.gxpy.utility as gxu import geosoft.gxpy.system as gsys import geosoft.gxpy.gdb as gxdb import geosoft.gxpy.vv as gxvv import geosoft.gxpy.va as gxva import geosoft.gxpy.map as gxmap import geosoft.gxpy.metadata as gxmeta import geosoft.gxpy.geometry as gxgeo from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'test_database.zip'), folder=cls._gx.temp_folder()) cls.gdb_name = 
os.path.join(cls.folder, 'test_database.gdb') def test_gdb(self): self.start() self.assertEqual(gxdb.__version__, geosoft.__version__) def setxyz(xyz): gdb.xyz_channels = xyz with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: self.assertRaises(gxdb.GdbException, gdb.is_line, 'nope', True) self.assertRaises(gxdb.GdbException, gdb.is_channel, 'nope', True) self.assertRaises(gxdb.GdbException, setxyz, ('x', 'y', 'crazy_cannot_exist')) self.assertRaises(gxdb.GdbException, setxyz, ('crazy_cannot_exist', 'y')) gdb.xyz_channels = ('x', 'y', 'z') self.assertEqual(gdb.xyz_channels[2], 'z') gdb.xyz_channels = ('x', 'y') self.assertEqual(gdb.xyz_channels[2], 'z') finally: gdb.discard() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: self.assertEqual(len(gdb.file_name), len(self.gdb_name)) with gxdb.Geosoft_gdb.open(os.path.splitext(self.gdb_name)[0]) as gdb: self.assertEqual(len(gdb.file_name), len(self.gdb_name)) gdb.commit() try: l = list(gdb.list_lines()) c = list(gdb.list_channels()) self.assertTrue(gdb.exist_symb_(gxdb.Line(gdb, l[0]), gxapi.DB_SYMB_LINE)) self.assertTrue(gdb.exist_symb_(gxdb.Channel(gdb, c[0]), gxapi.DB_SYMB_CHAN)) self.assertFalse(gdb.exist_symb_(gxdb.Channel(gdb, c[0]), gxapi.DB_SYMB_LINE)) finally: gdb.discard() self.assertFalse(gxdb.is_valid_line_name(123)) with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: self.assertEqual(gdb.max_blobs, 650) self.assertEqual(gdb.max_lines, 500) self.assertEqual(gdb.max_channels, 50) self.assertEqual(gdb.used_blobs, 16) self.assertEqual(gdb.used_lines, 5) self.assertEqual(gdb.used_channels, 8) self.assertEqual(gdb.max_compressed_channel_bytes, 67106816) self.assertEqual(gdb.number_of_blocks, 553) self.assertEqual(gdb.lost_blocks, 0) self.assertEqual(gdb.free_blocks, 24) self.assertEqual(gdb.compression, 0) self.assertEqual(gdb.pages_for_blobs, 35) self.assertEqual(gdb.db_size_kb, 915) self.assertEqual(gdb.index_size_kb, 303) self.assertEqual(gdb.max_block_size_bytes, 67106792) 
self.assertEqual(gdb.data_has_changed, 0) with gxdb.Geosoft_gdb.open(os.path.splitext(self.gdb_name)[0]) as gdb: try: gxdb.Channel.new(gdb, 'empty') data = gdb.read_line('D2','empty')[0] self.assertEqual(len(data), 0) data = gdb.read_line('D2', ('x', 'y', 'empty'))[0] self.assertEqual(len(data), 832) self.assertFalse(np.isfinite(data[:,2]).any()) finally: gdb.discard() def test_noprops_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: self.assertEqual(len(gdb.file_name), len(self.gdb_name)) self.assertTrue(len(gdb.list_channels()) >= 6) self.assertTrue('X' in gdb.list_channels()) self.assertTrue('dx' in gdb.list_channels(chan=gxdb.CHAN_ALL)) self.assertTrue('vector' in gdb.list_channels(chan=gxdb.CHAN_ARRAY)) self.assertFalse('vector' in gdb.list_channels(chan=gxdb.CHAN_NORMAL)) self.assertEqual(gdb.channel_width('vector'),3) self.assertEqual(gdb.channel_width('x'),1) gdb.discard() def test_empty(self): self.start() name = 'empty' try: with gxdb.Geosoft_gdb.new(name, overwrite=True) as gdb: name = gdb.file_name self.assertEqual(len(gdb.list_lines()), 0) gdb.new_line('some_line') gdb.new_line('_some_line') lines = gdb.list_lines() self.assertEqual(len(lines), 2) self.assertTrue('Some_line' in lines) # note leading 's' was interpreted as line type 'S' self.assertTrue('D_some_line' in lines) npd, ch, fid = gdb.read_line(list(lines)[0]) self.assertEqual(npd.size, 0) self.assertEqual(len(ch), 0) gdb.new_channel('one') npd, ch, fid = gdb.read_line(list(lines)[0]) self.assertEqual(npd.shape, (0, 1)) self.assertEqual(len(ch), 1) self.assertEqual(fid, (0.0, 1.0)) gdb.new_channel('two') npd, ch, fid = gdb.read_line(list(lines)[0]) self.assertEqual(npd.shape, (0, 2)) self.assertEqual(len(ch), 2) self.assertEqual(fid, (0.0, 1.0)) ch = gdb.new_channel('three') line = list(lines)[0] gdb.write_channel(line, ch, [1, 2, 3, 4, 5]) npd, ch, fid = gdb.read_line(list(lines)[0]) self.assertEqual(npd.shape, (5, 3)) self.assertEqual(len(ch), 3) 
self.assertEqual(fid, (0.0, 1.0)) ch = gdb.new_channel('four', dtype=np.int) line = list(lines)[0] gdb.write_channel(line, ch, [10, 20, 30, 40], fid=(-1.5, 2)) npd, ch, fid = gdb.read_line(line) self.assertEqual(npd.shape, (7, 4)) self.assertEqual(len(ch), 4) self.assertEqual(fid, (-1.5, 1.0)) self.assertEqual(npd[0][0], 10.) self.assertEqual(npd[2][2], 1.5) self.assertEqual(npd[5][2], 4.5) self.assertTrue(np.isnan(npd[0][1])) self.assertTrue(np.isnan(npd[1][2])) self.assertTrue(np.isnan(npd[6][2])) npd, ch, fid = gdb.read_line(line, 'four', dtype=np.int) self.assertEqual(npd.shape, (4, 1)) self.assertEqual(len(ch), 1) self.assertEqual(fid, (-1.5, 2.0)) self.assertEqual(npd[0][0], 10) self.assertEqual(npd[3][0], 40) npd, ch, fid = gdb.read_line(line, 'four', fid=(-0.13333, 0.000666)) self.assertEqual(npd.shape, (6958, 1)) self.assertEqual(len(ch), 1) self.assertEqual(fid, (-0.13333, 0.000666)) self.assertAlmostEqual(npd[0][0], 16.83335) self.assertAlmostEqual(npd[1000][0], 20.16335) finally: gxdb.delete_files(name) def test_read_write_channel_vv_va(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: gdb.delete_channel('test_chan_vv') vv = gxvv.GXvv(np.array([1.,2.,3.]), fid=(-10, 2.5)) gdb.write_channel_vv('T46', 'test_chan_vv', vv) vv = gdb.read_channel_vv('T46', 'test_chan_vv') self.assertEqual(vv.length, 3) self.assertEqual(vv.fid, (-10.0, 2.5)) self.assertRaises(gxva.VAException, gdb.read_channel_va, 'T46', 'test_chan_vv') gdb.delete_channel('test_chan_vv') va = gxva.GXva(np.array([[1., 2., 3.],[8,9,10]]), fid=(-10, 2.5)) gdb.write_channel_va('T46', 'test_chan_va', va) self.assertRaises(gxdb.GdbException, gdb.read_channel_vv, 'T46', 'test_chan_va') va = gdb.read_channel_va('T46', 'test_chan_va') self.assertEqual(va.width, 3) self.assertEqual(va.length, 2) self.assertEqual(va.fid, (-10.0, 2.5)) gdb.delete_channel('test_chan_va') def test_group_VA_read_write(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: 
self.assertEqual(len(gdb.file_name), len(self.gdb_name)) data, ch, fid = gdb.read_line('D578625') self.assertEqual(data.shape, (832, 10)) gdb.write_line('T45', data, fid=(99, 0.5)) data, ch, fid = gdb.read_line('T45') self.assertEqual(data.shape, (832, 10)) self.assertEqual(len(ch), 10) self.assertEqual(ch[0], 'X') self.assertEqual(fid, (99.0, 0.5)) gdb.write_channel('T46', 'wva', data, fid=(-10, 2.5)) data, fid = gdb.read_channel('T46', 'wva') self.assertEqual(data.shape, (832, 10)) self.assertEqual(fid, (-10.0, 2.5)) gdb.delete_channel('wva') gdb.write_line('T46', data, channels='wideva', fid=(-10, 2.5)) data, ch, fid = gdb.read_line('T46', 'wideva') self.assertEqual(data.shape, (832, 10)) self.assertEqual(len(ch), 10) self.assertEqual(ch[0], 'wideva[0]') self.assertEqual(fid, (-10.0, 2.5)) data, ch, fid = gdb.read_line('T46') self.assertEqual(data.shape, (832, 20)) self.assertEqual(len(ch), 20) self.assertEqual(ch[0], 'X') self.assertEqual(fid, (-10.0, 2.5)) data, fid = gdb.read_channel('T46', 'wideva') self.assertEqual(data.shape, (832, 10)) self.assertEqual(fid, (-10.0, 2.5)) gdb.discard() def test_create_del_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: gdb.delete_channel('ian') gdb.new_channel('ian') self.assertTrue('ian' in gdb.list_channels()) gdb.delete_channel('ian') self.assertFalse('ian' in gdb.list_channels()) gdb.delete_channel('ian2') gdb.new_channel('ian2', np.int32, array=3) self.assertTrue('ian2' in gdb.list_channels(chan=gxdb.CHAN_ARRAY)) gdb.delete_channel('ian2') self.assertFalse('ian2' in gdb.list_channels()) def test_properties_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: ch = gdb.list_channels() self.assertTrue('X' in ch) self.assertTrue('vector' in ch) self.assertEqual(ch.get('dx'),1153) try: gdb.line_name_symb(8456712552) self.assertTrue(False) except: pass ln,ls = gdb.line_name_symb('bogus',create=True) self.assertEqual(ln,'bogus') gdb.delete_line('bogus') ls = 
gdb.line_name_symb('bogus2',create=True)[1] gdb.delete_line(ls) ln,ls = gdb.line_name_symb('D578625') self.assertEqual(ln,'D578625') ln,ls = gdb.line_name_symb('Dwonk') self.assertEqual(ln,'Dwonk') ln,ls = gdb.line_name_symb(ls) self.assertEqual(ln,'Dwonk') gdb.delete_channel('ccva') gdb.new_channel('ccva',array=8) cn,cs = gdb.channel_name_symb('ccva') self.assertEqual(cn,'ccva') cn,cs = gdb.channel_name_symb('ccva[4]') self.assertEqual(cn,'ccva[4]') gdb.discard() cs = gdb.new_channel('cava',dtype=np.int64) self.assertTrue(gdb.channel_dtype(cs).type is np.int64) gdb.select_lines(select=False) ln = gdb.list_lines() self.assertEqual(len(ln),0) gdb.select_lines('bogus') ln = gdb.list_lines() self.assertEqual(len(ln), 0) gdb.select_lines('D2') ln = gdb.list_lines() self.assertEqual(len(ln), 1) gdb.select_lines('D') ln = gdb.list_lines() self.assertEqual(len(ln), 3) self.assertTrue('D2' in ln) self.assertTrue('Dwonk' in ln) self.assertTrue('D578625' in ln) gdb.select_lines('D578625',select=False) ln = gdb.list_lines() self.assertFalse('D578625' in ln) self.assertEqual(len(ln), 2) gdb.discard() def test_read_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: npd,ch,fid = gdb.read_line('D578625') self.assertEqual(npd.shape[0],832) self.assertEqual(fid[0],0.0) self.assertEqual(fid[1],1.0) ln,ls = gdb.line_name_symb('D578625') npd,ch,fid = gdb.read_line(ls,channels=['X','Y','Z','dx','dy']) self.assertEqual(npd.shape,(832,5)) self.assertEqual(npd[10,:3].tolist(),[578625.0, 7773625.0, -1195.7531280517615]) npd,ch,fid = gdb.read_line(ls,'X') self.assertEqual(npd.shape,(832,1)) self.assertEqual(npd[10],578625.0) npd,ch,fid = gdb.read_line(ls,channels=['X','Y','Z'], dtype='<U32') self.assertEqual(npd.shape,(832,3)) self.assertEqual(npd[10,:3].tolist(),['578625.0', '7773625.0', '-1195.8']) gdb.discard() def test_read_line_dataframe(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: df,ch,fid = gdb.read_line_dataframe('D578625') 
self.assertEqual(df.shape, (832, 10)) self.assertEqual(fid[0],0.0) self.assertEqual(fid[1],1.0) ln, ls = gdb.line_name_symb('D578625') df,ch,fid = gdb.read_line_dataframe(ls,channels=['X','Y','Z']) self.assertEqual(df.shape, (832, 3)) self.assertEqual(df.values[10, :3].tolist(), [578625.0, 7773625.0, -1195.7531280517615]) gdb.discard() def test_read_vv_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: data = gdb.read_line_vv('D578625') self.assertEqual(len(data), 10) for chvv in data: vv = chvv[1] fid = vv.fid self.assertEqual(vv.length, 832) self.assertEqual(fid[0], 0.0) self.assertEqual(fid[1], 1.0) data = gdb.read_line_vv('D578625', common_fid=True) self.assertEqual(len(data), 10) for chvv in data: vv = chvv[1] fid = vv.fid self.assertEqual(vv.dtype, np.float64) self.assertEqual(vv.length, 832) self.assertEqual(fid[0], 0.0) self.assertEqual(fid[1], 1.0) data = gdb.read_line_vv('D578625', common_fid=True, chan_dtypes=True) self.assertEqual(len(data), 10) for i, chvv in enumerate(data): vv = chvv[1] fid = vv.fid if i == 5: self.assertEqual(vv.dtype, np.int16) elif i == 6: self.assertEqual(vv.dtype, np.dtype('<U64')) else: self.assertEqual(vv.dtype, np.float64) self.assertEqual(vv.length, 832) self.assertEqual(fid[0], 0.0) self.assertEqual(fid[1], 1.0) data = gdb.read_line_vv('D578625', common_fid=True, fid=(0.1,4.8)) for chvv in data: self.assertEqual(len(data), 10) vv = chvv[1] fid = vv.fid self.assertEqual(vv.length, 175) self.assertEqual(fid[0], 0.1) self.assertEqual(fid[1], 4.8) ln,ls = gdb.line_name_symb('D578625') data = gdb.read_line_vv(ls,channels=['X','Y','Z','dx','dy']) self.assertEqual(len(data), 5) self.assertEqual(data[0][0], 'X') self.assertEqual(data[4][0], 'dy') npd = data[0][1].get_data()[0] self.assertEqual(npd[10], 578625.0) npd = data[1][1].get_data()[0] self.assertEqual(npd[10], 7773625.0) npd = data[2][1].get_data()[0] self.assertEqual(npd[10], -1195.7531280517615) data = gdb.read_line_vv(ls, 'X') 
self.assertEqual(data[0][0], 'X') npd = data[0][1].get_data()[0] self.assertEqual(npd[10],578625.0) data = gdb.read_line_vv(ls,channels=['X','Y','Z'], dtype='<U32') npd = data[0][1].get_data()[0] self.assertEqual(npd[10], '578625.0') npd = data[1][1].get_data()[0] self.assertEqual(npd[10], '7773625.0') npd = data[2][1].get_data()[0] self.assertEqual(npd[10], '-1195.8') gdb.discard() def test_read_masked_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: npd,ch,fid = gdb.read_line('D2', dummy=gxdb.READ_REMOVE_DUMMYROWS) self.assertEqual(npd.shape, (825, 10)) self.assertEqual(npd.shape[1], 10) self.assertEqual(npd.shape[1], len(ch)) npd,ch,fid = gdb.read_line('D2',dummy=gxdb.READ_REMOVE_DUMMYCOLUMNS) self.assertEqual(npd.shape, (832,2)) self.assertEqual(npd.shape[1], len(ch)) npd,ch,fid = gdb.read_line('D2', channels=('x','y'), dummy=gxdb.READ_REMOVE_DUMMYCOLUMNS) self.assertEqual(npd.shape, (832,1)) self.assertEqual(npd.shape[1], len(ch)) px = geosoft.gxpy.geometry.Point2(gdb.extent_xyz) self.assertEqual(str(px), '_point2_[(578625.0, 7773625.0, -5261.5553894043005) (578625.0, 7782875.0, 1062.4999999999964)]') finally: gdb.discard() def test_extent(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: l = gdb.list_lines() gdb.select_lines() self.assertEqual(len(gdb.list_lines()), 5) gdb.select_lines(select=False) self.assertEqual(len(gdb.list_lines()), 0) gdb.select_lines('D2') self.assertEqual(len(gdb.list_lines()), 1) self.assertFalse(gdb.is_line('D0')) self.assertEqual(len(gdb.list_lines(select=False)), 5) self.assertTrue(gdb.is_line('Dwonk')) dy,_ = gdb.read_channel('D2', 'y') dy[:] = np.nan gdb.write_channel('D2', 'y', dy) px = gxgeo.Point2(gdb.extent_xyz) self.assertEqual(str(px), '_point2_[(578625.0, nan, -5261.5553894043005) (578625.0, nan, 1062.4999999999964)]') px2 = gdb.extent self.assertEqual(str(px2), '_point2_[(578625.0, nan, -5261.5553894043005) (578625.0, nan, 1062.4999999999964)]') 
self.assertEqual(px.coordinate_system, px2.coordinate_system) finally: gdb.discard() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: gdb.select_lines(select=False) gdb.select_lines('D2') dx,_ = gdb.read_channel('D2', 'x') dx [:] = np.nan gdb.write_channel('D2', 'x', dx) px = geosoft.gxpy.geometry.Point2(gdb.extent_xyz) self.assertEqual(str(px), '_point2_[(nan, 7773625.0, -5261.5553894043005) (nan, 7782875.0, 1062.4999999999964)]') finally: gdb.discard() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: gdb.select_lines(select=False) gdb.select_lines('D2') dx, _ = gdb.read_channel('D2', 'x') dx[:] = np.nan dx[1] = 1 dx[2] = 2 gdb.write_channel('D2', 'x', dx) px = geosoft.gxpy.geometry.Point2(gdb.extent_xyz) self.assertEqual(str(px), '_point2_[(1.0, 7773625.0, -5261.5553894043005) (2.0, 7782875.0, 1062.4999999999964)]') finally: gdb.discard() def test_write_vv_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: gdb.delete_channel('test') gdb.new_channel('test') vv = gxvv.GXvv(np.array([1.0,2.0,3.0,4.0])) gdb.write_channel_vv('D590875', 'test', vv) npd, ch, fid = gdb.read_line('D590875', channels=['test']) self.assertEqual(gdb.channel_fid('D590875', 'test'), fid) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1.0,2.0,3.0,4.0]) gdb.delete_channel('test') gdb.new_channel('test', np.float64, details={'unit': 'bubba'}) self.assertEqual(gxdb.Channel(gdb, 'test').unit, 'bubba') vv = gxvv.GXvv(dtype=np.float64) vv.set_data(np.array([1,2,3,4], dtype=np.int64)) gdb.write_channel_vv('D590875', 'test', vv) npd,ch,fid = gdb.read_line('D590875', channels=['test'], dtype=np.int) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1,2,3,4]) gdb.delete_channel('test') gdb.new_channel('test', np.int32) gdb.write_channel_vv('D590875', 'test', vv) npd,ch,fid = gdb.read_line('D590875', 'test') self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1.0,2.0,3.0,4.0]) gdb.delete_channel('test') 
gdb.new_channel('test', dtype=np.int32) vv.fid = (3,2) gdb.write_channel_vv('D590875', 'test', vv) npd,ch,fid = gdb.read_line('D590875', 'test', dtype=int) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1,2,3,4]) self.assertEqual(fid[0],3.0) self.assertEqual(fid[1],2.0) gdb.new_channel('test', np.int32) vv = gxvv.GXvv(np.array([1, 2, 3, 4], dtype=np.int32), fid=(2.5,0.33)) gdb.write_channel_vv('D590875', 'test', vv) npd,ch,fid = gdb.read_line('D590875', channels=['test']) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1,2,3,4]) self.assertEqual(fid[0], 2.5) self.assertEqual(fid[1], 0.33) gdb.discard() def test_write_vv_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: gdb.delete_channel('test') gdb.new_channel('test') vv = gxvv.GXvv(np.array([1.0,2.0,3.0,4.0])) gdb.write_channel_vv('D590875', 'test', vv) npd, ch, fid = gdb.read_line('D590875', channels=['test']) self.assertEqual(gdb.channel_fid('D590875', 'test'), fid) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1.0,2.0,3.0,4.0]) gdb.delete_channel('test') gdb.new_channel('test', np.float64, details={'unit': 'bubba'}) self.assertEqual(gxdb.Channel(gdb, 'test').unit_of_measure, 'bubba') vv = gxvv.GXvv(dtype=np.float64) vv.set_data(np.array([1,2,3,4], dtype=np.int64)) gdb.write_channel_vv('D590875', 'test', vv) npd,ch,fid = gdb.read_line('D590875', channels=['test'], dtype=np.int) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1,2,3,4]) gdb.delete_channel('test') gdb.new_channel('test', np.int32) gdb.write_channel_vv('D590875', 'test', vv) npd,ch,fid = gdb.read_line('D590875', 'test') self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1.0,2.0,3.0,4.0]) gdb.delete_channel('test') gdb.new_channel('test', dtype=np.int32) vv.fid = (3,2) gdb.write_channel_vv('D590875', 'test', vv) npd,ch,fid = gdb.read_line('D590875', 'test', dtype=int) self.assertEqual(npd.shape,(4,1)) 
self.assertEqual(npd[:,0].tolist(),[1,2,3,4]) self.assertEqual(fid[0],3.0) self.assertEqual(fid[1],2.0) gdb.new_channel('test', np.int32) vv = gxvv.GXvv(np.array([1, 2, 3, 4], dtype=np.int32), fid=(2.5,0.33)) gdb.write_channel_vv('D590875', 'test', vv) npd,ch,fid = gdb.read_line('D590875', channels=['test']) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1,2,3,4]) self.assertEqual(fid[0], 2.5) self.assertEqual(fid[1], 0.33) gdb.discard() def test_write_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: gdb.delete_channel('test') gdb.new_channel('test') gdb.write_channel('D590875','test',np.array([1.0,2.0,3.0,4.0])) npd, ch, fid = gdb.read_line('D590875', channels=['test']) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1.0,2.0,3.0,4.0]) gdb.delete_channel('test') gdb.new_channel('test', np.float64) gdb.write_channel('D590875','test',np.array([1,2,3,4],dtype=np.int)) npd,ch,fid = gdb.read_line('D590875',channels=['test'],dtype=np.int) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1,2,3,4]) gdb.delete_channel('test') gdb.new_channel('test', np.int32) gdb.write_channel('D590875','test',np.array([1,2,3,4],dtype=np.int)) npd,ch,fid = gdb.read_line('D590875',channels=['test']) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1.0,2.0,3.0,4.0]) gdb.delete_channel('test') gdb.new_channel('test', dtype=np.int32) gdb.write_channel('D590875','test',np.array([1,2,3,4],dtype=np.int),fid=(3,2)) npd,ch,fid = gdb.read_line('D590875',channels=['test']) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1.0,2.0,3.0,4.0]) self.assertEqual(fid[0],3.0) self.assertEqual(fid[1],2.0) gdb.new_channel('test', np.int32) gdb.write_channel('D590875', 'test', np.array([1,2,3,4], dtype=np.int), fid=(2.50,0.33)) npd,ch,fid = gdb.read_line('D590875', channels=['test']) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1.0,2.0,3.0,4.0]) 
self.assertEqual(fid[0], 2.5) self.assertEqual(fid[1], 0.33) gdb.discard() def test_write_VA_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: gdb.delete_channel('testVA') gdb.new_channel('testVA') try: gdb.write_channel('D590875', 'testVA', np.array([[1.0, 2.0, 3.0, 4.0], [10.0, 20.0, 30.0, 40.0], [15.0, 25.0, 35.0, 45.0]])) self.assertTrue(False) except gxdb.GdbException: pass gdb.delete_channel('testVA') gdb.write_channel('D590875', 'testVA', np.array([[1.0, 2.0, 3.0, 4.0], [10.0, 20.0, 30.0, 40.0], [15.0, 25.0, 35.0, 45.0]])) npd,ch,fid = gdb.read_line('D590875', channels=['testVA']) self.assertEqual(npd.shape,(3, 4)) self.assertEqual(npd[0, :].tolist(), [1.0, 2.0, 3.0, 4.0]) self.assertEqual(npd[1, :].tolist(), [10.0, 20.0, 30.0, 40.0]) self.assertEqual(npd[2, :].tolist(), [15.0, 25.0, 35.0, 45.0]) gdb.discard() def test_dummy_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: gdb.delete_channel('test') gdb.new_channel('test',dtype=np.int) dummy = gxu.gx_dummy(np.int) gdb.write_channel('D590875', 'test', np.array([1, 2, dummy, 4])) npd, ch, fid = gdb.read_line('D590875', channels=['test'], dtype=np.int) self.assertEqual(npd.shape,(4,1)) self.assertEqual(npd[:,0].tolist(),[1,2,dummy,4]) dm = gxu.dummy_mask(npd) self.assertEqual(dm.tolist(),[False,False,True,False]) gdb.discard() def test_newline_vv_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: data = gdb.read_line_vv('D578625',channels=['dx','dy','vector']) ch = [c[0] for c in data] datalen = data[0][1].length gdb.delete_line('testline') gdb.new_line('testline') gdb.write_line_vv('testline', data) npd2,ch2,fid2 = gdb.read_line('testline', channels=ch) self.assertEqual(npd2.shape, (datalen, len(data))) gdb.delete_line('testline') gdb.new_line('testline', gxdb.SYMB_LINE_NORMAL) gdb.write_line_vv('testline', (("single", data[0][1]),)) npd2,ch2,fid2 = gdb.read_line('testline',"single") self.assertEqual(npd2.shape,(datalen, 1)) 
gdb.delete_line('testline') gdb.new_line('testline', gxdb.SYMB_LINE_GROUP) gdb.write_line_vv('testline', [("single", data[0][1])]) npd2,ch2,fid2 = gdb.read_line('testline',"single") self.assertEqual(npd2.shape, (datalen, 1)) gdb.discard() def test_newline_GDB(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: npd, ch, fid = gdb.read_line('D578625', channels=['dx','dy','vector']) try: gdb.new_line("&$#@**") self.assertTrue(False) except gxdb.GdbException: pass try: gdb.new_line("D578625") self.assertTrue(False) except gxdb.GdbException: pass gdb.delete_line('wonk') gdb.new_line('wonk',group="wink") try: gdb.new_line('wonk') self.assertTrue(False) except gxdb.GdbException: pass gdb.delete_line('wonk') gdb.delete_line('testline') gdb.new_line('testline') gdb.write_line('testline', npd, channels=ch) npd2,ch2,fid2 = gdb.read_line('testline', channels=ch) self.assertEqual(npd.shape,npd2.shape) gdb.delete_line('testline') gdb.new_line('testline',gxdb.SYMB_LINE_NORMAL) gdb.write_line('testline',npd[:,0],"single") npd2,ch2,fid2 = gdb.read_line('testline',"single") self.assertEqual(npd2.shape,(npd.shape[0],1)) gdb.delete_line('testline') gdb.new_line('testline',gxdb.SYMB_LINE_GROUP) gdb.write_line('testline',npd[:,0],"single") npd2,ch2,fid2 = gdb.read_line('testline',"single") self.assertEqual(npd2.shape,(npd.shape[0],1)) gdb.delete_line('testline') gdb.new_line('testline',linetype=gxdb.SYMB_LINE_FLIGHT) ch = ['a','b','c','d'] try: gdb.write_line('testline', npd, channels=ch) self.assertTrue(False) except gxdb.GdbException: pass ch = ['a','b','c','d','e'] self.assertRaises(gxdb.GdbException, gdb.write_line, 'testline', npd, ['xx', 'yy']) gdb.write_line('testline', npd, channels=ch) npd2, ch2, fid2 = gdb.read_line('testline',channels=ch) self.assertEqual(npd.shape,npd2.shape) self.assertEqual(ch2, ch) gdb.delete_channel(ch) gdb.delete_line('testline') gdb.new_line('testline') gdb.delete_channel("bopper") gdb.write_line('testline',npd,channels="bopper") 
npd2,ch2,fid2 = gdb.read_line('testline',"bopper") self.assertEqual(npd.shape,npd2.shape) self.assertEqual(ch2[0],"bopper[0]") self.assertEqual(ch2[4],"bopper[4]") gdb.discard() def test_list_values_GDB(self): self.start() self.nl = 0 self.stp = 100 def enough(): self.nl += 1 if self.nl >= self.stp: return True else: return False with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: gdb.select_lines(select=False) gdb.select_lines('D578625,D2',select=True) self.nl = 0 self.stp = 2 gdb.delete_channel('testlist') gdb.new_channel('testlist',dtype=np.int) gdb.write_channel('D578625', 'testlist', np.array([1,2,3,4,4,4,5,6,7,7,7,6,5,4], dtype=np.int)) gdb.write_channel('D2', 'testlist', np.array([12,12,12,13,13,13], dtype=np.int)) listVal = gdb.list_values('testlist', umax=100, stop=enough) listVal.sort() self.assertEqual(listVal, ['1','12','13','2','3','4','5','6','7']) self.nl = 0 self.stp = 1 listVal = gdb.list_values('dx', umax=10000) self.assertEqual(len(listVal),29) listVal = gdb.list_values('dx') self.assertEqual(len(listVal),29) finally: gdb.discard() def test_new(self): self.start() gdb_file = os.path.join(self.folder, 'new.gdb') try: with gxdb.Geosoft_gdb.new(gdb_file, overwrite=True) as gdb: # read an image and put it in a new database with open(os.path.join(self.folder, 'image.png'), 'rb') as im_handle: im = Image.open(im_handle) im.thumbnail( (20,20), Image.ANTIALIAS) imageIn = np.asarray(im,dtype=np.float32) gdb.new_channel('R',dtype=np.int) gdb.new_channel('G',dtype=np.int) gdb.new_channel('B', dtype=np.int) gdb.new_channel('A', dtype=np.int) for l in range(imageIn.shape[0]): gdb.write_line('L{}'.format(l), imageIn[l,:,:], channels=['R','G','B','A']) self.assertEqual(len(gdb.list_lines()),imageIn.shape[0]) self.assertEqual(len(gdb.list_channels()),4) d,c,f = gdb.read_line('L5') self.assertEqual(d.shape[0],imageIn.shape[1]) self.assertEqual(d.shape[1],imageIn.shape[2]) self.assertRaises(gxdb.GdbException, gxdb.Geosoft_gdb.new, gdb_file) with 
gxdb.Geosoft_gdb.new(gdb_file, overwrite=True) as gdb: self.assertEqual(gdb.max_compressed_channel_bytes, 67106816) with gxdb.Geosoft_gdb.new(gdb_file, overwrite=True, page_size=0) as gdb: self.assertEqual(gdb.max_compressed_channel_bytes, 4194176) with gxdb.Geosoft_gdb.new(gdb_file, overwrite=True, page_size=4096) as gdb: self.assertEqual(gdb.max_compressed_channel_bytes, 268427264) try: with gxdb.Geosoft_gdb.new(gdb_file, overwrite=True, page_size=5000) as gdb: self.assertTrue(False) # this should have failed except: pass finally: gxdb.delete_files(gdb_file) def test_details(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: cs = gdb.new_channel("detailtest") det = gdb.channel_details(cs) self.assertEqual(det.get('name'),"detailtest") self.assertEqual(det.get('array'),1) self.assertEqual(det.get('decimal'),2) self.assertEqual(det.get('format'),0) self.assertEqual(det.get('label'),'detailtest') self.assertEqual(det.get('protect'),0) self.assertEqual(det.get('symbol'),cs) self.assertEqual(det.get('type'),gxapi.GS_DOUBLE) self.assertEqual(det.get('unit'),'') self.assertEqual(det.get('width'),12) gdb.set_channel_details(cs,{'protect':1, 'decimal':6, 'unit':'ft'}) det2 = gdb.channel_details(cs) self.assertEqual(det2.get('protect'),1) self.assertEqual(det2.get('decimal'),6) self.assertEqual(det2.get('unit'),'ft') gdb.set_channel_details(cs,det) det2 = gdb.channel_details(cs) self.assertEqual(det2.get('protect'),0) self.assertEqual(det2.get('decimal'),2) self.assertEqual(det2.get('unit'),'') det = gdb.line_details('D578625') self.assertEqual(det.get('category'),gxdb.SYMB_LINE_NORMAL) self.assertEqual(det.get('number'),578625) self.assertEqual(det.get('name'),'D578625') self.assertEqual(det.get('flight'),0) self.assertEqual(det.get('version'),0) self.assertEqual(det.get('type'),gxapi.DB_LINE_TYPE_RANDOM) self.assertEqual(det.get('groupclass'), None) gdb.delete_line('testgroup') ls = gdb.new_line('testgroup', group="TeSt") det = 
gdb.line_details(ls) self.assertEqual(det.get('category'),gxdb.SYMB_LINE_GROUP) self.assertEqual(det.get('name'),'testgroup') self.assertEqual(det.get('symbol'),ls) self.assertEqual(det.get('groupclass'),'TeSt') gdb.delete_line('testgroup') finally: gdb.discard() def test_channel(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: gdb.delete_channel("detailtest") ch = gxdb.Channel.new(gdb, "detailtest") name, symb = gdb.channel_name_symb(ch) self.assertEqual(ch.name, name) self.assertEqual(ch.symbol, symb) det = gdb.channel_details(ch.name) self.assertEqual(ch.name, det['name']) self.assertEqual(ch.array, det['array']) self.assertEqual(ch.decimal, det['decimal']) self.assertEqual(ch.format, det['format']) self.assertEqual(ch.label, det['label']) self.assertEqual(ch.protect, det['protect']) self.assertEqual(ch.symbol, det['symbol']) self.assertEqual(ch.type, det['type']) self.assertEqual(ch.unit_of_measure, det['unit']) self.assertEqual(ch.width, det['width']) self.assertEqual(ch.class_, det['class']) ch.protect = 1 ch.decimal = 6 ch.width = 10 ch.unit_of_measure = 'nT' ch.label = 'weirdo' ch.format = gxapi.DB_CHAN_FORMAT_GEOGR ch.class_ = 'geochem' self.assertEqual(ch.protect, True) self.assertEqual(ch.decimal, 6) self.assertEqual(ch.unit_of_measure, 'nT') self.assertEqual(ch.label, 'weirdo') self.assertEqual(ch.format, 4) self.assertEqual(ch.class_, 'geochem') ch.name = "new_name" self.assertEqual(ch.name, "new_name") try: ch.name = list(gdb.list_channels())[0] self.assertTrue(False) except: pass try: ch.name = 45 self.assertTrue(False) except: pass self.assertFalse(ch.locked) ch.lock = gxdb.SYMBOL_LOCK_READ self.assertTrue(ch.locked) self.assertEqual(ch.lock, gxdb.SYMBOL_LOCK_READ) ch.lock = gxdb.SYMBOL_LOCK_WRITE self.assertTrue(ch.locked) self.assertEqual(ch.lock, gxdb.SYMBOL_LOCK_WRITE) ch.locked = False self.assertRaises(gxdb.GdbException, gdb.delete_channel, ch.name) self.assertRaises(gxdb.GdbException, ch.delete) ch.protect = False 
ch.delete() self.assertEqual(ch.symbol, gxapi.NULLSYMB) finally: gdb.discard() def test_line(self): self.start() tmpgdb = tempfile.NamedTemporaryFile(mode='w', suffix='.gdb', delete=False) tmpgdb.close() shutil.copyfile(self.gdb_name, tmpgdb.name) try: with gxdb.Geosoft_gdb.open(tmpgdb.name) as gdb: gdb.delete_line("T9999") ln = gxdb.Line.new(gdb, "T9999") det = gdb.line_details(ln.name) self.assertEqual(ln.name, det['name']) self.assertEqual(ln.category, det['category']) self.assertEqual(ln.date, det['date']) self.assertEqual(ln.flight, det['flight']) self.assertEqual(ln.number, det['number']) self.assertEqual(ln.type, det['type']) self.assertEqual(ln.version, det['version']) self.assertEqual(ln.group, det['groupclass']) self.assertTrue(ln.selected) ln.date = 2017 self.assertEqual(ln.date, 2017) ln.selected = False self.assertFalse(ln.selected) ln.selected = True self.assertTrue(ln.selected) ln.number = 88.9 self.assertEqual(ln.number, 88) ln.number = -88.9 self.assertEqual(ln.number, -88) ln.type = gxdb.LINE_TYPE_NORMAL self.assertEqual(ln.type, 0) ln.type = gxdb.LINE_TYPE_BASE self.assertEqual(ln.type, 1) ln.type = gxdb.LINE_TYPE_TIE self.assertEqual(ln.type, 2) ln.type = gxdb.LINE_TYPE_TEST self.assertEqual(ln.type, 3) ln.type = gxdb.LINE_TYPE_TREND self.assertEqual(ln.type, 4) ln.type = gxdb.LINE_TYPE_SPECIAL self.assertEqual(ln.type, 5) ln.type = gxdb.LINE_TYPE_RANDOM self.assertEqual(ln.type, 6) ln.version = 7 self.assertEqual(ln.version, 7) ln.flight = 1000 self.assertEqual(ln.flight, 1000) self.assertFalse(ln.grouped) try: ln.group = 'billy' self.assertTrue(False) except gxdb.GdbException: pass self.assertEqual(ln.lock, gxdb.SYMBOL_LOCK_NONE) self.assertFalse(ln.locked) ln.lock = gxdb.SYMBOL_LOCK_READ self.assertTrue(ln.locked) self.assertEqual(ln.lock, gxdb.SYMBOL_LOCK_READ) ln.lock = gxdb.SYMBOL_LOCK_WRITE self.assertTrue(ln.locked) self.assertEqual(ln.lock, gxdb.SYMBOL_LOCK_WRITE) ln.locked = False ln.delete() self.assertEqual(ln.symbol, gxapi.NULLSYMB) 
gdb.delete_line("L88") ln = gxdb.Line.new(gdb, "L88", group='john') self.assertTrue(ln.grouped) self.assertEqual(ln.group, 'john') ln.group = 'billy' self.assertEqual(ln.group, 'billy') gdb.delete_line(ln.name) # Correct empty line handling gdb.delete_line_data('D578625') data, _, fid = gdb.read_line('D578625') self.assertEqual((0.0, 1.0), fid) self.assertEqual((0, 10), data.shape) df, _, fid = gdb.read_line_dataframe('D578625') self.assertEqual((0, 10), df.shape) self.assertEqual((0.0, 1.0), fid) # Correct single row line handling gdb.write_line('D578625', np.zeros((1, 10), dtype=np.float64)) data, _, fid = gdb.read_line('D578625') self.assertEqual((0.0, 1.0), fid) self.assertEqual((1, 10), data.shape) self.npAssertEqual([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], data) df, _, fid = gdb.read_line_dataframe('D578625') self.assertEqual((1, 10), df.shape) self.assertEqual((0.0, 1.0), fid) self.npAssertEqual(np.array([[0, 0, 0, 0, 0, 0, '0', 0, 0, 0]], dtype=np.object), df.values) finally: tmpgdb.delete = True def test_locks(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: gdb.delete_line("T9999") l = gxdb.Line.new(gdb, "T9999") l.lock = gxdb.SYMBOL_LOCK_WRITE self.assertTrue(l.locked) ll = gxdb.Line.new(gdb, "T8") ll.lock = gxdb.SYMBOL_LOCK_READ self.assertTrue(ll.locked) c = gxdb.Channel.new(gdb, "dummy") c.lock = gxdb.SYMBOL_LOCK_WRITE self.assertTrue(c.locked) cc = gxdb.Channel.new(gdb, "dummy2") cc.lock = gxdb.SYMBOL_LOCK_WRITE self.assertTrue(cc.locked) gdb.unlock_all() self.assertFalse(l.locked) self.assertFalse(ll.locked) self.assertFalse(c.locked) self.assertFalse(cc.locked) finally: gdb.discard() def test_create_line_name(self): self.start() self.assertEqual(gxdb.create_line_name(10, gxdb.LINE_TYPE_NORMAL, 4), 'L10.4') self.assertEqual(gxdb.create_line_name(10, gxdb.LINE_TYPE_BASE, 4), 'B10.4') self.assertEqual(gxdb.create_line_name('abc', gxdb.LINE_TYPE_RANDOM, 4), 'Dabc.4') self.assertEqual(gxdb.create_line_name('20', 
gxdb.LINE_TYPE_SPECIAL, 4), 'P20.4') self.assertEqual(gxdb.create_line_name('899', gxdb.LINE_TYPE_TIE, 1), 'T899.1') self.assertEqual(gxdb.create_line_name('899', gxdb.LINE_TYPE_TEST, 1), 'S899.1') self.assertEqual(gxdb.create_line_name('899', gxdb.LINE_TYPE_TREND, 1), 'R899.1') def test_bearing(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: ln = gxdb.Line(gdb, 'D578625') self.assertAlmostEqual(ln.bearing(), 0.0) def test_metadata(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as g: m = g.metadata gm = m['geosoft'] self.assertEqual(len(gm), 2) self.assertTrue('dataset' in gm) self.assertTrue('georeference' in gm['dataset']) newstuff = {'maki': {'a': 1, 'b': (4, 5, 6), 'units': 'nT'}} g.metadata = newstuff with gxdb.Geosoft_gdb.open(self.gdb_name) as g: m = g.metadata gm = m['geosoft'] self.assertEqual(len(gm), 2) self.assertTrue('dataset' in gm) maki = m['maki'] self.assertEqual(maki['b'], ['4', '5', '6']) self.assertEqual(maki['units'], 'nT') with gxdb.Geosoft_gdb.open(self.gdb_name) as g: d = g.get_gx_metadata().meta_dict() self.assertTrue('Geosoft' in d) g.update_gxmeta({'billy': (1, 2, 3), 'bob': {'thorton': 'interesting'}}) d = g.get_gx_metadata().meta_dict() self.assertEqual(tuple(d['billy']), (1, 2, 3)) self.assertEqual(d['bob']['thorton'], 'interesting') m = gxmeta.Metadata() m.update_dict({'nested': {'billy': (1, 2, 3), 'bob': {'thorton': 'interesting'}}}) g.update_gxmeta(m) d = g.get_gx_metadata().meta_dict() self.assertEqual(d['nested']['bob']['thorton'], 'interesting') def test_coordinate_system(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: x, y, z = gdb.xyz_channels self.assertEqual(gxdb.Channel(gdb, x).unit_of_measure, 'm') gdb.coordinate_system = '{"units": "km"}' self.assertEqual(gxdb.Channel(gdb, x).unit_of_measure, 'km') self.assertEqual(gxdb.Channel(gdb, y).unit_of_measure, 'km') self.assertEqual(gxdb.Channel(gdb, z).unit_of_measure, 'km') self.assertEqual(gdb.coordinate_system, 
'*unknown') gxdb.Channel(gdb, 'Z').delete() self.assertEqual(gdb.xyz_channels, ('X', 'Y', None)) gdb.coordinate_system = '{"units": "cm"}' self.assertEqual(gxdb.Channel(gdb, x).unit_of_measure, 'cm') self.assertEqual(gxdb.Channel(gdb, y).unit_of_measure, 'cm') finally: gdb.discard() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: self.assertEqual(str(gdb.coordinate_system), 'Corrego Alegre 1970-72 / UTM zone 23S') gdb.coordinate_system = 'NAD83 / UTM zone 25N' self.assertEqual(str(gdb.coordinate_system), 'NAD83 / UTM zone 25N') finally: gdb.discard() def test_dup(self): self.start() with gxdb.Geosoft_gdb.open(self.gdb_name) as gdb: try: gxdb.Channel(gdb, 'x').width = 45 gxdb.Channel(gdb, 'x').decimal = 4 xx = gxdb.Channel.new(gdb, 'xx', dup='x') self.assertEqual(xx.unit_of_measure, 'm') self.assertEqual(xx.width, 45) self.assertEqual(xx.decimal, 4) master_line = gxdb.Line(gdb, list(gdb.list_lines())[0]) master_line.flight = 77 copyline = gxdb.Line.new(gdb, 'D234567', dup=master_line) self.assertEqual(copyline.flight, 77) finally: gdb.discard() @unittest.skip('skipping to let fixture pass') def test_large_stress(self): self.start() try: name = None with gxdb.Geosoft_gdb.new('new', overwrite=True, comp=gxdb.COMP_NONE, page_size=64) as gdb: name = gdb.file_name npd = np.zeros(1000000) #TODO, this is 8 meg of data. This should not fit in 4 meg. line = gdb.new_line('test') gdb.write_line(line, npd, ['xx']) npd2, ch, fid = gdb.read_line(line) self.assertEqual(len(ch), 1) with gxdb.Geosoft_gdb.new('new', overwrite=True, comp=gxdb.COMP_NONE, page_size=64) as gdb: name = gdb.file_name npd = np.zeros(4000000) #TODO, this is 32 meg of data, reported as 4.11 meg in the error? 
line = gdb.new_line('test') gdb.write_line(line, npd, ['xx']) npd2, ch, fid = gdb.read_line(line) self.assertEqual(len(ch), 1) finally: gxdb.delete_files(name) def test_code_example(self): self.start() gdb = gxdb.Geosoft_gdb.open(self.gdb_name) try: for line in gdb.list_lines(): npd, ch, fid = gdb.read_line(line, channels=['X', 'Y', 'Z']) npd = np.square(npd) distance_from_origin = np.sqrt(npd[0] + npd[1] + npd[2]) gdb.write_channel(line, 'distance', distance_from_origin, fid) finally: gdb.discard() def test_figure_line(self): self.start() map_file = gxdb.Geosoft_gdb.open(self.gdb_name).figure_map(file_name='figure_line.map', draw=gxdb.DRAW_AS_LINES).file_name self.crc_map(map_file) def test_figure_point(self): self.start() map_file = gxdb.Geosoft_gdb.open(self.gdb_name).figure_map(file_name='figure_point.map').file_name self.crc_map(map_file) def test_temp_gdb(self): self.start() gdb = gxdb.Geosoft_gdb.new() gdb.write_line('L0', np.array([1., 2., 3.]), 'x') gdb.write_line('L1', [1, 2, 3], 'y') self.assertEqual(len(gdb.list_lines()), 2) self.assertEqual(len(gdb.list_channels()), 2) self.assertTrue('x' in gdb.list_channels() and 'y' in gdb.list_channels()) gdb.close(discard=True) def test_wide_db_read(self): self.start() with gxdb.Geosoft_gdb.new() as gdb: line = gxdb.Line.new(gdb, 'L0') x = gxdb.Channel.new(gdb, 'x') y = gxdb.Channel.new(gdb, 'y') c1 = gxdb.Channel.new(gdb, 'c1', array=1024) c2 = gxdb.Channel.new(gdb, 'c2', array=1024) data = np.array(range(1024 * 10), dtype=np.float64).reshape((-1, 1024)) gdb.write_channel(line, x, data[:, 0]) gdb.write_channel(line, y, data[:, 1]) gdb.write_channel(line, c1, data) gdb.write_channel(line, c2, data) npd, ch, fid = gdb.read_line(line) self.assertEqual(npd.shape, (10, 2050)) self.assertEqual(ch[0], 'x') self.assertEqual(ch[1], 'y') self.assertEqual(ch[2], 'c1[0]') self.assertEqual(ch[3], 'c1[1]') self.assertEqual(ch[2049], 'c2[1023]') 
############################################################################################### if __name__ == '__main__': unittest.main() <file_sep>/geosoft/gxapi/GXMVIEW.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXAGG import GXAGG from .GXBF import GXBF from .GXCSYMB import GXCSYMB from .GXDATALINKD import GXDATALINKD from .GXITR import GXITR from .GXLST import GXLST from .GXMAP import GXMAP from .GXMETA import GXMETA from .GXREG import GXREG from .GXTPAT import GXTPAT from .GXVECTOR3D import GXVECTOR3D from .GXVOXD import GXVOXD ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMVIEW(gxapi_cy.WrapMVIEW): """ GXMVIEW class. A view (`GXMVIEW <geosoft.gxapi.GXMVIEW>` class) has a 2-D/3-D translation matrix, a map projection and a clip region. A view contains any number of "groups", and each "group" contains one or more graphics elements (entities). Different types of groups will contain different types of entities: **Note:** `GXCSYMB <geosoft.gxapi.GXCSYMB>` groups (color symbols) contain data and rules for presenting the data as color symbols. See `col_symbol <geosoft.gxapi.GXMVIEW.col_symbol>` and the `GXCSYMB <geosoft.gxapi.GXCSYMB>` class. `GXAGG <geosoft.gxapi.GXAGG>` groups (aggregates) contain images. See `aggregate <geosoft.gxapi.GXMVIEW.aggregate>` and the `GXAGG <geosoft.gxapi.GXAGG>` class. Standard groups contain symbols, lines, polylines, and polygons. See `start_group <geosoft.gxapi.GXMVIEW.start_group>`. 
""" def __init__(self, handle=0): super(GXMVIEW, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMVIEW <geosoft.gxapi.GXMVIEW>` :returns: A null `GXMVIEW <geosoft.gxapi.GXMVIEW>` :rtype: GXMVIEW """ return GXMVIEW() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # 3D Entity def box_3d(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Draw a 3D box :param min_x: Min X :param min_y: Min Y :param min_z: Min Z :param max_x: Max X :param max_y: Max Y :param max_z: Max Z :type min_x: float :type min_y: float :type min_z: float :type max_x: float :type max_y: float :type max_z: float .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Fill color is used to color the box. """ self._box_3d(min_x, min_y, min_z, max_x, max_y, max_z) def crc_view(self, crc, file): """ Generate an XML CRC of a View :param crc: CRC returned :param file: Name of xml to generate (.zip added) :type crc: int_ref :type file: str .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ crc.value = self._crc_view(crc.value, file.encode()) def crc_view_group(self, group, crc, file): """ Generate an XML CRC of a Group :param group: Name of Group :param crc: CRC returned :param file: Name of xml to generate (.zip added) :type group: str :type crc: int_ref :type file: str .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ crc.value = self._crc_view_group(group.encode(), crc.value, file.encode()) def view_group_json(self, group, file): """ Generate a JSON representation of a Group. 
:param group: Name of Group :param file: Name of JSON file to generate. :type group: str :type file: str .. versionadded:: 9.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._view_group_json(group.encode(), file.encode()) def cylinder_3d(self, start_x, start_y, start_z, end_x, end_y, end_z, start_radius, end_radius, flags): """ Draw a 3D cylinder :param start_x: Start X :param start_y: Start Y :param start_z: Start Z :param end_x: End X :param end_y: End Y :param end_z: End Z :param start_radius: Start Radius (can be zero) :param end_radius: End Radius (can be zero) :param flags: :ref:`MVIEW_CYLINDER3D` :type start_x: float :type start_y: float :type start_z: float :type end_x: float :type end_y: float :type end_z: float :type start_radius: float :type end_radius: float :type flags: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Fill color is used to color the cylinder. The flags determine if the cylinder is open and what end are closed. Note that you can create cones by specifying a 0 radius for one of the ends. 
""" self._cylinder_3d(start_x, start_y, start_z, end_x, end_y, end_z, start_radius, end_radius, flags) def draw_object_3d(self, type, mode, objects, default_count, vert_v_vx, vert_v_vy, vert_v_vz, norm_v_vx, norm_v_vy, norm_v_vz, color_vv, index_vv, count_vv): """ Draw a 3D object optimized for rendering :param type: :ref:`MVIEW_DRAWOBJ3D_ENTITY` :param mode: :ref:`MVIEW_DRAWOBJ3D_MODE` :param objects: Number of Objects :param default_count: Default Count (if variable and not specified) :param vert_v_vx: Verticies X :param vert_v_vy: Verticies Y :param vert_v_vz: Verticies Z :param norm_v_vx: Normals X (can be NULL) :param norm_v_vy: Normals Y (can be NULL) :param norm_v_vz: Normals Z (can be NULL) :param color_vv: Colors `GXVV <geosoft.gxapi.GXVV>` (can be NULL) :param index_vv: Index `GXVV <geosoft.gxapi.GXVV>` (can be NULL) :param count_vv: Count `GXVV <geosoft.gxapi.GXVV>` (can be NULL) :type type: int :type mode: int :type objects: int :type default_count: int :type vert_v_vx: GXVV :type vert_v_vy: GXVV :type vert_v_vz: GXVV :type norm_v_vx: GXVV :type norm_v_vy: GXVV :type norm_v_vz: GXVV :type color_vv: GXVV :type index_vv: GXVV :type count_vv: GXVV .. 
versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._draw_object_3d(type, mode, objects, default_count, vert_v_vx, vert_v_vy, vert_v_vz, norm_v_vx, norm_v_vy, norm_v_vz, color_vv, index_vv, count_vv) def draw_surface_3d_ex(self, group_name, vert_v_vx, vert_v_vy, vert_v_vz, norm_v_vx, norm_v_vy, norm_v_vz, color_vv, color, tri_vv_pt1, tri_vv_pt2, tri_vv_pt3, ipj): """ Draw a 3D object built from triangles :param group_name: Group name :param vert_v_vx: Vertices X (`GS_REAL <geosoft.gxapi.GS_REAL>`) :param vert_v_vy: Vertices Y (`GS_REAL <geosoft.gxapi.GS_REAL>`) :param vert_v_vz: Vertices Z (`GS_REAL <geosoft.gxapi.GS_REAL>`) :param norm_v_vx: Normals X (`GS_REAL <geosoft.gxapi.GS_REAL>`) :param norm_v_vy: Normals Y (`GS_REAL <geosoft.gxapi.GS_REAL>`) :param norm_v_vz: Normals Z (`GS_REAL <geosoft.gxapi.GS_REAL>`) :param color_vv: Colors `GXVV <geosoft.gxapi.GXVV>` (`GS_INT <geosoft.gxapi.GS_INT>`) [can be NULL] :param color: Color used if above `GXVV <geosoft.gxapi.GXVV>` is NULL [0 for `GXMVIEW <geosoft.gxapi.GXMVIEW>`'s fill color] :param tri_vv_pt1: Triangles Point 1 (`GS_INT <geosoft.gxapi.GS_INT>`) :param tri_vv_pt2: Triangles Point 2 (`GS_INT <geosoft.gxapi.GS_INT>`) :param tri_vv_pt3: Triangles Point 3 (`GS_INT <geosoft.gxapi.GS_INT>`) :param ipj: Native `GXIPJ <geosoft.gxapi.GXIPJ>` of 3D object :type group_name: str :type vert_v_vx: GXVV :type vert_v_vy: GXVV :type vert_v_vz: GXVV :type norm_v_vx: GXVV :type norm_v_vy: GXVV :type norm_v_vz: GXVV :type color_vv: GXVV :type color: int :type tri_vv_pt1: GXVV :type tri_vv_pt2: GXVV :type tri_vv_pt3: GXVV :type ipj: GXIPJ .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Provide one normal per vertex. Triangles are defined by indices into the set of vertices. 
""" self._draw_surface_3d_ex(group_name.encode(), vert_v_vx, vert_v_vy, vert_v_vz, norm_v_vx, norm_v_vy, norm_v_vz, color_vv, color, tri_vv_pt1, tri_vv_pt2, tri_vv_pt3, ipj) def draw_surface_3d_from_file(self, group_name, surface_file): """ Draw a 3D object from a surface file :param group_name: Group name :param surface_file: Surface file :type group_name: str :type surface_file: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._draw_surface_3d_from_file(group_name.encode(), surface_file.encode()) @classmethod def font_weight_lst(cls, lst): """ Fill a `GXLST <geosoft.gxapi.GXLST>` with the different font weights. :param lst: `GXLST <geosoft.gxapi.GXLST>` object :type lst: GXLST .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVIEW._font_weight_lst(GXContext._get_tls_geo(), lst) def get_agg_file_names(self, group, vv): """ Get the names of grid files stored in an `GXAGG <geosoft.gxapi.GXAGG>`. :param group: Group name :param vv: Returned string `GXVV <geosoft.gxapi.GXVV>` of type -`STR_FILE <geosoft.gxapi.STR_FILE>` :type group: str :type vv: GXVV .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The group must be an `GXAGG <geosoft.gxapi.GXAGG>` group. Check this using `is_group <geosoft.gxapi.GXMVIEW.is_group>` and `MVIEW_IS_AGG <geosoft.gxapi.MVIEW_IS_AGG>`. """ self._get_agg_file_names(group.encode(), vv) def get_meta(self, group, meta): """ Retrieves Metadata from a group :param group: Group Name :param meta: Meta name :type group: str :type meta: str_ref :returns: `GXMETA <geosoft.gxapi.GXMETA>` Object :rtype: GXMETA .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, meta.value = self._get_meta(group.encode(), meta.value.encode()) return GXMETA(ret_val) def measure_text(self, text, x_min, y_min, x_max, y_max): """ Compute the bounding rectangle in view units of the text using the current attributes. :param text: Text string :param x_min: X minimum :param y_min: Y minimum :param x_max: X maximum :param y_max: Y maximum :type text: str :type x_min: float_ref :type y_min: float_ref :type x_max: float_ref :type y_max: float_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Area will be 0 if error occurred (does not fail). This will return the bounding rectangle as if the text was placed at 0,0 and adjusted according to the current text alignment and angle set for the view. Also see notes for `text_size <geosoft.gxapi.GXMVIEW.text_size>`. """ x_min.value, y_min.value, x_max.value, y_max.value = self._measure_text(text.encode(), x_min.value, y_min.value, x_max.value, y_max.value) def point_3d(self, x, y, z): """ Draw a 3D point. :param x: X :param y: Y :param z: Z :type x: float :type y: float :type z: float .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Line color and line thickness will affect rendering. """ self._point_3d(x, y, z) def poly_line_3d(self, vv_x, vv_y, vv_z): """ Draw a 3D polyline. :param vv_x: X coordinates. :param vv_y: Y coordinates. :param vv_z: Z coordinates. :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummies are not allowed in the line. 
Line Color, Thickness is supported on rendering """ self._poly_line_3d(vv_x, vv_y, vv_z) def relocate_group(self, group, min_x, min_y, max_x, max_y, asp): """ Re-locate a group in a view. :param group: Group name :param min_x: Area X minimum :param min_y: Area Y minimum :param max_x: Area X maximum :param max_y: Area Y maximum :param asp: :ref:`MVIEW_RELOCATE` :type group: str :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type asp: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._relocate_group(group.encode(), min_x, min_y, max_x, max_y, asp) def set_meta(self, group, meta, name): """ Update the `GXMETA <geosoft.gxapi.GXMETA>` in this group with the new meta object. :param group: Group Name :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object :param name: Meta name of Object :type group: str :type meta: GXMETA :type name: str .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta(group.encode(), meta, name.encode()) def sphere_3d(self, x, y, z, radius): """ Draw a 3D sphere :param x: Center X :param y: Center Y :param z: Center Z :param radius: Radius :type x: float :type y: float :type z: float :type radius: float .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Fill color is used to color the sphere. """ self._sphere_3d(x, y, z, radius) def update_met_afrom_group(self, group, meta): """ Fill the `GXMETA <geosoft.gxapi.GXMETA>` with group dataset information :param group: Group Name :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to fill :type group: str :type meta: GXMETA .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._update_met_afrom_group(group.encode(), meta) # 3D Plane def delete_plane(self, plane, del_grp): """ Delete a plane in a view :param plane: Plane number to delete :param del_grp: TRUE to delete all groups on the plane :type plane: int :type del_grp: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the groups on the plane are not deleted, they will remain in the 3D view as "New" groups but will be unassigned to a plane. The SetAllNewGroupsToPlane function can be used to assign these groups to a different plane. """ self._delete_plane(plane, del_grp) def get_plane_clip_ply(self, plane, pply): """ Get the Plane Clip Region :param plane: Plane index :param pply: Clip Region :type plane: int :type pply: GXPLY .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** By default it is the View's Clip Region """ self._get_plane_clip_ply(plane, pply) def get_plane_equation(self, plane, pitch, yaw, roll, x, y, z, sx, sy, sz): """ Get the equation of a plane :param plane: Plane index :param pitch: Rotation about X (Y toward Z +ve, between -360 and 360) :param yaw: Rotation about Y (Z toward X +ve, between -360 and 360) :param roll: Rotation about Z (Y toward X +ve, between -360 and 360) :param x: X offset of plane :param y: Y offset of plane :param z: Z offset of plane :param sx: X scale :param sy: Y scale :param sz: Z scale :type plane: int :type pitch: float_ref :type yaw: float_ref :type roll: float_ref :type x: float_ref :type y: float_ref :type z: float_ref :type sx: float_ref :type sy: float_ref :type sz: float_ref .. 
versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ pitch.value, yaw.value, roll.value, x.value, y.value, z.value, sx.value, sy.value, sz.value = self._get_plane_equation(plane, pitch.value, yaw.value, roll.value, x.value, y.value, z.value, sx.value, sy.value, sz.value) def get_view_plane_equation(self, pitch, yaw, roll, x, y, z, sx, sy, sz): """ Get the View's Plane Equation :param pitch: Angle in X :param yaw: Angle in Y :param roll: Angle in Z :param x: Offset in X :param y: Offset in Y :param z: Offset in Z :param sx: Scale in X :param sy: Scale in Y :param sz: Scale in Z :type pitch: float_ref :type yaw: float_ref :type roll: float_ref :type x: float_ref :type y: float_ref :type z: float_ref :type sx: float_ref :type sy: float_ref :type sz: float_ref .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ pitch.value, yaw.value, roll.value, x.value, y.value, z.value, sx.value, sy.value, sz.value = self._get_view_plane_equation(pitch.value, yaw.value, roll.value, x.value, y.value, z.value, sx.value, sy.value, sz.value) def create_plane(self, plane): """ Create a 3D Plane for 2D Groups :param plane: Name of Plane :type plane: str :returns: x - Index of plane :rtype: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._create_plane(plane.encode()) return ret_val def find_plane(self, plane): """ Find a plane in a view :param plane: Name of the plane :type plane: str :returns: Plane number, -1 if not found :rtype: int .. 
versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._find_plane(plane.encode()) return ret_val def is_surface_plane(self, plane): """ Is a surface plane? :param plane: Name of the plane :type plane: str :rtype: bool .. versionadded:: 9.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_surface_plane(plane.encode()) return ret_val def is_plane_visible(self, plane): """ Is the plane visible? :param plane: Name of the plane :type plane: str :rtype: bool .. versionadded:: 9.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_plane_visible(plane.encode()) return ret_val def get_def_plane(self, name): """ Get the default drawing plane. :param name: Name :type name: str_ref .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 2D drawing to a 3D View will always be placed on the default drawing plane. If no default drawing plane has been set, the first valid plane in the view is used as the default drawing plane. """ name.value = self._get_def_plane(name.value.encode()) def is_view_3d(self): """ Is the view 3D? :returns: TRUE if view is 3D :rtype: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_view_3d() return ret_val def is_section(self): """ Is the view a section view? :returns: TRUE if view is a section view. :rtype: int .. 
versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Section views are recognized because their projection contains one of the following orientations: `IPJ_ORIENT_SECTION <geosoft.gxapi.IPJ_ORIENT_SECTION>` - Target-type sections with Z projection horizontally `IPJ_ORIENT_SECTION_NORMAL <geosoft.gxapi.IPJ_ORIENT_SECTION_NORMAL>` - Like `IPJ_ORIENT_SECTION <geosoft.gxapi.IPJ_ORIENT_SECTION>`, but Z projects perpendicular to the section plane. `IPJ_ORIENT_SECTION_CROOKED <geosoft.gxapi.IPJ_ORIENT_SECTION_CROOKED>` - Crooked sections `IPJ_ORIENT_3D <geosoft.gxapi.IPJ_ORIENT_3D>` - Some Sections extracted from a voxel - e.g. VoxelToGrids, as the voxel can have any orientation in 3D. """ ret_val = self._is_section() return ret_val def list_plane_groups(self, plane, lst): """ List all groups in a specific plane of a 3D view :param plane: Plane number :param lst: List of group names and numbers :type plane: int :type lst: GXLST .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The group names are placed in the list names, group numbers are placed in the list values. Groups are added to the end of the `GXLST <geosoft.gxapi.GXLST>`. """ self._list_plane_groups(plane, lst) def list_planes(self, lst): """ List all planes in a 3D view :param lst: List of plane names and numbers :type lst: GXLST .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The plane names are placed in the list names, plane numbers are placed in the list values. Planes are added to the end of the `GXLST <geosoft.gxapi.GXLST>`. 
""" self._list_planes(lst) def set_all_groups_to_plane(self, plane): """ Set all groups to be within one plane :param plane: Plane Index to set all groups to :type plane: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_all_groups_to_plane(plane) def set_all_new_groups_to_plane(self, plane): """ Set all groups that are not in any plane to this plane :param plane: Plane Index to set all groups to :type plane: int .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_all_new_groups_to_plane(plane) def set_def_plane(self, name): """ Set the default drawing plane. :param name: Name :type name: str .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 2D drawing to a 3D View will always be placed on the default drawing plane. If no default drawing plane has been set, the first valid plane in the view is used as the default drawing plane. """ self._set_def_plane(name.encode()) def set_group_to_plane(self, plane, group): """ Set a group to a plane :param plane: Plane Index to set all groups to :param group: Name of group to set :type plane: int :type group: str .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_group_to_plane(plane, group.encode()) def set_3dn(self, o3dn): """ Set the `GX3DN <geosoft.gxapi.GX3DN>` object for this view :param o3dn: `GX3DN <geosoft.gxapi.GX3DN>` to set (NULL for 2D view) :type o3dn: GX3DN .. 
versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To make the view a 2D view, set a `GX3DN <geosoft.gxapi.GX3DN>` of NULL. """ self._set_3dn(o3dn) def get_3d_point_of_view(self, x, y, z, distance, declination, inclination): """ Get 3D point of view (values will be `rDUMMY <geosoft.gxapi.rDUMMY>` for 2D views) :param x: X center :param y: Y center :param z: Z center :param distance: Distance from center :param declination: Declination, 0 to 360 CW from Y :param inclination: Inclination, -90 to +90 :type x: float_ref :type y: float_ref :type z: float_ref :type distance: float_ref :type declination: float_ref :type inclination: float_ref .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x.value, y.value, z.value, distance.value, declination.value, inclination.value = self._get_3d_point_of_view(x.value, y.value, z.value, distance.value, declination.value, inclination.value) def set_3d_point_of_view(self, x, y, z, distance, declination, inclination): """ Set 3D point of view (no effect on 2D views) :param x: X center :param y: Y center :param z: Z center :param distance: Distance from center :param declination: Declination, 0 to 360 CW from Y :param inclination: Inclination, -90 to +90 :type x: float :type y: float :type z: float :type distance: float :type declination: float :type inclination: float .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_3d_point_of_view(x, y, z, distance, declination, inclination) def set_plane_clip_ply(self, plane, pply): """ Set the Plane Clip Region :param plane: Plane index :param pply: Clip Region :type plane: int :type pply: GXPLY .. 
versionadded:: 5.1.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** By default it is the View's Clip Region """ self._set_plane_clip_ply(plane, pply) def set_plane_equation(self, plane, pitch, yaw, roll, x, y, z, sx, sy, sz): """ Set the equation of a plane :param plane: Plane index :param pitch: Rotation about X (Z toward Y +ve, between -360 and 360) :param yaw: Rotation about Y (Z toward X +ve, between -360 and 360) :param roll: Rotation about Z (Y toward X +ve, between -360 and 360) :param x: X offset of plane :param y: Y offset of plane :param z: Z offset of plane :param sx: X scale :param sy: Y scale :param sz: Z scale :type plane: int :type pitch: float :type yaw: float :type roll: float :type x: float :type y: float :type z: float :type sx: float :type sy: float :type sz: float .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** For a grid with the "Y" axis giving elevation: use rotations = (-90, 0, 0) for a section with azimuth 90 (E-W) use rotations = (-90, 0, -90) for a section with azimuth 0 (N-S) """ self._set_plane_equation(plane, pitch, yaw, roll, x, y, z, sx, sy, sz) def set_plane_surface(self, plane, surface): """ Set the surface image of a plane :param plane: Plane index :param surface: Optional surface image/grid name, can be empty :type plane: int :type surface: str .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_plane_surface(plane, surface.encode()) def get_plane_surface(self, plane, surface): """ Get the surface image of a plane :param plane: Plane index :param surface: Optional surface image/grid name, can be empty :type plane: int :type surface: str_ref .. 
versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ surface.value = self._get_plane_surface(plane, surface.value.encode()) def set_plane_surf_info(self, plane, sample, base, scale, min, max): """ Set the surface information :param plane: Plane index :param sample: Sample rate (>=1) :param base: Base :param scale: Scale :param min: Min :param max: Max :type plane: int :type sample: int :type base: float :type scale: float :type min: float :type max: float .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_plane_surf_info(plane, sample, base, scale, min, max) def get_plane_surf_info(self, plane, sample, base, scale, min, max): """ Get the surface information :param plane: Plane index :param sample: Sample rate (>=1) :param base: Base :param scale: Scale :param min: Min :param max: Max :type plane: int :type sample: int_ref :type base: float_ref :type scale: float_ref :type min: float_ref :type max: float_ref .. 
versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ sample.value, base.value, scale.value, min.value, max.value = self._get_plane_surf_info(plane, sample.value, base.value, scale.value, min.value, max.value) # 3D Rendering 2D def define_plane_3d(self, center_x, center_y, center_z, x_vector_x, x_vector_y, x_vector_z, y_vector_x, y_vector_y, y_vector_z): """ Define a 2D drawing plane based on point and normal :param center_x: Center point X :param center_y: Center point Y :param center_z: Center point Z :param x_vector_x: X Vector X :param x_vector_y: X Vector Y :param x_vector_z: X Vector Z :param y_vector_x: Y Vector X :param y_vector_y: Y Vector Y :param y_vector_z: Y Vector Z :type center_x: float :type center_y: float :type center_z: float :type x_vector_x: float :type x_vector_y: float :type x_vector_z: float :type y_vector_x: float :type y_vector_y: float :type y_vector_z: float .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 2D rendering commands are translated to 3D commands based on the plane. """ self._define_plane_3d(center_x, center_y, center_z, x_vector_x, x_vector_y, x_vector_z, y_vector_x, y_vector_y, y_vector_z) def define_viewer_axis_3d(self, center_x, center_y, center_z, dir_point_x, dir_point_y, dir_point_z): """ Define a 2D drawing plane based on the user's view that oriented around the vector. :param center_x: Center point X :param center_y: Center point Y :param center_z: Center point Z :param dir_point_x: Directional Point X :param dir_point_y: Directional Point Y :param dir_point_z: Directional Point Z :type center_x: float :type center_y: float :type center_z: float :type dir_point_x: float :type dir_point_y: float :type dir_point_z: float .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._define_viewer_axis_3d(center_x, center_y, center_z, dir_point_x, dir_point_y, dir_point_z) def define_viewer_plane_3d(self, center_x, center_y, center_z): """ Define a 2D drawing plane based on the user's view. :param center_x: Center point X :param center_y: Center point Y :param center_z: Center point Z :type center_x: float :type center_y: float :type center_z: float .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The plane is always facing the viewer. Otherwise the this is identical to the previous """ self._define_viewer_plane_3d(center_x, center_y, center_z) # 3D Snapshots def get_3d_snapshots(self): """ Get the list of 3D snapshots in a 3D view. :returns: `GXLST <geosoft.gxapi.GXLST>` object :rtype: GXLST .. versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns name/guid pairs. """ ret_val = self._get_3d_snapshots() return GXLST(ret_val) def restore_3d_snapshot(self, guid): """ Restore 3D view to specific snapshot state. :param guid: Snapshot GUID :type guid: str .. versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._restore_3d_snapshot(guid.encode()) def capture_3d_snapshot(self, name, description, light_weight, guid): """ Capture current 3D view state to a snapshot. :param name: Snapshot name :param description: Snapshot description :param light_weight: Is this a light weight snapshot, i.e. just captures view orientation and type and not group visibility/clipping etc. 
:param guid: Snapshot GUID :type name: str :type description: str :type light_weight: bool :type guid: str_ref .. versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ guid.value = self._capture_3d_snapshot(name.encode(), description.encode(), light_weight, guid.value.encode()) # Clipping def clip_poly_ex(self, vv_x, vv_y, unit, exclude): """ Add a polygon to the clip region. :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` :param unit: :ref:`MVIEW_UNIT` :param exclude: Exclude :type vv_x: GXVV :type vv_y: GXVV :type unit: int :type exclude: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The polygon will be added to the current clip region. The `GXVV <geosoft.gxapi.GXVV>`'s cannot have any dummy elements. """ self._clip_poly_ex(vv_x, vv_y, unit, exclude) def clip_rect_ex(self, min_x, min_y, max_x, max_y, unit, exclude): """ Add a rectangle to the clip region. :param min_x: X minimum :param min_y: Y minimum :param max_x: X maximum :param max_y: Y maximum :param unit: :ref:`MVIEW_UNIT` :param exclude: Exclude :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type unit: int :type exclude: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The rectangle will be added to the current clip region. """ self._clip_rect_ex(min_x, min_y, max_x, max_y, unit, exclude) def clip_clear(self): """ Remove/clear the view clip region. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clip_clear() def clip_groups(self, mode): """ Set the Clipping mode on/off for all groups. 
:param mode: :ref:`MVIEW_CLIP` :type mode: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clip_groups(mode) def clip_marked_groups(self, mode): """ Set the Clipping mode on/off for marked groups. :param mode: :ref:`MVIEW_CLIP` :type mode: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clip_marked_groups(mode) def clip_poly(self, vv_x, vv_y, unit): """ Add a polygon to the clip region. :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` :param unit: :ref:`MVIEW_UNIT` :type vv_x: GXVV :type vv_y: GXVV :type unit: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The polygon will be added to the current clip region. The `GXVV <geosoft.gxapi.GXVV>`'s cannot have any dummy elements. """ self._clip_poly(vv_x, vv_y, unit) def clip_rect(self, min_x, min_y, max_x, max_y, unit): """ Add a rectangle to the clip region. :param min_x: X minimum :param min_y: Y minimum :param max_x: X maximum :param max_y: Y maximum :param unit: :ref:`MVIEW_UNIT` :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type unit: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The rectangle will be added to the current clip region. """ self._clip_rect(min_x, min_y, max_x, max_y, unit) def delete_ext_clip_ply(self, ext_ply): """ Deletes an extended clip `GXPLY <geosoft.gxapi.GXPLY>` object used by this view. :param ext_ply: Extended ClipPLY number :type ext_ply: int .. 
versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_ext_clip_ply(ext_ply) def ext_clip_ply_list(self, lst): """ Get the names of existing extended clip `GXPLY <geosoft.gxapi.GXPLY>` objects in this view as list. :type lst: GXLST .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._ext_clip_ply_list(lst) def get_clip_ply(self, poly): """ Get clipping polygons, in the user projection :param poly: Poly :type poly: GXPLY .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The returned `GXPLY <geosoft.gxapi.GXPLY>` is recast into the User projection. For oriented views (especially sections), use `get_ply <geosoft.gxapi.GXMVIEW.get_ply>`, which returns the Clip `GXPLY <geosoft.gxapi.GXPLY>` in the view's native projection (e.g. the one set using `set_ipj <geosoft.gxapi.GXMVIEW.set_ipj>`). """ self._get_clip_ply(poly) def get_ext_clip_ply(self, ext_ply, ply): """ Get an extended clip `GXPLY <geosoft.gxapi.GXPLY>` object used by this view. :param ext_ply: Extended ClipPLY number :param ply: `GXPLY <geosoft.gxapi.GXPLY>` object to get :type ext_ply: int :type ply: GXPLY .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_ext_clip_ply(ext_ply, ply) def get_group_ext_clip_ply(self, group, ext_ply): """ Gets extended clip information for group in view. :param group: Group Name :param ext_ply: Extended `GXPLY <geosoft.gxapi.GXPLY>` number (returned, -1 if not set) :type group: str :type ext_ply: int_ref .. 
versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ext_ply.value = self._get_group_ext_clip_ply(group.encode(), ext_ply.value) def get_ply(self, poly): """ Get clipping polygons, in the base projection :param poly: Poly :type poly: GXPLY .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This should be used to get the clipping polygon for oriented views (especially sections). """ self._get_ply(poly) def group_clip_mode(self, mode): """ Set the Clipping mode on or off for new groups. :param mode: :ref:`MVIEW_CLIP` :type mode: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All new groups will be clipped. """ self._group_clip_mode(mode) def get_name_ext_clip_ply(self, ext_ply, name): """ Get the name of the extended clip `GXPLY <geosoft.gxapi.GXPLY>` object in this view. :param ext_ply: Extended ClipPLY number :param name: Name :type ext_ply: int :type name: str_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._get_name_ext_clip_ply(ext_ply, name.value.encode()) def num_ext_clip_ply(self): """ Get the number of extended clip `GXPLY <geosoft.gxapi.GXPLY>` objects in this view. :returns: Number of PLYs :rtype: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._num_ext_clip_ply() return ret_val def set_ext_clip_ply(self, ext_ply, name, ply): """ Set an extended clip `GXPLY <geosoft.gxapi.GXPLY>` object used by this view. 
:param ext_ply: Extended ClipPLY number, If greater or equal to the return value of `num_ext_clip_ply <geosoft.gxapi.GXMVIEW.num_ext_clip_ply>` it will be added to the end of the current list :param name: Name (Has to be unique, otherwise error will be returned) :param ply: `GXPLY <geosoft.gxapi.GXPLY>` object to set, use a null `GXPLY <geosoft.gxapi.GXPLY>` to rename an existing object :type ext_ply: int :type name: str :type ply: GXPLY :returns: Index of new or changed `GXPLY <geosoft.gxapi.GXPLY>`, -1 on error :rtype: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._set_ext_clip_ply(ext_ply, name.encode(), ply) return ret_val def set_clip_ply(self, poly): """ Set clipping region to a `GXPLY <geosoft.gxapi.GXPLY>` :param poly: Poly :type poly: GXPLY .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_clip_ply(poly) def set_group_ext_clip_ply(self, group, ext_ply): """ Sets extended clip information for group in view. :param group: Group Name :param ext_ply: Extended `GXPLY <geosoft.gxapi.GXPLY>` number (-1 to clear) :type group: str :type ext_ply: int .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_group_ext_clip_ply(group.encode(), ext_ply) # Color @classmethod def color2_rgb(cls, color, r, g, b): """ Convert to RGB values. :param color: Color value :param r: Red :param g: Green :param b: Blue :type color: int :type r: int_ref :type g: int_ref :type b: int_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Color component intensities will be in the range 0-255. .. 
seealso:: `color <geosoft.gxapi.GXMVIEW.color>` """ r.value, g.value, b.value = gxapi_cy.WrapMVIEW._color2_rgb(GXContext._get_tls_geo(), color, r.value, g.value, b.value) @classmethod def color_descr(cls, color, color_descr): """ Convert a color to a color string label :param color: COL_ANY variable :param color_descr: Color descriptor returned :type color: int :type color_descr: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `color <geosoft.gxapi.GXMVIEW.color>`. """ color_descr.value = gxapi_cy.WrapMVIEW._color_descr(GXContext._get_tls_geo(), color, color_descr.value.encode()) @classmethod def color(cls, color): """ Get a color from a color string label :param color: Color name string :type color: str :returns: Color int :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Color strings may be "R","G","B","C","M","Y", "H","S","V", or "K" or a combination of these characters, each followed by up to three digits specifying a number between 0 and 255. An empty string produce C_ANY_NONE. You must stay in the same color model, RGB, CMY, HSV or K. For example "R", "R127G22", "H255S127V32" Characters are not case sensitive. .. seealso:: iColorXXX_MVIEW macros """ ret_val = gxapi_cy.WrapMVIEW._color(GXContext._get_tls_geo(), color.encode()) return ret_val @classmethod def color_cmy(cls, c, m, y): """ Return CMY color. :param c: Cyan :param m: Magenta :param y: Yellow :type c: int :type m: int :type y: int :returns: Color int based on color model. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Color component intensities must be in the range 0-255. .. 
seealso:: `color <geosoft.gxapi.GXMVIEW.color>` """ ret_val = gxapi_cy.WrapMVIEW._color_cmy(GXContext._get_tls_geo(), c, m, y) return ret_val @classmethod def color_hsv(cls, h, s, v): """ Return HSV color. :param h: Hue :param s: Saturation :param v: Color :type h: int :type s: int :type v: int :returns: Color int based on color model. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Color component intensities must be in the range 0-255. .. seealso:: `color <geosoft.gxapi.GXMVIEW.color>` """ ret_val = gxapi_cy.WrapMVIEW._color_hsv(GXContext._get_tls_geo(), h, s, v) return ret_val @classmethod def color_rgb(cls, r, g, b): """ Return RGB color. :param r: Red :param g: Green :param b: Blue :type r: int :type g: int :type b: int :returns: Color int based on color model. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Color component intensities must be in the range 0-255. .. seealso:: `color <geosoft.gxapi.GXMVIEW.color>` """ ret_val = gxapi_cy.WrapMVIEW._color_rgb(GXContext._get_tls_geo(), r, g, b) return ret_val # Drawing Attribute def clip_mode(self, mode): """ Set the view clipping mode on or off. :param mode: :ref:`MVIEW_CLIP` :type mode: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Entitles that follow in this group will be clipped or not clipped depending on this mode. The montaj editor cannot change the clip mode of embedded clipped/unclipped enties that are controlled by this call. Use the Group clipping functions instead. 
It is highly recommended that you use the `group_clip_mode <geosoft.gxapi.GXMVIEW.group_clip_mode>` function to control clipping on a group-by-group basis, instead of using `clip_mode <geosoft.gxapi.GXMVIEW.clip_mode>` when inside a group, as it is impossible to determine the true visible extents of a group. In such cases, the "zoom to full map extents" command may give incorrect results. """ self._clip_mode(mode) def fill_color(self, color): """ Set the fill color. :param color: Color :type color: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._fill_color(color) def line_color(self, color): """ Set the line color. :param color: Color :type color: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._line_color(color) def line_smooth(self, smooth): """ Set the line edge smoothing. :param smooth: :ref:`MVIEW_SMOOTH` :type smooth: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._line_smooth(smooth) def line_style(self, style, pitch): """ Set the style of a line. :param style: Line Style #, see default.lpt :param pitch: Pitch in view units :type style: int :type pitch: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Line styles are selected by ordinal value (line style #) from those defined in default.lpt. If default.lpt does not have a the style specified, the file user.lpt is searched. If this file does not contain the line style solid is assumed. Note that line styles from default.lpt and user.lpt are read into the map at the time the map is created, not at display time. 
""" self._line_style(style, pitch) def line_thick(self, thick): """ Set the line thickness. :param thick: Line thickness in view space units :type thick: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._line_thick(thick) def pat_angle(self, angle): """ Sets the pattern angle :param angle: Angle :type angle: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Allows the user to apply a rotation to the basic pattern. Care should be taken to ensure that the tiling remains continuous; i.e. if the pattern consists of horizontal lines, only angles of -90, 0, 90, 180 (etc.) would give seamless tiling. However, simple, closed figure, such as a star, could be given any angle. Rotations about the center point (0.5, 0.5) of the unit cell are performed prior to applying PatSize. The default value is 0.0. Setting an angle of -999 inititates the random angle feature, and each pattern tile is rotated to a different angle. Using this along with PatStyle(View, `MVIEW_TILE_RANDOM <geosoft.gxapi.MVIEW_TILE_RANDOM>`) can give a "hand-drawn" effect to geological fills. See the IMPORTANT note for sPatNumber_MVIEW(). """ self._pat_angle(angle) def pat_density(self, density): """ Sets the tiling density. :param density: Relative density (default = 1). :type density: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This number is the ratio between the plotted unit cell size and the distance between the plotted tile centers. The default value is 1. A value larger than 1 increases the density of the pattern, while values less than 1 make the pattern more "spread out". 
This can be used along with sPatStyleMethod to create more complicated fills from simple patterns. See the IMPORTANT note for sPatNumber_MVIEW(). """ self._pat_density(density) def pat_number(self, number): """ Sets the pattern number :param number: Pattern number :type number: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Pattern 0 is solid fill.(default) Set the pattern color using `fill_color <geosoft.gxapi.GXMVIEW.fill_color>`. Patterns are selected by ordinal value (pattern number) from those defined in default.pat. If default.pat does not have a the pattern specified, the file user.pat is searched. If this file does not contain the pattern solid is assumed. Note that patterns from default.pat and user.pat are read into the map at the time the map is created, not at display time. IMPORTANT: A call to this function resets all the various pattern attributes to those defined for the selected pattern. If you want to modify any attributes, call that function (e.g. sPatSize_MVIEW(), AFTER you call sPatNumber_MVIEW(). """ self._pat_number(number) def pat_size(self, size): """ Sets the pattern unit cell size (X) :param size: Pattern size in view units :type size: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See the IMPORTANT note for sPatNumber_MVIEW(). """ self._pat_size(size) def pat_style(self, style): """ Sets the tiling method (i.e. rectangle, triangle) :param style: :ref:`MVIEW_TILE` :type style: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Normally, the unit cell is duplicated across the fill area like floor tiles (`MVIEW_TILE_RECTANGULAR <geosoft.gxapi.MVIEW_TILE_RECTANGULAR>`). 
DIAGONAL tiling rotates the tiling positions (but not the tiles) by 45 degrees. TRIANGULAR tiling Offsets each succeeding row by half the unit cell size, and lessens the vertical offset, so that the unit cell centers form a triangular grid pattern. RANDOM tiling adds small random offsets in both directions to give the diffuse effect seen on many geological maps. NOTE: Some patterns are designed to be interlocking and may only work "correctly" with one tiling method. See the IMPORTANT note for sPatNumber_MVIEW(). """ self._pat_style(style) def pat_thick(self, thick): """ Sets the pattern line thickness :param thick: Line thickness as fraction of pattern size (ie. 0.05) :type thick: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See the IMPORTANT note for sPatNumber_MVIEW(). """ self._pat_thick(thick) def symb_angle(self, angle): """ Set the Symb angle. :param angle: Angle in degrees CCW from +X :type angle: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._symb_angle(angle) def symb_color(self, color): """ Set the Symbol color. :param color: Color :type color: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._symb_color(color) def symb_fill_color(self, color): """ Set the Symbol color fill. :param color: Color :type color: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._symb_fill_color(color) def symb_font(self, face, geofont, weight, italic): """ Set the symbol font and style. :param face: Face name :param geofont: Geosoft font? :param weight: :ref:`MVIEW_FONT_WEIGHT` :param italic: Italic font? 
:type face: str :type geofont: bool :type weight: int :type italic: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the font cannot be found, the DEFAULT_SYMBOL_FONT specified in the [MONTAJ] section of GEOSOFT.INI will be used. See `text_font <geosoft.gxapi.GXMVIEW.text_font>` for the font name syntax. """ self._symb_font(face.encode(), geofont, weight, italic) def symb_number(self, number): """ Set the Symbol number. :param number: Symbol number :type number: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The lower 16 bits of the number is interpreted as UTF-16 with a valid Unicode character code point. GFN fonts wil produce valid symbols depending on the font for 0x01-0x7f and the degree, plus-minus and diameter symbol(latin small letter o with stroke) for 0xB0, 0xB1 and 0xF8 respectively. It is possible to check if a character is valid using `GXUNC.is_valid_utf16_char <geosoft.gxapi.GXUNC.is_valid_utf16_char>`. The high 16-bits are reserved for future use. Also see: `GXUNC.valid_symbol <geosoft.gxapi.GXUNC.valid_symbol>` and `GXUNC.validate_symbols <geosoft.gxapi.GXUNC.validate_symbols>`. """ self._symb_number(number) def symb_size(self, size): """ Set the Symb size. :param size: Size in view units :type size: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._symb_size(size) def text_angle(self, angle): """ Set the text angle. :param angle: Angle in degrees CCW from +X :type angle: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._text_angle(angle) def text_color(self, color): """ Set the Text color. 
:param color: Color :type color: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._text_color(color) def text_font(self, face, geo_font, weight, italic): """ Set the text font. :param face: Font face name :param geo_font: Geosoft font? (TRUE or FALSE) :param weight: :ref:`MVIEW_FONT_WEIGHT` :param italic: Italic font? (TRUE or FALSE) :type face: str :type geo_font: int :type weight: int :type italic: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Font characteristics can be defined using the function parameters, or may be defined as decorations in the font name. A decorated font name has the following format: font_name(type,weight,italics,charset) where type - "TT" or "GFN" weight - last word from MVIEW_FONT_WEIGHT_ (ie. "LIGHT") italics - "ITALICS" for for italics charset - Before version 6.2. this decoration was honoured and it affected the display of characters above ASCII 127. 6.2. introduced Unicode in the core montaj engine that eliminated the need for such a setting. All strings on the GX API level are encoded in :ref:`UTF8` during runtime which makes it possible to represent all possible characters without using character sets. This decoration will now be ignored. Qualifiers take precidence over passed parameters. The order of qualifiers is not relevant. examples: "sr(GFN,ITALICS)" - geosoft GFN font, normal weight, italics "Arial(TT,XBOLD)" - TrueType font, bold "Times(TT,ITALICS,_EastEurope)" - TrueType font, italics, Eastern Europe charcters Decorated name qualifiers take precedence over passed parameters. If the font cannot be found, or if "Default" is used, the DEFAULT_MAP_FONT specified in the [MONTAJ] section of GEOSOFT.INI will be used. 
""" self._text_font(face.encode(), geo_font, weight, italic) def text_ref(self, ref): """ Set the text plot reference point. :param ref: :ref:`TEXT_REF` :type ref: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._text_ref(ref) def text_size(self, size): """ Set the text size. :param size: Size in view units :type size: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Because views may have differing X and Y scales this size can only make sense in one of these directions otherwise text would appear warped on these kinds of views. The X direction was chosen to represent the font size. For instance if the X scale is 1 unit/mm and my Y scale is 2 units/mm a font size of 3.0 view units will result in un-rotated text that appears 6 view units or 3mm high in the Y direction. Another important thing to keep in mind that this size represents what is known as the "ascent" height of the font. The full height of the text may be higher if characters with accents or lower extension (e.g. the lowercase y) appear in the text. For TrueType fonts the mapping system will do a best effort positioning and sizing of the text using the alignment set and information about the font that it queries from the operating system. For instance; if Arial text "Blog" is placed at (0,0) and the alignment setting is Left-Bottom the left side of the B should be aligned at 0 in the X direction and the bottom of all the letters except y will be at 0 in the Y direction. The lower part of the y will extend below 0 in the Y (this is known as the "descent" height of the font at this size). The letters B and l should be very close to the size set here (this may differ slightly for different fonts). 
""" self._text_size(size) def transparency(self, trans): """ Sets the transparency for new objects. :param trans: Transparency (1.0 - Opaque, 0.0 - Transparent) :type trans: float .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 1.0 Renders completely opaque objects while 0.0 will be transparent. Objects written after this will have a combined transparency value with the group transparency if it is set (e.g. 0.5 for group and 0.8 stream will result in 0.4). """ self._transparency(trans) def z_value(self, val): """ Sets Z-value info. :param val: Z-Value :type val: float .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This number is stored in map mainly for exports to other vector formats (e.g ShapeFiles) A contour map that's exported to a shape file will use this value as a Z-value attributes for its shapes. """ self._z_value(val) # Drawing Entity def arc(self, x, y, radius, ratio, angle, start, end): """ Draw an arc. :param x: Center x :param y: Center y :param radius: Radius :param ratio: Ratio x/y :param angle: Angle :param start: Start angle :param end: End angle :type x: float :type y: float :type radius: float :type ratio: float :type angle: float :type start: float :type end: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._arc(x, y, radius, ratio, angle, start, end) def chord(self, x, y, radius, ratio, angle, start, end): """ Draw a filled arc. :param x: Center x :param y: Center y :param radius: Radius :param ratio: Ratio x/y :param angle: Angle :param start: Start angle :param end: End angle :type x: float :type y: float :type radius: float :type ratio: float :type angle: float :type start: float :type end: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._chord(x, y, radius, ratio, angle, start, end) def classified_symbols(self, vv_x, vv_y, vv_z, scal_mm, zmin, zval, size, fcol): """ Plot classified symbols :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` :param vv_z: Data `GXVV <geosoft.gxapi.GXVV>` :param scal_mm: Scale factor to convert mm to view units :param zmin: Classified minimum Z to plot :param zval: Comma delimited list of Z maximums :param size: Comma delimited list of sizes in mm :param fcol: Comma delimited list of color strings :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type scal_mm: float :type zmin: float :type zval: str :type size: str :type fcol: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** For example, to plot three levels <95, 95-100 and 100-120, three string arguments would be: "95,100,120" maximums of each class "2.0,2.5,3.0" sizes in mm "y,g,r" fill colors """ self._classified_symbols(vv_x, vv_y, vv_z, scal_mm, zmin, zval.encode(), size.encode(), fcol.encode()) def complex_polygon(self, vv_i, vv_x, vv_y): """ Draw a polygon with holes in it. :param vv_i: `GXVV <geosoft.gxapi.GXVV>` of type int holding the number of points for each polygon :param vv_x: X coordinates. :param vv_y: Y coordinates. :type vv_i: GXVV :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** You pass a `GXVV <geosoft.gxapi.GXVV>` with polygon sizes and 2 point vvs. 
""" self._complex_polygon(vv_i, vv_x, vv_y) def ellipse(self, x, y, radius, ratio, angle): """ Draw an ellipse :param x: Center x :param y: Center y :param radius: Radius :param ratio: Ratio x/y :param angle: Angle :type x: float :type y: float :type radius: float :type ratio: float :type angle: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._ellipse(x, y, radius, ratio, angle) def line(self, x0, y0, x1, y1): """ Draw a line. :param x0: X0 :param y0: Y0 :param x1: X1 :param y1: Y1 :type x0: float :type y0: float :type x1: float :type y1: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._line(x0, y0, x1, y1) def line_vv(self, gvv): """ Draw line segments stored in a GS_D2LINE `GXVV <geosoft.gxapi.GXVV>`. :param gvv: `GXVV <geosoft.gxapi.GXVV>` for GS_D2LINE :type gvv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._line_vv(gvv) def polygon_dm(self, vv_x, vv_y): """ Like PolyLineDm, but draw polygons. :param vv_x: X coordinates. :param vv_y: Y coordinates. :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._polygon_dm(vv_x, vv_y) def polygon_ply(self, ply): """ Draw a complex polygon from `GXPLY <geosoft.gxapi.GXPLY>`. :type ply: GXPLY .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._polygon_ply(ply) def polygon_mply(self, mply): """ Draw multiple complex polygons from `GXMPLY <geosoft.gxapi.GXMPLY>`. :type mply: GXMPLY .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._polygon_mply(mply) def poly_line(self, type, vv_x, vv_y): """ Draw a polyline or polygon (dummies deleted). :param type: :ref:`MVIEW_DRAW` :param vv_x: X coordinates. :param vv_y: Y coordinates. :type type: int :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummies in X and/or Y `GXVV <geosoft.gxapi.GXVV>` are deleted and it results in 'solid' line. Using `poly_line_dm <geosoft.gxapi.GXMVIEW.poly_line_dm>` (below) function if gaps from dummies are to be kept. """ self._poly_line(type, vv_x, vv_y) def poly_line_dm(self, vv_x, vv_y): """ Draw a polyline with gaps defined by dummies in X/Y VVs :param vv_x: X coordinates. :param vv_y: Y coordinates. :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._poly_line_dm(vv_x, vv_y) def poly_wrap(self, vv_x, vv_y): """ Draw wrapped polylines from X and Y `GXVV <geosoft.gxapi.GXVV>`'s. :param vv_x: X coordinates. :param vv_y: Y coordinates. :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Convert a given VVy into a wrapped VVy using the current view window as the wrap region. Then draw polylines from it. .. seealso:: `poly_line <geosoft.gxapi.GXMVIEW.poly_line>` """ self._poly_wrap(vv_x, vv_y) def rectangle(self, x0, y0, x1, y1): """ Draw a rectangle. :param x0: X0 :param y0: Y0 :param x1: X1 :param y1: Y1 :type x0: float :type y0: float :type x1: float :type y1: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._rectangle(x0, y0, x1, y1) def segment(self, x, y, radius, ratio, angle, start, end): """ Draw a filled segment of an ellipse. :param x: Center x :param y: Center y :param radius: Radius :param ratio: Ratio x/y :param angle: Angle :param start: Start angle :param end: End angle :type x: float :type y: float :type radius: float :type ratio: float :type angle: float :type start: float :type end: float .. versionadded:: 5.0.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._segment(x, y, radius, ratio, angle, start, end) def size_symbols(self, vv_x, vv_y, vv_z): """ Plot sized symbols :param vv_x: X :param vv_y: Y :param vv_z: Symbol sizes (in view units) :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._size_symbols(vv_x, vv_y, vv_z) def symbol(self, x, y): """ Plot a symbol :param x: X :param y: Y :type x: float :type y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._symbol(x, y) def symbols(self, vv_x, vv_y): """ Plot symbols :param vv_x: X :param vv_y: Y :type vv_x: GXVV :type vv_y: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._symbols(vv_x, vv_y) def symbols_itr(self, itr, vv_x, vv_y, vv_z): """ Plot symbols using an `GXITR <geosoft.gxapi.GXITR>` :param itr: `GXITR <geosoft.gxapi.GXITR>` file name (ZON or `GXITR <geosoft.gxapi.GXITR>`) :param vv_x: X :param vv_y: Y :param vv_z: Z :type itr: str :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._symbols_itr(itr.encode(), vv_x, vv_y, vv_z) def text(self, text, x, y): """ Draw text. :param text: Text to plot :param x: X location of text :param y: Y location of text :type text: str :type x: float :type y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._text(text.encode(), x, y) # Drawing Object def aggregate(self, agg, name): """ Add an aggregate to a view. :param agg: Aggregate :param name: Aggregate name Maximum length is `MVIEW_NAME_LENGTH <geosoft.gxapi.MVIEW_NAME_LENGTH>` :type agg: GXAGG :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._aggregate(agg, name.encode()) def get_aggregate(self, group): """ Get an existing Aggregate object from the view. :param group: Group number :type group: int :returns: `GXAGG <geosoft.gxapi.GXAGG>` object :rtype: GXAGG .. 
versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method returns a cached object owned by the `GXMVIEW <geosoft.gxapi.GXMVIEW>` and will be destroyed automatically when the `GXMVIEW <geosoft.gxapi.GXMVIEW>` is disposed """ ret_val = self._get_aggregate(group) return GXAGG(ret_val) def change_line_message(self, line): """ Change the specified line in a view. :param line: Change to this line :type line: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The line name can be created by calling LineLabel_DB using `DB_LINE_LABEL_FORMAT_LINK <geosoft.gxapi.DB_LINE_LABEL_FORMAT_LINK>`. This insures that the label is created is the same way as used in the database. """ self._change_line_message(line.encode()) def col_symbol(self, name, csymb): """ Add a colored symbol object to a view. :param name: Name of the color symbol group :param csymb: `GXCSYMB <geosoft.gxapi.GXCSYMB>` object :type name: str :type csymb: GXCSYMB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._col_symbol(name.encode(), csymb) def get_col_symbol(self, group): """ Get an existing colored symbol object from the view. :param group: Group number :type group: int :returns: `GXCSYMB <geosoft.gxapi.GXCSYMB>` object :rtype: GXCSYMB .. 
versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method returns a cached object owned by the `GXMVIEW <geosoft.gxapi.GXMVIEW>` and will be destroyed automatically when the `GXMVIEW <geosoft.gxapi.GXMVIEW>` is disposed """ ret_val = self._get_col_symbol(group) return GXCSYMB(ret_val) def datalinkd(self, datalinkd, name): """ Add a Data Link Display (`GXDATALINKD <geosoft.gxapi.GXDATALINKD>`) object to the view. :param name: Name Maximum length is `MVIEW_NAME_LENGTH <geosoft.gxapi.MVIEW_NAME_LENGTH>` :type datalinkd: GXDATALINKD :type name: str .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._datalinkd(datalinkd, name.encode()) def get_datalinkd(self, group): """ Get an existing Data Link Display (`GXDATALINKD <geosoft.gxapi.GXDATALINKD>`) object from the view. :param group: Group number :type group: int :returns: `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` object :rtype: GXDATALINKD .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method returns a cached object owned by the `GXMVIEW <geosoft.gxapi.GXMVIEW>` and will be destroyed automatically when the `GXMVIEW <geosoft.gxapi.GXMVIEW>` is disposed """ ret_val = self._get_datalinkd(group) return GXDATALINKD(ret_val) def easy_maker(self, name, groups): """ Used for GX makers which use both maps and databases. :param name: Maker name, used in menu prompt :param groups: INI groups (terminate each with a ";") :type name: str :type groups: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._easy_maker(name.encode(), groups.encode()) def get_maker_name(self, group, str_val): """ Used to retrieve the maker for a particular view group. :param group: Group number :param str_val: String in which to place the maker name :type group: int :type str_val: str_ref .. versionadded:: 9.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ str_val.value = self._get_maker_name(group, str_val.value.encode()) def emf_object(self, min_x, min_y, max_x, max_y, file): """ Add an EMF file data object to the view. :param min_x: Min X :param min_y: Min Y :param max_x: Max X :param max_y: Max Y :param file: EMF File holding data :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type file: str .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._emf_object(min_x, min_y, max_x, max_y, file.encode()) def external_string_object(self, min_x, min_y, max_x, max_y, name, cl, data): """ Add an external string data object to the view. :param min_x: Min X :param min_y: Min Y :param max_x: Max X :param max_y: Max Y :param name: Name of external object :param cl: Class of external object :param data: String data of external object :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type name: str :type cl: str :type data: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._external_string_object(min_x, min_y, max_x, max_y, name.encode(), cl.encode(), data.encode()) def link(self, db, name): """ Make a link to a database. 
:param db: Database handle :param name: Link name :type db: GXDB :type name: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._link(db, name.encode()) def maker(self, db, map, prog, type, name, groups): """ Generates a Maker for the database and/or map. :param db: Database required? (0 = No, 1 = Yes) :param map: Map required? (0 = No, 1 = Yes) :param prog: Program name :param type: :ref:`MAKER` :param name: Maker name, used in menu prompt :param groups: INI groups (terminate each with a ";") :type db: int :type map: int :type prog: str :type type: int :type name: str :type groups: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._maker(db, map, prog.encode(), type, name.encode(), groups.encode()) def meta(self, meta, name): """ Store Metadata in a group :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object :param name: Menu name of Object :type meta: GXMETA :type name: str .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._meta(meta, name.encode()) def voxd(self, voxd, name): """ Add a Voxel Display (`GXVOXD <geosoft.gxapi.GXVOXD>`) object to the view. :param name: Name Maximum length is `MVIEW_NAME_LENGTH <geosoft.gxapi.MVIEW_NAME_LENGTH>` :type voxd: GXVOXD :type name: str .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._voxd(voxd, name.encode()) def get_voxd(self, group): """ Get an existing `GXVOXD <geosoft.gxapi.GXVOXD>` object from the view. :param group: Group number :type group: int :returns: `GXVOXD <geosoft.gxapi.GXVOXD>` object :rtype: GXVOXD .. 
versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This method returns a cached object owned by the `GXMVIEW <geosoft.gxapi.GXMVIEW>` and will be destroyed automatically when the `GXMVIEW <geosoft.gxapi.GXMVIEW>` is disposed
        """
        ret_val = self._get_voxd(group)
        return GXVOXD(ret_val)




    def draw_vector_voxel_vectors(self, vox, group, itr, scale_factor, height_base_ratio, max_base_size_ratio, cutoff_value, max_vectors):
        """
        Display vectors from a vector voxel in the view.

        :param vox:                  Vector voxel (`GXVOX <geosoft.gxapi.GXVOX>`) supplying the vectors
        :param group:                View group name Maximum length is `MVIEW_NAME_LENGTH <geosoft.gxapi.MVIEW_NAME_LENGTH>`
        :param itr:                  Image transform - must contain zones
        :param scale_factor:         Vector length scale factor - w.r.t. the voxel minimum horizontal cell size (default 1)
        :param height_base_ratio:    Ratio of the vector cone height to its base (default 4)
        :param max_base_size_ratio:  Ratio of maximum base size to minimum horizontal cell size (default 0.25)
        :param cutoff_value:         Cutoff value - do not plot vectors with amplitudes less than this value (`rDUMMY <geosoft.gxapi.rDUMMY>` or 0 to plot all)
        :param max_vectors:          Maximum number of vectors - decimate as required to reduce (`iDUMMY <geosoft.gxapi.iDUMMY>` to plot all)
        :type vox:                   GXVOX
        :type group:                 str
        :type itr:                   GXITR
        :type scale_factor:          float
        :type height_base_ratio:     float
        :type max_base_size_ratio:   float
        :type cutoff_value:          float
        :type max_vectors:           int

        .. versionadded:: 7.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This will result in a `GXVECTOR3D <geosoft.gxapi.GXVECTOR3D>` group object within the view
        """
        self._draw_vector_voxel_vectors(vox, group.encode(), itr, scale_factor, height_base_ratio, max_base_size_ratio, cutoff_value, max_vectors)




    def get_vector_3d(self, group):
        """
        Get an existing `GXVECTOR3D <geosoft.gxapi.GXVECTOR3D>` object from the view.

        :param group:  Group number
        :type group:   int

        :returns:      `GXVECTOR3D <geosoft.gxapi.GXVECTOR3D>` object
        :rtype:        GXVECTOR3D

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This method returns a cached object owned by the `GXMVIEW <geosoft.gxapi.GXMVIEW>` and will be destroyed automatically when the `GXMVIEW <geosoft.gxapi.GXMVIEW>` is disposed
        """
        ret_val = self._get_vector_3d(group)
        return GXVECTOR3D(ret_val)




    def draw_vectors_3d(self, group, vv_x, vv_y, vv_z, vv_vx, vv_vy, vv_vz, itr, scale_for_max_vector, height_base_ratio, max_base_size_ratio):
        """
        Display vectors in the view.

        :param group:                 View group name Maximum length is `MVIEW_NAME_LENGTH <geosoft.gxapi.MVIEW_NAME_LENGTH>`
        :param vv_x:                  X locations
        :param vv_y:                  Y locations
        :param vv_z:                  Z locations
        :param vv_vx:                 Vector X component
        :param vv_vy:                 Vector Y component
        :param vv_vz:                 Vector Z component
        :param itr:                   Image transform - must contain zones
        :param scale_for_max_vector:  Scale factor for the longest vector in map units / vector units. Vector lengths for the rest of the vectors scale by the square root of the vector amplitudes. This results in the apparent (viewed) area of the vector being proportional to the amplitude.
        :param height_base_ratio:     Ratio of the vector cone height to its base (default 4)
        :param max_base_size_ratio:   Maximum base size in view units. Leave blank (dummy) for no limit. If applied this can make larger vectors skinnier, but does not reduce the length, so they don't obscure other vectors as much.
        :type group:                  str
        :type vv_x:                   GXVV
        :type vv_y:                   GXVV
        :type vv_z:                   GXVV
        :type vv_vx:                  GXVV
        :type vv_vy:                  GXVV
        :type vv_vz:                  GXVV
        :type itr:                    GXITR
        :type scale_for_max_vector:   float
        :type height_base_ratio:      float
        :type max_base_size_ratio:    float

        ..
versionadded:: 8.0.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._draw_vectors_3d(group.encode(), vv_x, vv_y, vv_z, vv_vx, vv_vy, vv_vz, itr, scale_for_max_vector, height_base_ratio, max_base_size_ratio)




# Group Methods



    def set_group_itr(self, group, itr):
        """
        Set group `GXITR <geosoft.gxapi.GXITR>`

        :param group:  Group number
        :type group:   int
        :type itr:     GXITR

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A group `GXITR <geosoft.gxapi.GXITR>` associates a color distribution with mixed vector groups (e.g. Drillhole Lithology tubes) groups.
        Used by legend UI support in 3D.
        Note that modifying this information does not currently change the group contents and a group needs to be regenerated
        (e.g. with maker) to refresh the objects.
        """
        self._set_group_itr(group, itr)




    def get_group_itr(self, group):
        """
        Get group `GXITR <geosoft.gxapi.GXITR>`

        :param group:  Group number
        :type group:   int
        :rtype:        GXITR

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A group `GXITR <geosoft.gxapi.GXITR>` associates a color distribution with mixed vector groups (e.g. Drillhole Lithology tubes) groups.
        Used by legend UI support in 3D.
        Note that modifying this information does not currently change the group contents and a group needs to be regenerated
        (e.g. with maker) to refresh the objects.
        """
        ret_val = self._get_group_itr(group)
        return GXITR(ret_val)




    def group_itr_exists(self, group):
        """
        Determine if group `GXITR <geosoft.gxapi.GXITR>` exists.

        :param group:  Group number
        :type group:   int

        :returns:      1 - `GXITR <geosoft.gxapi.GXITR>` exists, 0 - `GXITR <geosoft.gxapi.GXITR>` does not exist
        :rtype:        int

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A group `GXITR <geosoft.gxapi.GXITR>` associates a color distribution with mixed vector groups (e.g. Drillhole Lithology tubes) groups.
        Used by legend UI support in 3D.
        Note that modifying this information does not currently change the group contents and a group needs to be regenerated
        (e.g. with maker) to refresh the objects.
        """
        ret_val = self._group_itr_exists(group)
        return ret_val




    def delete_group_itr(self, group):
        """
        Deletes existing `GXITR <geosoft.gxapi.GXITR>` associated with a group.

        :param group:  Group number
        :type group:   int

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A group `GXITR <geosoft.gxapi.GXITR>` associates a color distribution with mixed vector groups (e.g. Drillhole Lithology tubes) groups.
        Used by legend UI support in 3D.
        Note that modifying this information does not currently change the group contents and a group needs to be regenerated
        (e.g. with maker) to refresh the objects.
        """
        self._delete_group_itr(group)




    def set_group_tpat(self, group, tpat):
        """
        Set group `GXTPAT <geosoft.gxapi.GXTPAT>`

        :param group:  Group number
        :type group:   int
        :type tpat:    GXTPAT

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A group `GXTPAT <geosoft.gxapi.GXTPAT>` associates a thematic color map with mixed vector groups (e.g. Drillhole Lithology tubes) groups.
        Used by legend UI support in 3D.
        Note that modifying this information does not currently change the group contents and a group needs to be regenerated
        (e.g. with maker) to refresh the objects.
        """
        self._set_group_tpat(group, tpat)




    def get_group_tpat(self, group):
        """
        Get group `GXTPAT <geosoft.gxapi.GXTPAT>`

        :param group:  Group number
        :type group:   int
        :rtype:        GXTPAT

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A group `GXTPAT <geosoft.gxapi.GXTPAT>` associates a thematic color map with mixed vector groups (e.g. Drillhole Lithology tubes) groups.
        Used by legend UI support in 3D.
        Note that modifying this information does not currently change the group contents and a group needs to be regenerated
        (e.g. with maker) to refresh the objects.
        """
        ret_val = self._get_group_tpat(group)
        return GXTPAT(ret_val)




    def group_tpat_exists(self, group):
        """
        Determine if group `GXTPAT <geosoft.gxapi.GXTPAT>` exists.

        :param group:  Group number
        :type group:   int

        :returns:      1 - `GXTPAT <geosoft.gxapi.GXTPAT>` exists, 0 - `GXTPAT <geosoft.gxapi.GXTPAT>` does not exist
        :rtype:        int

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A group `GXTPAT <geosoft.gxapi.GXTPAT>` associates a thematic color map with mixed vector groups (e.g. Drillhole Lithology tubes) groups.
        Used by legend UI support in 3D.
        Note that modifying this information does not currently change the group contents and a group needs to be regenerated
        (e.g. with maker) to refresh the objects.
        """
        ret_val = self._group_tpat_exists(group)
        return ret_val




    def delete_group_tpat(self, group):
        """
        Deletes existing `GXTPAT <geosoft.gxapi.GXTPAT>` associated with a group.

        :param group:  Group number
        :type group:   int

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A group `GXTPAT <geosoft.gxapi.GXTPAT>` associates a thematic color map with mixed vector groups (e.g. Drillhole Lithology tubes) groups.
Used by legend UI support in 3D.
        Note that modifying this information does not currently change the group contents and a group needs to be regenerated
        (e.g. with maker) to refresh the objects.
        """
        self._delete_group_tpat(group)




    def group_storage_exists(self, group, storage_name):
        """
        Determine if generic storage associated with a group exists.

        :param group:         Group number
        :param storage_name:  Storage name
        :type group:          int
        :type storage_name:   str

        :returns:             1 - storage exists, 0 - storage does not exist
        :rtype:               int

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** External API users should not use storage names starting with "Geosoft"
        """
        ret_val = self._group_storage_exists(group, storage_name.encode())
        return ret_val




    def read_group_storage(self, group, storage_name):
        """
        Reads existing generic storage associated with a group into an in-memory `GXBF <geosoft.gxapi.GXBF>`.

        :param group:         Group number
        :param storage_name:  Storage name
        :type group:          int
        :type storage_name:   str

        :returns:             `GXBF <geosoft.gxapi.GXBF>` Object
        :rtype:               GXBF

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** External API users should not use storage names starting with "Geosoft"
        """
        ret_val = self._read_group_storage(group, storage_name.encode())
        return GXBF(ret_val)




    def delete_group_storage(self, group, storage_name):
        """
        Deletes existing generic storage associated with a group.

        :param group:         Group number
        :param storage_name:  Storage name
        :type group:          int
        :type storage_name:   str

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** External API users should not use storage names starting with "Geosoft"
        """
        self._delete_group_storage(group, storage_name.encode())




    def write_group_storage(self, group, storage_name, bf):
        """
        Write generic storage associated with a group, with content supplied by a `GXBF <geosoft.gxapi.GXBF>`.

        :param group:         Group number
        :param storage_name:  Storage name
        :param bf:            `GXBF <geosoft.gxapi.GXBF>` to read from
        :type group:          int
        :type storage_name:   str
        :type bf:             GXBF

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** External API users should not use storage names starting with "Geosoft"
        """
        self._write_group_storage(group, storage_name.encode(), bf)




    def copy_marked_groups(self, mvie_wd):
        """
        Copies all marked groups from one view into another view

        :param mvie_wd:  Destination `GXMVIEW <geosoft.gxapi.GXMVIEW>`
        :type mvie_wd:   GXMVIEW

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Projections in source and destination views are used to copy the entities.
        Entities are clipped by the destination view's clipping region.
        """
        self._copy_marked_groups(mvie_wd)




    def copy_raw_marked_groups(self, mvie_wd):
        """
        Copies all marked groups raw from one view into another

        :param mvie_wd:  Destination `GXMVIEW <geosoft.gxapi.GXMVIEW>`
        :type mvie_wd:   GXMVIEW

        .. versionadded:: 5.1.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The projections, and clipping is completely ignored.
        """
        self._copy_raw_marked_groups(mvie_wd)




    def crc_group(self, name, crc):
        """
        Compute CRC for a group.
:param name:  Group name
        :param crc:   CRC to start (use `CRC_INIT_VALUE <geosoft.gxapi.CRC_INIT_VALUE>`)
        :type name:   str
        :type crc:    int

        :returns:     Computed CRC value
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._crc_group(name.encode(), crc)
        return ret_val




    def delete_group(self, group):
        """
        Delete a group.

        :param group:  Group name
        :type group:   str

        .. versionadded:: 5.1.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Does nothing if the group does not already exist.
        """
        self._delete_group(group.encode())




    def del_marked_groups(self):
        """
        Delete marked groups.

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._del_marked_groups()




    def get_group_extent(self, group_name, xmin, ymin, xmax, ymax, unit):
        """
        Get extent of a group in a view

        :param group_name:  Group name
        :param xmin:        Minimum X, returned
        :param ymin:        Minimum Y, returned
        :param xmax:        Maximum X, returned
        :param ymax:        Maximum Y, returned
        :param unit:        :ref:`MVIEW_UNIT`
        :type group_name:   str
        :type xmin:         float_ref
        :type ymin:         float_ref
        :type xmax:         float_ref
        :type ymax:         float_ref
        :type unit:         int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        xmin.value, ymin.value, xmax.value, ymax.value = self._get_group_extent(group_name.encode(), xmin.value, ymin.value, xmax.value, ymax.value, unit)




    def get_group_transparency(self, group_name, trans):
        """
        Gets the transparency value of group

        :param group_name:  Group name
        :param trans:       Transparency (1.0 - Opaque, 0.0 - Transparent)
        :type group_name:   str
        :type trans:        float_ref

        .. versionadded:: 6.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        trans.value = self._get_group_transparency(group_name.encode(), trans.value)




    def group_to_ply(self, name, pply):
        """
        Save all polygons in group into `GXPLY <geosoft.gxapi.GXPLY>` obj.

        :param name:  Group name
        :param pply:  `GXPLY <geosoft.gxapi.GXPLY>` to add to
        :type name:   str
        :type pply:   GXPLY

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The coordinates will be in the working coordinate system of the view.
        The SetWorkingIPJ_MVIEW method can be used to change the working coordinate system.
        This function will return an empty `GXPLY <geosoft.gxapi.GXPLY>` if the group is hidden.
        """
        self._group_to_ply(name.encode(), pply)




    def hide_marked_groups(self, mode):
        """
        Hide/Show marked groups.

        :param mode:  :ref:`MVIEW_HIDE`
        :type mode:   int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._hide_marked_groups(mode)




    def hide_shadow_2d_interpretations(self, mode):
        """
        Hide/Show 2d shadow interpretations.

        :param mode:  :ref:`MVIEW_HIDE`
        :type mode:   int

        .. versionadded:: 8.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._hide_shadow_2d_interpretations(mode)




    def exist_group(self, name):
        """
        Checks to see if a group exists.

        :param name:  Group name
        :type name:   str

        :returns:     0 - group does not exist.
                      1 - group exists.
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._exist_group(name.encode())
        return ret_val




    def gen_new_group_name(self, group, new_name):
        """
        Generate the name of a group from a base name that is new. (always unique and won't overwrite existing objects).

        :param group:     Base Name of group
        :param new_name:  New Name of group
        :type group:      str
        :type new_name:   str_ref

        .. versionadded:: 5.0.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        new_name.value = self._gen_new_group_name(group.encode(), new_name.value.encode())




    def is_group(self, group, what):
        """
        Query a status or characteristic of a group

        :param group:  Group name
        :param what:   :ref:`MVIEW_IS`
        :type group:   str
        :type what:    int

        :returns:      TRUE or FALSE (1 or 0)
        :rtype:        int

        .. versionadded:: 5.0.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_group(group.encode(), what)
        return ret_val




    def is_group_empty(self, group):
        """
        Is the group empty?

        :param group:  Group name
        :type group:   str

        :returns:      TRUE or FALSE (1 or 0)
        :rtype:        int

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_group_empty(group.encode())
        return ret_val




    def is_movable(self):
        """
        Is this view movable?

        :rtype: bool

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Views are always physically movable in the API, this
        flag is for preventing accidental moving in the `GXGUI <geosoft.gxapi.GXGUI>`.
        By default views are not movable.
        """
        ret_val = self._is_movable()
        return ret_val




    def is_visible(self):
        """
        Is this view visible?
:rtype: bool

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_visible()
        return ret_val




    def list_groups(self, lst, flag):
        """
        Get a list of the groups in a view.

        :param lst:   List
        :param flag:  :ref:`MVIEW_GROUP_LIST`
        :type lst:    GXLST
        :type flag:   int

        :returns:     Number of groups in the list
        :rtype:       int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._list_groups(lst, flag)
        return ret_val




    def render_order(self):
        """
        Query the view render order

        :returns: Render order
        :rtype:   int

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Views with lower numbers should render first, `iDUMMY <geosoft.gxapi.iDUMMY>` is undefined
        """
        ret_val = self._render_order()
        return ret_val




    def is_group_exportable(self, group):
        """
        Query whether the group is an exportable type.

        :param group:  Group name
        :type group:   str

        :returns:      TRUE or FALSE (1 or 0)
        :rtype:        int

        .. versionadded:: 9.7

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_group_exportable(group.encode())
        return ret_val




    def mark_all_groups(self, mark):
        """
        Mark or unmark all groups.

        :param mark:  0 - unmark, 1 - mark
        :type mark:   int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._mark_all_groups(mark)




    def mark_empty_groups(self, mark):
        """
        Mark/unmark all empty groups.

        :param mark:  0 - unmark, 1 - mark
        :type mark:   int

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._mark_empty_groups(mark)




    def mark_group(self, name, mark):
        """
        Mark or unmark a specific group

        :param name:  Group name
        :param mark:  0 - unmark, 1 - mark
        :type name:   str
        :type mark:   int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._mark_group(name.encode(), mark)




    def move_group_backward(self, group):
        """
        Move the group backward one position (render sooner).

        :param group:  Group name
        :type group:   str

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._move_group_backward(group.encode())




    def move_group_forward(self, group):
        """
        Move the group forward one position (render later).

        :param group:  Group name
        :type group:   str

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._move_group_forward(group.encode())




    def move_group_to_back(self, group):
        """
        Move the group to the back (render first).

        :param group:  Group name
        :type group:   str

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._move_group_to_back(group.encode())




    def move_group_to_front(self, group):
        """
        Move the group to the front (render last).

        :param group:  Group name
        :type group:   str

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._move_group_to_front(group.encode())




    def rename_group(self, old, new_group_name):
        """
        Rename a group.

        :param old:             Old group name
        :param new_group_name:  New group name
        :type old:              str
        :type new_group_name:   str

        .. versionadded:: 5.1.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Does nothing if the group does not already exist.
        """
        self._rename_group(old.encode(), new_group_name.encode())




    def set_group_moveable(self, group, move):
        """
        Set the movable attribute of a group.

        :param group:  Group name
        :param move:   0 - not movable, 1 - movable
        :type group:   str
        :type move:    int

        .. versionadded:: 5.0.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_group_moveable(group.encode(), move)




    def set_group_transparency(self, group_name, trans):
        """
        Sets the transparency value of group

        :param group_name:  Group name
        :param trans:       Transparency (1.0 - Opaque, 0.0 - Transparent)
        :type group_name:   str
        :type trans:        float

        .. versionadded:: 6.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_group_transparency(group_name.encode(), trans)




    def set_mark_moveable(self, move):
        """
        Set the movable attribute of marked groups.

        :param move:  0 - not movable, 1 - movable
        :type move:   int

        .. versionadded:: 5.0.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_mark_moveable(move)




    def set_movability(self, movable):
        """
        Set the view movability

        :type movable: bool

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Views are always physically movable in the API, this
        flag is for preventing accidental moving in the `GXGUI <geosoft.gxapi.GXGUI>`.
        By default views are not movable.
""" self._set_movability(movable) def set_render_order(self, order): """ Set the view render order :param order: Render order :type order: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Views with lower numbers should render first, `iDUMMY <geosoft.gxapi.iDUMMY>` is undefined """ self._set_render_order(order) def set_visibility(self, visible): """ Set the view visibility :type visible: bool .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_visibility(visible) def start_group(self, name, mode): """ Start a group. :param name: Group name, can be NULL, Maximum length is `MVIEW_NAME_LENGTH <geosoft.gxapi.MVIEW_NAME_LENGTH>` :param mode: :ref:`MVIEW_GROUP` :type name: str :type mode: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Line and fill colors and thickness must be set before drawing to a group. If the group name is NULL, output will be sent to the primary group stream and the :ref:`MVIEW_GROUP` is ignored. Group names must be different from view names. """ self._start_group(name.encode(), mode) def get_group_guid(self, group, guid): """ Gets a GUID of a group in the `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param group: Group number :param guid: GUID :type group: int :type guid: str_ref .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a GUID was never queried a new one will be assigned and the map will be modified. Only if the map is saved will this value then persist. 
""" guid.value = self._get_group_guid(group, guid.value.encode()) def get_folder_items_3d(self, parent_guid): """ Get the list of folders in the `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param parent_guid: Folder parent GUID, or empty string for parent MVIEW :type parent_guid: str :rtype: GXLST .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns name/guid pairs. Empty GUID indicates item is a group. """ ret_val = self._get_folder_items_3d(parent_guid.encode()) return GXLST(ret_val) def get_folder_items_2d(self, parent_name): """ Get the list of key-value pairs representing the name(key) and the type(value) of all children in the specified parent folders in the `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param parent_name: Parent folder name, or empty string for root MVIEW :type parent_name: str :rtype: GXLST .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns name/type pairs. Types can be 'Folder', 'Group' or 'Unknown' """ ret_val = self._get_folder_items_2d(parent_name.encode()) return GXLST(ret_val) def add_folder_3d(self, name, parent_guid, guid): """ Add a 3DView folder to the `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param name: Folder name :param parent_guid: Folder parent GUID, or empty string for parent `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param guid: GUID :type name: str :type parent_guid: str :type guid: str_ref .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ guid.value = self._add_folder_3d(name.encode(), parent_guid.encode(), guid.value.encode()) def add_folder_2d(self, name, parent_name): """ Add a Map folder to the `GXMVIEW <geosoft.gxapi.GXMVIEW>`. 
:param name: Folder name :param parent_name: Parent folder name, or empty string for root `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :type name: str :type parent_name: str .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_folder_2d(name.encode(), parent_name.encode()) def move_group_to_folder_3d(self, guid, group): """ Add group to a 3DView folder in `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param guid: Folder parent GUID, or empty string for parent `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param group: Group number :type guid: str :type group: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._move_group_to_folder_3d(guid.encode(), group) def move_group_to_folder_2d(self, parent_name, group): """ Add group to a Map folder in `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param parent_name: Parent folder name, or empty string for root `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param group: Group number :type parent_name: str :type group: int .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._move_group_to_folder_2d(parent_name.encode(), group) def delete_folder_3d(self, guid): """ Delete a Map folder. :param guid: Folder GUID. :type guid: str .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_folder_3d(guid.encode()) def delete_folder_2d(self, name): """ Delete a 3DView folder. :param name: Folder name. :type name: str .. 
versionadded:: 2022.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._delete_folder_2d(name.encode())




    def find_group_by_guid(self, guid):
        """
        Find a group by GUID.

        :param guid:  GUID
        :type guid:   str

        :returns:     Group Number.
        :rtype:       int

        .. versionadded:: 9.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._find_group_by_guid(guid.encode())
        return ret_val




# Projection



    def set_working_ipj(self, ipj):
        """
        Set the working projection of the view.

        :param ipj:  The input projection
        :type ipj:   GXIPJ

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The working projection is the coordinate system of coordinates drawn to
        the view.  The working coordinate system can be different than the
        view coordinate system, in which case the coordinates are re-projected
        to the view coordinate system before they are placed in the view.

        .. seealso::

            `mode_pj <geosoft.gxapi.GXMVIEW.mode_pj>` to control use of the working projection.
        """
        self._set_working_ipj(ipj)




    def clear_esrild_ts(self):
        """
        Clear ESRI local datum transformations currently in use.

        .. versionadded:: 7.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._clear_esrild_ts()




    def is_projection_empty(self):
        """
        Returns 1 if the view projection and view user projection are both empty (undefined).

        :returns: 1 if the view projection and view user projection are both empty.
        :rtype:   int

        .. versionadded:: 9.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Use, for instance, to see if the map view contains projection information. The first time you add data that
        has projection information you should set up an empty view projection so that subsequent data added with a different
        projection is properly displayed in relation to the initial data.
        """
        ret_val = self._is_projection_empty()
        return ret_val




    def get_ipj(self, ipj):
        """
        Get the projection of the view.

        :param ipj:  `GXIPJ <geosoft.gxapi.GXIPJ>` in which to place the view `GXIPJ <geosoft.gxapi.GXIPJ>`
        :type ipj:   GXIPJ

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_ipj(ipj)




    def get_user_ipj(self, ipj):
        """
        Get the user projection of the view.

        :param ipj:  `GXIPJ <geosoft.gxapi.GXIPJ>` in which to place the view `GXIPJ <geosoft.gxapi.GXIPJ>`
        :type ipj:   GXIPJ

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_user_ipj(ipj)




    def mode_pj(self, mode):
        """
        Set the working projection mode

        :param mode:  :ref:`MVIEW_PJ`
        :type mode:   int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This controls how your coordinates and attributes will be interpreted.
        A working projection must be set using SetWorkingIPJ_MVIEW for this
        method to have any effect.

        .. seealso::

            SetWorkingIPJ
        """
        self._mode_pj(mode)




    def north(self):
        """
        Returns North direction at center of view.

        :returns: North direction in deg. azimuth relative to view Y.
        :rtype:   float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** North is calculated from the `GXIPJ <geosoft.gxapi.GXIPJ>` North direction.
        It will be `rDUMMY <geosoft.gxapi.rDUMMY>` if `GXIPJ <geosoft.gxapi.GXIPJ>` is unknown.
        """
        ret_val = self._north()
        return ret_val




    def set_ipj(self, ipj):
        """
        Set the projection of the view.

        :param ipj:  `GXIPJ <geosoft.gxapi.GXIPJ>` to place in the view
        :type ipj:   GXIPJ

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This function also sets the User `GXIPJ <geosoft.gxapi.GXIPJ>`,
        and automatically clears the WARP before doing so. This would be
        equivalent to calling `ClearWarp_IPJ` followed by
        `set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>` on the view.
        """
        self._set_ipj(ipj)




    def set_user_ipj(self, ipj):
        """
        Set the user projection of the view.

        :param ipj:  `GXIPJ <geosoft.gxapi.GXIPJ>` to place in the view
        :type ipj:   GXIPJ

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_user_ipj(ipj)




# Render



    def get_3d_group_flags(self, group_num):
        """
        Get a 3D geometry group's 3D rendering flags.

        :param group_num:  Group number
        :type group_num:   int

        :returns:          Combination of :ref:`MVIEW_3D_RENDER` flags or 0
        :rtype:            int

        .. versionadded:: 9.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_3d_group_flags(group_num)
        return ret_val




    def set_3d_group_flags(self, group_num, flags):
        """
        Set a 3D geometry group's 3D rendering flags.

        :param group_num:  Group number
        :param flags:      Combination of :ref:`MVIEW_3D_RENDER` flags or 0
        :type group_num:   int
        :type flags:       int

        .. versionadded:: 9.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_3d_group_flags(group_num, flags)




    def get_group_freeze_scale(self, group_num, scale):
        """
        Get a scale freezing value for the group (`rDUMMY <geosoft.gxapi.rDUMMY>` for disabled).

        :param group_num:  Group number
        :param scale:      Variable to fill with freeze scale
        :type group_num:   int
        :type scale:       float_ref

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        scale.value = self._get_group_freeze_scale(group_num, scale.value)




    def set_freeze_scale(self, scale):
        """
        Set a scale freezing value into stream (`rDUMMY <geosoft.gxapi.rDUMMY>` for disabled).

        :param scale:  Freeze Scale value
        :type scale:   float

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Objects written after this will override any scale freezing set for the group
        """
        self._set_freeze_scale(scale)




    def set_group_freeze_scale(self, group_num, scale):
        """
        Set a scale freezing value for the group (`rDUMMY <geosoft.gxapi.rDUMMY>` for disabled).

        :param group_num:  Group number
        :param scale:      Freeze scale value to set
        :type group_num:   int
        :type scale:       float

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_group_freeze_scale(group_num, scale)




    def find_group(self, group_name):
        """
        Find a Group by name.

        :param group_name:  Group name
        :type group_name:   str

        :returns:           Group Number.
        :rtype:             int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._find_group(group_name.encode())
        return ret_val




    def group_name(self, group_num, group_name):
        """
        Get a group name

        :param group_num:   Group number, error if not valid
        :param group_name:  Group Name
        :type group_num:    int
        :type group_name:   str_ref

        ..
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ group_name.value = self._group_name(group_num, group_name.value.encode()) def render(self, hdc, left, bottom, right, top, min_x, min_y, max_x, max_y): """ Render a specified area of view onto a Windows DC handle :param hdc: DC Handle :param left: Left value of the render rect in Windows coordinates (bottom>top) :param bottom: Bottom value :param right: Right value :param top: Top value :param min_x: Area X minimum :param min_y: Area Y minimum :param max_x: Area X maximum :param max_y: Area Y maximum :type hdc: int :type left: int :type bottom: int :type right: int :type top: int :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._render(hdc, left, bottom, right, top, min_x, min_y, max_x, max_y) def render_ex(self, hdc, left, bottom, right, top, min_x, min_y, max_x, max_y, asp, padding_pixel, text_rendering_hint): """ Render a specified area of view onto a Windows DC handle, setting the type of it and returning the new data extents :param hdc: DC Handle :param left: Left value of the render rect in Windows coordinates (bottom>top) :param bottom: Bottom value :param right: Right value :param top: Top value :param min_x: Area X minimum :param min_y: Area Y minimum :param max_x: Area X maximum :param max_y: Area Y maximum :param asp: :ref:`MVIEW_RELOCATE` :param padding_pixel: Add padding to avoid x/y axis border out of view if needed, default 0 :param text_rendering_hint: A (GDI/Graphics) TextRenderingHint to apply while rendering the MVIEW. A value < 0or null reference will not effect the current TextRenderingHint. 
:type hdc: int :type left: int :type bottom: int :type right: int :type top: int :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :type asp: int :type padding_pixel: int :type text_rendering_hint: int .. versionadded:: 2022.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min_x.value, min_y.value, max_x.value, max_y.value = self._render_ex(hdc, left, bottom, right, top, min_x.value, min_y.value, max_x.value, max_y.value, asp, padding_pixel, text_rendering_hint) # Utility Drawing def set_u_fac(self, hdc): """ Set the unit conversion of a view. :param hdc: New UFac value :type hdc: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_u_fac(hdc) def axis_x(self, y_loc, left, right, major_tick, minor_tick, tick_size): """ Draw an X axis :param y_loc: Y location in view units :param left: Left X :param right: Right X :param major_tick: Major tick interval :param minor_tick: Minor tick interval (half size of major) :param tick_size: Tick size in view units (negative for down ticks) :type y_loc: float :type left: float :type right: float :type major_tick: float :type minor_tick: float :type tick_size: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All coordinates are in view units. .. 
seealso:: rOptimumTick_MVIEW """ self._axis_x(y_loc, left, right, major_tick, minor_tick, tick_size) def axis_y(self, x_loc, bottom, top, major_tick, minor_tick, tick_size): """ Draw a Y axis :param x_loc: X location in view units :param bottom: Bottom Y :param top: Top Y :param major_tick: Major tick interval :param minor_tick: Minor tick interval (half size of major) :param tick_size: Tick size in view units (negative for left ticks) :type x_loc: float :type bottom: float :type top: float :type major_tick: float :type minor_tick: float :type tick_size: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All coordinates are in view units. .. seealso:: rOptimumTick_MVIEW """ self._axis_y(x_loc, bottom, top, major_tick, minor_tick, tick_size) def grid(self, x_inc, y_inc, dx, dy, grid_type): """ Draw a grid in the current window :param x_inc: X grid increment :param y_inc: Y grid increment :param dx: dX dot increment/cross X size :param dy: dY dot increment/cross Y size :param grid_type: :ref:`MVIEW_GRID` :type x_inc: float :type y_inc: float :type dx: float :type dy: float :type grid_type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The grid will be drawn in the current window specified by the last SetWindow call. .. 
seealso:: `axis_x <geosoft.gxapi.GXMVIEW.axis_x>`, `axis_y <geosoft.gxapi.GXMVIEW.axis_y>`, `optimum_tick <geosoft.gxapi.GXMVIEW.optimum_tick>` """ self._grid(x_inc, y_inc, dx, dy, grid_type) def label_fid(self, vv_x, fid_start, fid_incr, interval, y_loc, y_scale): """ Label fiducials on a profile :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param fid_start: Fiducial start :param fid_incr: Fiducial increment :param interval: Fiducial label interval, default 100.0 :param y_loc: Y location in view unit :param y_scale: Y scale :type vv_x: GXVV :type fid_start: float :type fid_incr: float :type interval: float :type y_loc: float :type y_scale: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A 1mm long vertical tick is drawn at the place where a label is present. The label is drawn below the tick. The incoming X `GXVV <geosoft.gxapi.GXVV>` is used to define the place for label. """ self._label_fid(vv_x, fid_start, fid_incr, interval, y_loc, y_scale) def label_x(self, l_loc, left, right, lable_int, just, bound, orient): """ Label annotations on the X axis :param l_loc: Y location in view units :param left: Left X :param right: Right X :param lable_int: Label interval :param just: :ref:`MVIEW_LABEL_JUST` :param bound: :ref:`MVIEW_LABEL_BOUND` :param orient: :ref:`MVIEW_LABEL_ORIENT` :type l_loc: float :type left: float :type right: float :type lable_int: float :type just: int :type bound: int :type orient: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Label bounding will justify edge labels to be inside the bar limits. But bounding does not apply if labels are drawn vertically (top right or top left) .. 
seealso:: `axis_x <geosoft.gxapi.GXMVIEW.axis_x>`, `axis_y <geosoft.gxapi.GXMVIEW.axis_y>`, `optimum_tick <geosoft.gxapi.GXMVIEW.optimum_tick>` """ self._label_x(l_loc, left, right, lable_int, just, bound, orient) def label_y(self, x, bottom, top, lable_int, just, bound, orient): """ Label annotations on the Y axis :param x: X location in view units :param bottom: Bottom Y :param top: Top Y :param lable_int: Label interval :param just: :ref:`MVIEW_LABEL_JUST` :param bound: :ref:`MVIEW_LABEL_BOUND` :param orient: :ref:`MVIEW_LABEL_ORIENT` :type x: float :type bottom: float :type top: float :type lable_int: float :type just: int :type bound: int :type orient: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Label bounding will justify edge labels to be inside the bar limits. But bounding does not apply if labels are drawn vertically (top right or top left) .. seealso:: `axis_x <geosoft.gxapi.GXMVIEW.axis_x>`, `axis_y <geosoft.gxapi.GXMVIEW.axis_y>`, `optimum_tick <geosoft.gxapi.GXMVIEW.optimum_tick>` """ self._label_y(x, bottom, top, lable_int, just, bound, orient) @classmethod def optimum_tick(cls, min, max, sep): """ Return a default optimum tick interval :param min: Minimum of range :param max: Maximum :param sep: Optimum interval :type min: float :type max: float :type sep: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ sep.value = gxapi_cy.WrapMVIEW._optimum_tick(GXContext._get_tls_geo(), min, max, sep.value) # View @classmethod def create(cls, map, name, mode): """ Create `GXMVIEW <geosoft.gxapi.GXMVIEW>`. 
:param map: `GXMAP <geosoft.gxapi.GXMAP>` on which to place the view :param name: View name (maximum `MVIEW_NAME_LENGTH <geosoft.gxapi.MVIEW_NAME_LENGTH>`) :param mode: :ref:`MVIEW_OPEN` :type map: GXMAP :type name: str :type mode: int :returns: `GXMVIEW <geosoft.gxapi.GXMVIEW>`, aborts if creation fails :rtype: GXMVIEW .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** View scaling is set to mm on the map and the view origin is set to the map origin. """ ret_val = gxapi_cy.WrapMVIEW._create(GXContext._get_tls_geo(), map, name.encode(), mode) return GXMVIEW(ret_val) @classmethod def create_crooked_section(cls, map, ipj, name, x0, y0, xs, ys, scale, v_ex, dist0, elev, v_vxs, v_vx, v_vy): """ Creates a new crooked section view. :param map: `GXMAP <geosoft.gxapi.GXMAP>` Object :param ipj: Geographic projection of input X, Y locations below (without orientation) :param name: View Name :param x0: Base view bottom left corner X (mm) :param y0: Base view bottom left corner Y (mm) :param xs: Base view size in X (mm) :param ys: Base view size in Y (mm) :param scale: Map horizontal scale (X-axis) :param v_ex: Vertical exaggeration (1.0 is normal, must be >0.0) :param dist0: Starting distance at the left side of the view. :param elev: Elevation at TOP of the view :param v_vxs: Cumulative distances along the secton :param v_vx: True X locations along the section :param v_vy: True Y locations along the section :type map: GXMAP :type ipj: GXIPJ :type name: str :type x0: float :type y0: float :type xs: float :type ys: float :type scale: float :type v_ex: float :type dist0: float :type elev: float :type v_vxs: GXVV :type v_vx: GXVV :type v_vy: GXVV :returns: `GXMVIEW <geosoft.gxapi.GXMVIEW>`, aborts if creation fails :rtype: GXMVIEW .. 
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A crooked section is a section running vertically beneath a path of (X, Y) locations, like a river. This view supports linking to other plan, section, or 3D views. The data view coordinates are set up so that vertical coordinate corresponds to elevation, and the X coordinate is the distance along the crooked feature, beginning at zero on the left, but the status bar will show the true (X, Y, Z) location. If the scale is set to `rDUMMY <geosoft.gxapi.rDUMMY>`, then it will be calculated so that the points will all fit horizontally. """ ret_val = gxapi_cy.WrapMVIEW._create_crooked_section(GXContext._get_tls_geo(), map, ipj, name.encode(), x0, y0, xs, ys, scale, v_ex, dist0, elev, v_vxs, v_vx, v_vy) return GXMVIEW(ret_val) @classmethod def create_crooked_section_data_profile(cls, map, ipj, name, x0, y0, xs, ys, scale, dist0, min_z, max_z, log_z, v_vxs, v_vx, v_vy): """ Creates a new crooked section data profile view. :param map: `GXMAP <geosoft.gxapi.GXMAP>` Object :param ipj: Geographic projection of input X, Y locations below (without orientation) :param name: View Name :param x0: Base view bottom left corner X (mm) :param y0: Base view bottom left corner Y (mm) :param xs: Base view size in X (mm) :param ys: Base view size in Y (mm) :param scale: Map horizontal scale (X-axis) :param dist0: Starting distance at the left side of the view. :param min_z: Data value at bottom of the view :param max_z: Data value at top of the view :param log_z: Make logarithmic Y-axis (0:No, 1:Yes)? 
:param v_vxs: Cumulative distances along the secton :param v_vx: True X locations along the section :param v_vy: True Y locations along the section :type map: GXMAP :type ipj: GXIPJ :type name: str :type x0: float :type y0: float :type xs: float :type ys: float :type scale: float :type dist0: float :type min_z: float :type max_z: float :type log_z: int :type v_vxs: GXVV :type v_vx: GXVV :type v_vy: GXVV :returns: `GXMVIEW <geosoft.gxapi.GXMVIEW>`, aborts if creation fails :rtype: GXMVIEW .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the same as `create_crooked_section <geosoft.gxapi.GXMVIEW.create_crooked_section>`, except that the vertical axis plots a data value, not elevation, and allows for logarithmic scaling. See Also: `create_crooked_section <geosoft.gxapi.GXMVIEW.create_crooked_section>`. """ ret_val = gxapi_cy.WrapMVIEW._create_crooked_section_data_profile(GXContext._get_tls_geo(), map, ipj, name.encode(), x0, y0, xs, ys, scale, dist0, min_z, max_z, log_z, v_vxs, v_vx, v_vy) return GXMVIEW(ret_val) def extent(self, what, unit, min_x, min_y, max_x, max_y): """ Get the view extents :param what: :ref:`MVIEW_EXTENT` :param unit: :ref:`MVIEW_EXTENT_UNIT` :param min_x: X minimum :param min_y: Y minimum :param max_x: X maximum :param max_y: Y maximum :type what: int :type unit: int :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The CLIP region is the current view window or the limits of the current clip polygon. If `MVIEW_EXTENT_ALL <geosoft.gxapi.MVIEW_EXTENT_ALL>` is requested and the view has no groups, the clip extents are returned. If clip extents are requested and there are no clip extents, an area 0.0,0.0 1.0,1.0 is returned. 
The `MVIEW_EXTENT_VISIBLE <geosoft.gxapi.MVIEW_EXTENT_VISIBLE>` flag will return the union of the `MVIEW_EXTENT_CLIP <geosoft.gxapi.MVIEW_EXTENT_CLIP>` area and the extents of all non-masked visible groups in the view. """ min_x.value, min_y.value, max_x.value, max_y.value = self._extent(what, unit, min_x.value, min_y.value, max_x.value, max_y.value) def get_map(self): """ Get the `GXMAP <geosoft.gxapi.GXMAP>` of the view. :returns: The `GXMAP <geosoft.gxapi.GXMAP>` of the View. :rtype: GXMAP .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_map() return GXMAP(ret_val) def get_reg(self): """ Get the `GXREG <geosoft.gxapi.GXREG>` of the view. :returns: The `GXREG <geosoft.gxapi.GXREG>` of the View. :rtype: GXREG .. versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_reg() return GXREG(ret_val) def get_name(self, name): """ Gets the name of a view. :param name: View name returned :type name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._get_name(name.value.encode()) def get_guid(self, guid): """ Gets the GUID of the `GXMVIEW <geosoft.gxapi.GXMVIEW>`. :param guid: GUID :type guid: str_ref .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a GUID was never queried a new one will be assigned and the map will be modified. Only if the map is saved will this value then persist. """ guid.value = self._get_guid(guid.value.encode()) # View Control def plot_to_view(self, x, y): """ Convert a plot coordinate in mm to a VIEW coordinate. 
:param x: X in plot mm, returned in View coordinates :param y: Y in plot mm, returned in View coordinates :type x: float_ref :type y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x.value, y.value = self._plot_to_view(x.value, y.value) def set_thin_res(self, thin): """ Set polyline/polygon thinning resolution :param thin: Thinning resolution in mm, -1.0 to turn off. :type thin: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The thinning resolution controls the removal of redundant points from polylines and polygons. Points that deviate from a straight line by less than the thinning resolution are removed. This can significantly reduce the size of a `GXMAP <geosoft.gxapi.GXMAP>` file. We recommend that you set the thinning resolution to 0.02 mm. By default, the thinning resolution is set to 0.05mm. Set resolution to 0.0 to remove colinear points only. To turn off thinning after turning it on, call SetThinRes_MVIEW with a resolution of -1. """ self._set_thin_res(thin) def view_to_plot(self, x, y): """ Convert a VIEW coordinate to a plot coordinate in mm. :param x: X in View, returned in mm from plot origin :param y: Y in View, returned in mm from plot origin :type x: float_ref :type y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x.value, y.value = self._view_to_plot(x.value, y.value) def best_fit_window(self, m_min_x, m_min_y, m_max_x, m_max_y, v_min_x, v_min_y, v_max_x, v_max_y, fit_view): """ Fit an area in ground coordinates centered to an area in mm on map or vise versa keeping X and Y scales the same. :param m_min_x: X minimum (mm) of the area in map relative to map origin :param m_min_y: Y minimum .. 
:param m_max_x: X maximum .. :param m_max_y: Y maximum .. :param v_min_x: Min X in ground coordinate to fit to the area defined above :param v_min_y: Min Y in ground coordinate .. :param v_max_x: Max X in ground coordinate .. :param v_max_y: Max Y in ground coordinate .. :param fit_view: :ref:`MVIEW_FIT` :type m_min_x: float_ref :type m_min_y: float_ref :type m_max_x: float_ref :type m_max_y: float_ref :type v_min_x: float_ref :type v_min_y: float_ref :type v_max_x: float_ref :type v_max_y: float_ref :type fit_view: int .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** X and Y scales will be redefined and units will remain unchanged. The final X and Y ranges (if changed) are returned. .. seealso:: `fit_window <geosoft.gxapi.GXMVIEW.fit_window>` """ m_min_x.value, m_min_y.value, m_max_x.value, m_max_y.value, v_min_x.value, v_min_y.value, v_max_x.value, v_max_y.value = self._best_fit_window(m_min_x.value, m_min_y.value, m_max_x.value, m_max_y.value, v_min_x.value, v_min_y.value, v_max_x.value, v_max_y.value, fit_view) def fit_map_window_3d(self, m_min_x, m_min_y, m_max_x, m_max_y, v_min_x, v_min_y, v_max_x, v_max_y): """ Set the 2D view window for a 3D view. :param m_min_x: X minimum (mm) of the area in map relative to map origin :param m_min_y: Y minimum .. :param m_max_x: X maximum .. :param m_max_y: Y maximum .. :param v_min_x: Min X in ground coordinate to fit to the area defined above :param v_min_y: Min Y in ground coordinate .. :param v_max_x: Max X in ground coordinate .. :param v_max_y: Max Y in ground coordinate .. :type m_min_x: float :type m_min_y: float :type m_max_x: float :type m_max_y: float :type v_min_x: float :type v_min_y: float :type v_max_x: float :type v_max_y: float .. 
versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 3D views are placed in 2D maps within a 2D mapping window that is analgous to a 2D View. This allows all 2D functions (such as changing a view location and size) to treat a 3D view just like a 2D view. The `fit_map_window_3d <geosoft.gxapi.GXMVIEW.fit_map_window_3d>` function allows you to locate and set the "apparent" 2D mapping of a 3D view on the map. An intial map window is established as specified on the map, and the view scaling is established to fit the specified area within that map area. """ self._fit_map_window_3d(m_min_x, m_min_y, m_max_x, m_max_y, v_min_x, v_min_y, v_max_x, v_max_y) def fit_window(self, m_min_x, m_min_y, m_max_x, m_max_y, v_min_x, v_min_y, v_max_x, v_max_y): """ Fit an area in ground coordinates to an area in mm on map. :param m_min_x: X minimum (mm) of the area in map relative to map origin :param m_min_y: Y minimum .. :param m_max_x: X maximum .. :param m_max_y: Y maximum .. :param v_min_x: Min X in ground coordinate to fit to the area defined above :param v_min_y: Min Y in ground coordinate .. :param v_max_x: Max X in ground coordinate .. :param v_max_y: Max Y in ground coordinate .. :type m_min_x: float :type m_min_y: float :type m_max_x: float :type m_max_y: float :type v_min_x: float :type v_min_y: float :type v_max_x: float :type v_max_y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** X and Y scales will be redefined and the units will be set to <unknown>. Coordinate ranges must be greater than 0.0. .. seealso:: `set_window <geosoft.gxapi.GXMVIEW.set_window>` """ self._fit_window(m_min_x, m_min_y, m_max_x, m_max_y, v_min_x, v_min_y, v_max_x, v_max_y) def get_class_name(self, cl, name): """ Get a class name. 
:param cl: Class :param name: Name :type cl: str :type name: str_ref .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** `GXMVIEW <geosoft.gxapi.GXMVIEW>` class names are intended to be used to record the names of certain classes in the view, such as "Plane" for the default drawing plane. """ name.value = self._get_class_name(cl.encode(), name.value.encode()) def map_origin(self, x_origin, y_origin): """ Get the map origin from a view :param x_origin: Returned map origin - X :param y_origin: Returned map origin - Y :type x_origin: float_ref :type y_origin: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x_origin.value, y_origin.value = self._map_origin(x_origin.value, y_origin.value) def re_scale(self, scale): """ Change the scale of a view. :param scale: Scale factor (> 0.0) :type scale: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The view size is multiplied by the scale factor. The view location will move relative to the map origin by the scale factor. """ self._re_scale(scale) def get_map_scale(self): """ Get the current map scale of the view :returns: The current map scale to 6 significant digits :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_map_scale() return ret_val def scale_mm(self): """ Get the horizontal scale in view X units/mm :returns: Returns horizontal scale in view X units/mm :rtype: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The scale factor is intended to be used by methods that would like to specify sizes in mm. Examples would be text sizes, line thicknesses and line pitch. """ ret_val = self._scale_mm() return ret_val def scale_pj_mm(self): """ Get horizontal scale in projected user units/mm :returns: Returns horizontal scale in projected user units/mm :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The scale factor is intended to be used by methods that would like to specify sizes in mm. Examples would be text sizes, line thicknesses and line pitch. Same as rScaleMM if working projection not defined """ ret_val = self._scale_pj_mm() return ret_val def scale_ymm(self): """ Get the vertical scale in Y units/mm :returns: Returns vertical scale in view Y units/mm :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The scale factor is intended to be used by methods that would like to specify sizes in mm. Examples would be text sizes, line thicknesses and line pitch. """ ret_val = self._scale_ymm() return ret_val def scale_all_group(self, xs, ys): """ Scale all groups (except for GRID) in a view :param xs: X scale :param ys: Y scale :type xs: float :type ys: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** X (and Y) scale is the ratio of the new dimension over the old dimension of a reference object. For example, if a horizontal straight line of 10m long becomes 20m, X scale should be 2. The view is then scaled back so that the view occupies the same area size as before. 
The view's clip area is updated as well. """ self._scale_all_group(xs, ys) def scale_window(self, min_x, min_y, max_x, max_y, bot_x, bot_y, x_scal, y_scal): """ Assign view coordinates to define a window. :param min_x: X minimum in view coordinates :param min_y: Y minimum :param max_x: X maximum :param max_y: Y maximum :param bot_x: X minimum in plot coordinates :param bot_y: Y minimum :param x_scal: Horizontal scale (view unit/plot unit in mm) :param y_scal: Vertical scale :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type bot_x: float :type bot_y: float :type x_scal: float :type y_scal: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The provided coordinates are converted to map mm using the current view translation and scaling. SetWindow is effectively called. .. seealso:: `set_window <geosoft.gxapi.GXMVIEW.set_window>`, `scale_window <geosoft.gxapi.GXMVIEW.scale_window>`, `tran_scale <geosoft.gxapi.GXMVIEW.tran_scale>` """ self._scale_window(min_x, min_y, max_x, max_y, bot_x, bot_y, x_scal, y_scal) def set_class_name(self, cl, name): """ Set a class name. :param cl: Class :param name: Name :type cl: str :type name: str .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** `GXMVIEW <geosoft.gxapi.GXMVIEW>` class names are intended to be used to record the names of certain classes in the view, such as "Plane" for the default drawing plane. """ self._set_class_name(cl.encode(), name.encode()) def set_window(self, min_x, min_y, max_x, max_y, unit): """ Set the view window :param min_x: X minimum :param min_y: Y minimum :param max_x: X maximum :param max_y: Y maximum :param unit: :ref:`MVIEW_UNIT` :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type unit: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The current clip region will be set to the clip window. .. seealso:: `fit_window <geosoft.gxapi.GXMVIEW.fit_window>`, `scale_window <geosoft.gxapi.GXMVIEW.scale_window>`, `extent <geosoft.gxapi.GXMVIEW.extent>`. """ self._set_window(min_x, min_y, max_x, max_y, unit) def tran_scale(self, x, y, xs, ys): """ Set the view translation and scaling :param x: X origin (user X to be placed at map 0) :param y: Y origin (user Y to be placed at map 0) :param xs: X mm/user unit :param ys: Y mm/user unit :type x: float :type y: float :type xs: float :type ys: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Warning. For reasons unknown (and maybe a bug), this function resets the view `GXIPJ <geosoft.gxapi.GXIPJ>` units. It is a good idea to call the SetUnits_IPJ function after calling this function in order to restore them. This will be addressed in v6.4. """ self._tran_scale(x, y, xs, ys) def user_to_view(self, x, y): """ Convert a USERplot in mm to a VIEW coordinate :param x: X in USER, returned in View coordinates :param y: Y in USER, returned in View coordinates :type x: float_ref :type y: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>` `get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` """ x.value, y.value = self._user_to_view(x.value, y.value) def view_to_user(self, x, y): """ Convert a VIEW coordinate to a USER coordinate. :param x: X in View, returned in user coordinates :param y: Y in View, returned in user coordinates :type x: float_ref :type y: float_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `set_user_ipj <geosoft.gxapi.GXMVIEW.set_user_ipj>` `get_user_ipj <geosoft.gxapi.GXMVIEW.get_user_ipj>` """ x.value, y.value = self._view_to_user(x.value, y.value) # Obsolete def get_surface_filename(self, group, filename): """ Get the surface filename. :param group: Group name :param filename: filename returned :type group: str :type filename: str_ref .. versionadded:: 9.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The group must be a `GXSURFACE <geosoft.gxapi.GXSURFACE>` group. Check this using `is_group <geosoft.gxapi.GXMVIEW.is_group>` and `MVIEW_IS_GENSURF <geosoft.gxapi.MVIEW_IS_GENSURF>` or `MVIEW_IS_VOXSURF <geosoft.gxapi.MVIEW_IS_VOXSURF>` . """ filename.value = self._get_surface_filename(group.encode(), filename.value.encode()) def is_surface_item_visible(self, group, guid): """ Is the surface item visible? :param group: Group name :param guid: Item GUID :type group: str :type guid: str :rtype: bool .. 
versionadded:: 9.7

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_surface_item_visible(group.encode(), guid.encode())
        return ret_val



### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer<file_sep>/examples/tutorial/Grids and Images/grid_convert_format.py
import geosoft.gxpy.gx as gx
import geosoft.gxpy.grid as gxgrid

# create the GX context; required before any other gxpy call
gxc = gx.GXpy()

# open an existing grid; the '(SRF;VER=V7)' decoration selects the Surfer v7 format decoder
grid_surfer = gxgrid.Grid.open('elevation_surfer.grd(SRF;VER=V7)')

# copy the grid to an ER Mapper format grid file; '(ERM)' selects the ER Mapper encoder
grid_erm = gxgrid.Grid.copy(grid_surfer, 'elevation.ers(ERM)', overwrite=True)

exit()
<file_sep>/geosoft/gxapi/GXVVU.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXVVU(gxapi_cy.WrapVVU):
    """
    GXVVU class.

    These methods are not a class. Utility methods perform
    various operations on `GXVV <geosoft.gxapi.GXVV>` objects,
    including pruning, splining, clipping and filtering.
    """

    def __init__(self, handle=0):
        super(GXVVU, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXVVU <geosoft.gxapi.GXVVU>`

        :returns: A null `GXVVU <geosoft.gxapi.GXVVU>`
        :rtype:   GXVVU
        """
        return GXVVU()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def average_repeat(cls, ref_vv, dat_vv): """ Average repeat values. :param ref_vv: Reference `GXVV <geosoft.gxapi.GXVV>` :param dat_vv: Data `GXVV <geosoft.gxapi.GXVV>` to average :type ref_vv: GXVV :type dat_vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Repeated values in the reference `GXVV <geosoft.gxapi.GXVV>` will be averaged in the data `GXVV <geosoft.gxapi.GXVV>`. The first value in the data `GXVV <geosoft.gxapi.GXVV>` will be set to the average and subsequent data `GXVV <geosoft.gxapi.GXVV>` values will be dummied out. Data is processed only to the minimum length of the input `GXVV <geosoft.gxapi.GXVV>` lengths. .. seealso:: `remove_dummy <geosoft.gxapi.GXVVU.remove_dummy>` """ gxapi_cy.WrapVVU._average_repeat(GXContext._get_tls_geo(), ref_vv, dat_vv) @classmethod def average_repeat_ex(cls, ref_vv, dat_vv, mode): """ Average repeat values. :param ref_vv: Reference `GXVV <geosoft.gxapi.GXVV>` :param dat_vv: Data `GXVV <geosoft.gxapi.GXVV>` to average :param mode: :ref:`VVU_MODE` :type ref_vv: GXVV :type dat_vv: GXVV :type mode: int .. versionadded:: 8.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Repeated values in the reference `GXVV <geosoft.gxapi.GXVV>` will be set to the mean, median, minimum or maximum value in the data `GXVV <geosoft.gxapi.GXVV>`. For minimum and maximum, the index in the data `GXVV <geosoft.gxapi.GXVV>` containing the minimum or maximum value is retained, and the other repeated values are dummied out. For mean and median, the first value in the data `GXVV <geosoft.gxapi.GXVV>` will be reset and subsequent data `GXVV <geosoft.gxapi.GXVV>` values will be dummied out. 
Data is processed only to the minimum length of the input `GXVV <geosoft.gxapi.GXVV>` lengths. .. seealso:: `remove_dummy <geosoft.gxapi.GXVVU.remove_dummy>` """ gxapi_cy.WrapVVU._average_repeat_ex(GXContext._get_tls_geo(), ref_vv, dat_vv, mode) @classmethod def average_repeat2(cls, ref_vv1, ref_vv2, dat_vv): """ Average repeat values based on 2 reference channels. :param ref_vv1: Reference `GXVV <geosoft.gxapi.GXVV>` :param ref_vv2: Reference `GXVV <geosoft.gxapi.GXVV>` :param dat_vv: Data `GXVV <geosoft.gxapi.GXVV>` to average :type ref_vv1: GXVV :type ref_vv2: GXVV :type dat_vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Repeated values in the reference `GXVV <geosoft.gxapi.GXVV>` will be averaged in the data `GXVV <geosoft.gxapi.GXVV>`. The first value in the data `GXVV <geosoft.gxapi.GXVV>` will be set to the average and subsequent data `GXVV <geosoft.gxapi.GXVV>` values will be dummied out. Data is processed only to the minimum length of the input `GXVV <geosoft.gxapi.GXVV>` lengths. Both the reference `GXVV <geosoft.gxapi.GXVV>` values must repeat for the averaging to occur. This version is useful for averaging on repeated (X,Y) locations. .. seealso:: RemoveDummy_VV """ gxapi_cy.WrapVVU._average_repeat2(GXContext._get_tls_geo(), ref_vv1, ref_vv2, dat_vv) @classmethod def average_repeat2_ex(cls, ref_vv1, ref_vv2, dat_vv, mode): """ Average repeat values based on 2 reference channels. :param ref_vv1: Reference `GXVV <geosoft.gxapi.GXVV>` :param ref_vv2: Reference `GXVV <geosoft.gxapi.GXVV>` :param dat_vv: Data `GXVV <geosoft.gxapi.GXVV>` to average :param mode: :ref:`VVU_MODE` :type ref_vv1: GXVV :type ref_vv2: GXVV :type dat_vv: GXVV :type mode: int .. 
versionadded:: 8.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Repeated values in the reference `GXVV <geosoft.gxapi.GXVV>` will be set to the mean, median, minimum or maximum value in the data `GXVV <geosoft.gxapi.GXVV>`. The first value in the data `GXVV <geosoft.gxapi.GXVV>` will be reset and subsequent data `GXVV <geosoft.gxapi.GXVV>` values will be dummied out. Data is processed only to the minimum length of the input `GXVV <geosoft.gxapi.GXVV>` lengths. Both the reference `GXVV <geosoft.gxapi.GXVV>` values must repeat for the averaging to occur. This version is useful for averaging on repeated (X,Y) locations. .. seealso:: RemoveDummy_VV """ gxapi_cy.WrapVVU._average_repeat2_ex(GXContext._get_tls_geo(), ref_vv1, ref_vv2, dat_vv, mode) @classmethod def binary_search(cls, vv, val, l_min, l_max): """ Search numeric value in a `GXVV <geosoft.gxapi.GXVV>`. :param val: Value to search for. :param l_min: Minimum Location :param l_max: Maximum Location :type vv: GXVV :type val: float :type l_min: int_ref :type l_max: int_ref .. versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXVV <geosoft.gxapi.GXVV>` should be sorted.Search comparison is made on double comparison of the data. """ l_min.value, l_max.value = gxapi_cy.WrapVVU._binary_search(GXContext._get_tls_geo(), vv, val, l_min.value, l_max.value) @classmethod def box_cox(cls, vv, lm): """ Run Box-Cox (lambda) Transformation on `GXVV <geosoft.gxapi.GXVV>`. :param vv: [i/o] `GXVV <geosoft.gxapi.GXVV>` :param lm: [i] Lambda Value :type vv: GXVV :type lm: float .. 
versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._box_cox(GXContext._get_tls_geo(), vv, lm) @classmethod def bp_filt(cls, vv_i, vv_o, pr_sw, pr_lw, flen): """ Band-pass filter to the specified. :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` :param vv_o: Filtered `GXVV <geosoft.gxapi.GXVV>` :param pr_sw: Short wavelength cutoff, 0 for highpass :param pr_lw: Long wavelength cutoff, 0 for lowpass :param flen: Filter Length, 0 for default length :type vv_i: GXVV :type vv_o: GXVV :type pr_sw: float :type pr_lw: float :type flen: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the short and long wavelengths are <= 0, the input channel is simply copied to the output channel without filtering. The wavelengths are in fiducials. """ gxapi_cy.WrapVVU._bp_filt(GXContext._get_tls_geo(), vv_i, vv_o, pr_sw, pr_lw, flen) @classmethod def clip(cls, vv, min, max, clip): """ Clip a `GXVV <geosoft.gxapi.GXVV>` to a range. :param vv: `GXVV <geosoft.gxapi.GXVV>` to clip :param min: Minimum value, `rDUMMY <geosoft.gxapi.rDUMMY>` for no minimum clip :param max: Maximum value, `rDUMMY <geosoft.gxapi.rDUMMY>` for no maximum clip :param clip: :ref:`VVU_CLIP` :type vv: GXVV :type min: float :type max: float :type clip: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapVVU._clip(GXContext._get_tls_geo(), vv, min, max, clip) @classmethod def clip_to_detect_limit(cls, vv, det_limit, conv): """ Apply detection limit clipping of data. :param vv: Input data vv (altered). :param det_limit: Detection limit :param conv: Auto-convert negatives? :type vv: GXVV :type det_limit: float :type conv: int .. 
versionadded:: 5.1.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Flow: 1. If auto-converting negatives, then all negative values are replaced by -0.5*value, and detection limit is ignored. 2. If not auto-converting negatives, and the detection limit is not `rDUMMY <geosoft.gxapi.rDUMMY>`, then values less than the detection limit are converted to one-half the detection limit. This function is identical to `GXCHIMERA.clip_to_detect_limit <geosoft.gxapi.GXCHIMERA.clip_to_detect_limit>`. """ gxapi_cy.WrapVVU._clip_to_detect_limit(GXContext._get_tls_geo(), vv, det_limit, conv) @classmethod def decimate(cls, vv, decimate): """ Decimate a `GXVV <geosoft.gxapi.GXVV>`. :param decimate: Decimation factor (must be > 0) :type vv: GXVV :type decimate: int .. versionadded:: 6.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** For a decimation factor N, will remove all values except those with indices equal to MN, where M is an integer. 
""" gxapi_cy.WrapVVU._decimate(GXContext._get_tls_geo(), vv, decimate) @classmethod def deviation(cls, vv_x, vv_y, vv_d, x1, y1, x2, y2, line): """ Calculate distance of point locations to a straight line :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param vv_d: Output deviation `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param x1: X of 1st point to define straight line :param y1: Y of 1st point to define straight line :param x2: X of 2nd point or line azimuth in degrees (North is 0 degree) :param y2: Y of 2nd point or `GS_R8DM <geosoft.gxapi.GS_R8DM>` if line azimuth is defined :param line: :ref:`VVU_LINE` :type vv_x: GXVV :type vv_y: GXVV :type vv_d: GXVV :type x1: float :type y1: float :type x2: float :type y2: float :type line: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._deviation(GXContext._get_tls_geo(), vv_x, vv_y, vv_d, x1, y1, x2, y2, line) @classmethod def distance(cls, vv_x, vv_y, vv_d, x_fid_start, x_fid_incr, y_fid_start, y_fid_incr): """ Create a cumulative distance `GXVV <geosoft.gxapi.GXVV>` :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param vv_d: Output distance `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param x_fid_start: X `GXVV <geosoft.gxapi.GXVV>` fid start :param x_fid_incr: X `GXVV <geosoft.gxapi.GXVV>` fid incr :param y_fid_start: Y `GXVV <geosoft.gxapi.GXVV>` fid start :param y_fid_incr: Y `GXVV <geosoft.gxapi.GXVV>` fid incr :type vv_x: GXVV :type vv_y: GXVV :type vv_d: GXVV :type x_fid_start: float :type x_fid_incr: float :type y_fid_start: float :type y_fid_incr: float .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._distance(GXContext._get_tls_geo(), vv_x, vv_y, vv_d, x_fid_start, x_fid_incr, y_fid_start, y_fid_incr) @classmethod def distance_link_non_dummies(cls, vv_x, vv_y, vv_d, x_fid_start, x_fid_incr, y_fid_start, y_fid_incr): """ Create distance linking non-dummies `GXVV <geosoft.gxapi.GXVV>` :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param vv_d: Output distance `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>` :param x_fid_start: X `GXVV <geosoft.gxapi.GXVV>` fid start :param x_fid_incr: X `GXVV <geosoft.gxapi.GXVV>` fid incr :param y_fid_start: Y `GXVV <geosoft.gxapi.GXVV>` fid start :param y_fid_incr: Y `GXVV <geosoft.gxapi.GXVV>` fid incr :type vv_x: GXVV :type vv_y: GXVV :type vv_d: GXVV :type x_fid_start: float :type x_fid_incr: float :type y_fid_start: float :type y_fid_incr: float .. versionadded:: 2022.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._distance_link_non_dummies(GXContext._get_tls_geo(), vv_x, vv_y, vv_d, x_fid_start, x_fid_incr, y_fid_start, y_fid_incr) @classmethod def distance_non_cumulative(cls, vv_x, vv_y, vv_d, x_fid_start, x_fid_incr, y_fid_start, y_fid_incr): """ Create a non cumulative distance `GXVV <geosoft.gxapi.GXVV>` i.e each distance element is the distance of the corresponding (X,Y) element and the previous element. 
:param vv_x: X `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param vv_d: Output distance `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param x_fid_start: X `GXVV <geosoft.gxapi.GXVV>` fid start
        :param x_fid_incr: X `GXVV <geosoft.gxapi.GXVV>` fid incr
        :param y_fid_start: Y `GXVV <geosoft.gxapi.GXVV>` fid start
        :param y_fid_incr: Y `GXVV <geosoft.gxapi.GXVV>` fid incr
        :type vv_x: GXVV
        :type vv_y: GXVV
        :type vv_d: GXVV
        :type x_fid_start: float
        :type x_fid_incr: float
        :type y_fid_start: float
        :type y_fid_incr: float

        .. versionadded:: 7.2

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The first distance element is `rDUMMY <geosoft.gxapi.rDUMMY>`.
        """
        gxapi_cy.WrapVVU._distance_non_cumulative(GXContext._get_tls_geo(), vv_x, vv_y, vv_d, x_fid_start, x_fid_incr, y_fid_start, y_fid_incr)



    @classmethod
    def distance_3d(cls, vv_x, vv_y, vv_z, start_distance, vv_d):
        """
        Create a cumulative distance `GXVV <geosoft.gxapi.GXVV>` from X, Y and Z VVs

        :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param vv_z: Z `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param start_distance: Distance at first location
        :param vv_d: Output distance `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :type vv_x: GXVV
        :type vv_y: GXVV
        :type vv_z: GXVV
        :type start_distance: float
        :type vv_d: GXVV

        .. versionadded:: 8.0.1

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The output `GXVV <geosoft.gxapi.GXVV>` is the length of the shortest X,Y or Z input `GXVV <geosoft.gxapi.GXVV>`.
Any values with dummies are ignored - the distance at that point
        is equal to the distance at the previous valid point.

        The returned `GXVV <geosoft.gxapi.GXVV>` is the cumulative straight-line
        distance between the points. No re-sampling is performed.

        VVs of any type are supported.
        """
        gxapi_cy.WrapVVU._distance_3d(GXContext._get_tls_geo(), vv_x, vv_y, vv_z, start_distance, vv_d)



    @classmethod
    def find_gaps_3d(cls, vv_x, vv_y, vv_z, gap, vv_g):
        """
        Return indices of locations separated from previous locations by more than the input gap distance.

        :param vv_x: X `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param vv_z: Z `GXVV <geosoft.gxapi.GXVV>`,REAL `GXVV <geosoft.gxapi.GXVV>`
        :param gap: Gap size (must be greater than zero)
        :param vv_g: Returned indices of start of sections after gaps (INT `GXVV <geosoft.gxapi.GXVV>`)
        :type vv_x: GXVV
        :type vv_y: GXVV
        :type vv_z: GXVV
        :type gap: float
        :type vv_g: GXVV

        .. versionadded:: 8.1

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Locate the starting points of line segments determined by an
        input gap distance. The returned indices indicate where to break the
        line, given an input gap. The number of returned indices is one less
        than the number of line segments. (So if there are no gaps the returned
        `GXVV <geosoft.gxapi.GXVV>` has zero length).
        """
        gxapi_cy.WrapVVU._find_gaps_3d(GXContext._get_tls_geo(), vv_x, vv_y, vv_z, gap, vv_g)



    @classmethod
    def dummy_range(cls, vv, min, max, inside, incl):
        """
        Dummy values inside or outside a range in a `GXVV <geosoft.gxapi.GXVV>`

        :param vv: `GXVV <geosoft.gxapi.GXVV>` handle
        :param min: Minimum range value
        :param max: Maximum range value
        :param inside: If TRUE, dummy inside the range
        :param incl: If TRUE, include Min, Max in the range.
:type vv: GXVV :type min: float :type max: float :type inside: int :type incl: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the Inside flag is TRUE, values within the specified range are set to dummy. If the inside flag is FALSE, values outside the range are set to dummy. If the Inclusive flag is TRUE, then dMin and dMax are considered part of the range. If it is FALSE, then < or > are used, and dMin and dMax lie outside the range. """ gxapi_cy.WrapVVU._dummy_range(GXContext._get_tls_geo(), vv, min, max, inside, incl) @classmethod def dummy_range_ex(cls, vv, min, max, inside, include_min, include_max): """ Like DummyRangeVVU, with inclusion options for both ends. :param vv: `GXVV <geosoft.gxapi.GXVV>` handle :param min: Minimum range value :param max: Maximum range value :param inside: If TRUE, dummy inside the range :param include_min: If TRUE, include Min in the range. :param include_max: If TRUE, include Max in the range. :type vv: GXVV :type min: float :type max: float :type inside: int :type include_min: int :type include_max: int .. versionadded:: 5.0.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the Inside flag is TRUE, values within the specified range are set to dummy. If the inside flag is FALSE, values outside the range are set to dummy. If the Inclusive flag is TRUE, then dMin and dMax are considered part of the range. If it is FALSE, then < or > are used, and dMin and dMax lie outside the range. """ gxapi_cy.WrapVVU._dummy_range_ex(GXContext._get_tls_geo(), vv, min, max, inside, include_min, include_max) @classmethod def dummy_repeat(cls, vv, mode): """ Dummy repeat values in a `GXVV <geosoft.gxapi.GXVV>`. :param mode: :ref:`VVU_DUMMYREPEAT` :type vv: GXVV :type mode: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Either the first, middle or last point will be left. Use `interp <geosoft.gxapi.GXVVU.interp>` to interpolate after if desired. """ gxapi_cy.WrapVVU._dummy_repeat(GXContext._get_tls_geo(), vv, mode) @classmethod def dup_stats(cls, data_vv, sample_vv, mean_vv, diff_vv): """ Calculate means and differences for duplicate sample pairs :param data_vv: Duplicate data `GXVV <geosoft.gxapi.GXVV>` :param sample_vv: Sample Type `GXVV <geosoft.gxapi.GXVV>` :param mean_vv: Mean values `GXVV <geosoft.gxapi.GXVV>` (returned) :param diff_vv: Diff values `GXVV <geosoft.gxapi.GXVV>` (returned) :type data_vv: GXVV :type sample_vv: GXVV :type mean_vv: GXVV :type diff_vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Created for duplicate sample handling in `GXCHIMERA <geosoft.gxapi.GXCHIMERA>`. On input, a numeric `GXVV <geosoft.gxapi.GXVV>` containing data values, and a sample type `GXVV <geosoft.gxapi.GXVV>`. Sample pairs have types "1" and "2". This routine searches for types in order "1 2 1 2", and writes the mean values of pairs to the mean value `GXVV <geosoft.gxapi.GXVV>`, and the differences with the mean (equal values, negative and positive) to the difference `GXVV <geosoft.gxapi.GXVV>`. Results for samples out of order, for unmatched values, or when the sample type does not equal "1" or "2" are set to dummy. """ gxapi_cy.WrapVVU._dup_stats(GXContext._get_tls_geo(), data_vv, sample_vv, mean_vv, diff_vv) @classmethod def exp_dist(cls, vv, seed, mean, length): """ Fill with exponentially distributed values. 
:param vv: `GXVV <geosoft.gxapi.GXVV>` object
        :param seed: Random number generator seed
        :param mean: Mean value of distribution (> 0.0)
        :param length: Number of values (-1 for all)
        :type vv: GXVV
        :type seed: int
        :type mean: float
        :type length: int

        .. versionadded:: 5.1.8

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** `GXVV <geosoft.gxapi.GXVV>` is set to input length (except for -1)

        See RAND for a short description of the random number generator used.
        """
        gxapi_cy.WrapVVU._exp_dist(GXContext._get_tls_geo(), vv, seed, mean, length)



    @classmethod
    def filter(cls, vv_i, vv_o, flt):
        """
        Apply a convolution filter to a `GXVV <geosoft.gxapi.GXVV>`.

        :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>`
        :param vv_o: Filtered `GXVV <geosoft.gxapi.GXVV>`
        :param flt: Filter handle (see `GXFLT <geosoft.gxapi.GXFLT>`)
        :type vv_i: GXVV
        :type vv_o: GXVV
        :type flt: int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapVVU._filter(GXContext._get_tls_geo(), vv_i, vv_o, flt)



    @classmethod
    def find_string_items(cls, vv_source, vv_search, pis_source_sorted, pis_search_sorted, pis_case_tolerant, vv_i):
        """
        Searches a `GXVV <geosoft.gxapi.GXVV>` for items in a second `GXVV <geosoft.gxapi.GXVV>`, returns indices of those found.

        :param vv_source: String `GXVV <geosoft.gxapi.GXVV>` in which to locate items
        :param vv_search: String `GXVV <geosoft.gxapi.GXVV>` Items to search for
        :param pis_source_sorted: Is the first `GXVV <geosoft.gxapi.GXVV>` already sorted?
        :param pis_search_sorted: Is the second `GXVV <geosoft.gxapi.GXVV>` already sorted
        :param pis_case_tolerant: Case tolerance for string comparisons
        :param vv_i: `GS_LONG <geosoft.gxapi.GS_LONG>` `GXVV <geosoft.gxapi.GXVV>` of returned indices into the first `GXLST <geosoft.gxapi.GXLST>`.
:type vv_source: GXVV :type vv_search: GXVV :type pis_source_sorted: int :type pis_search_sorted: int :type pis_case_tolerant: int :type vv_i: GXVV .. versionadded:: 7.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This is a much more efficient way of determining if items in one `GXVV <geosoft.gxapi.GXVV>` are found in a second, than by searching repeatedly in a loop. The returned `GS_LONG <geosoft.gxapi.GS_LONG>` `GXVV <geosoft.gxapi.GXVV>` contains the same number of items as the "search items" `GXVV <geosoft.gxapi.GXVV>`, and contains -1 for items where the value is not found, and the index of items that are found. Comparisons are case-tolerant. Non-string VVs are converted to string type VVs (element size 24) internally. The method requires that the `GXVV <geosoft.gxapi.GXVV>` items be sorted, and will do so internally. Since the input VVs may already be sorted, the method will run faster if this stage can be skipped. """ gxapi_cy.WrapVVU._find_string_items(GXContext._get_tls_geo(), vv_source, vv_search, pis_source_sorted, pis_search_sorted, pis_case_tolerant, vv_i) @classmethod def fractal_filter(cls, vv_i, order, number, vv_o): """ Fractal filter a `GXVV <geosoft.gxapi.GXVV>`. :param vv_i: [i] `GXVV <geosoft.gxapi.GXVV>` :param order: [i] filter order :param number: [i] filter number :param vv_o: [o] filtered `GXVV <geosoft.gxapi.GXVV>` :type vv_i: GXVV :type order: int :type number: int :type vv_o: GXVV .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._fractal_filter(GXContext._get_tls_geo(), vv_i, order, number, vv_o) @classmethod def close_xy(cls, vv_x, vv_y, x, y): """ Find the closest point to an input point (XY). 
:param vv_x: X locations :param vv_y: Y locations :param x: Input X :param y: Input Y :type vv_x: GXVV :type vv_y: GXVV :type x: float :type y: float :returns: Index of closest point, -1 if no valid locations, or data is masked. :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Input X and Y location VVs, and a location. Returns the index of the point in the `GXVV <geosoft.gxapi.GXVV>` closest to the input point. """ ret_val = gxapi_cy.WrapVVU._close_xy(GXContext._get_tls_geo(), vv_x, vv_y, x, y) return ret_val @classmethod def close_xym(cls, vv_x, vv_y, vv_m, x, y): """ Find the closest point to an input point, with mask (XY). :param vv_x: X locations :param vv_y: Y locations :param vv_m: Mask values :param x: Input X :param y: Input Y :type vv_x: GXVV :type vv_y: GXVV :type vv_m: GXVV :type x: float :type y: float :returns: Index of closest point, -1 if no valid locations, or data is masked. :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Input X and Y location VVs, and a location. Returns the index of the point in the `GXVV <geosoft.gxapi.GXVV>` closest to the input point. This skips points where the mask value is dummy. If no valid points are in the VVs, or all the mask `GXVV <geosoft.gxapi.GXVV>` values are dummy, the returned index is -1. """ ret_val = gxapi_cy.WrapVVU._close_xym(GXContext._get_tls_geo(), vv_x, vv_y, vv_m, x, y) return ret_val @classmethod def close_xyz(cls, vv_x, vv_y, vv_z, x, y, z): """ Find the closest point to an input point (XYZ). 
:param vv_x: X locations :param vv_y: Y locations :param vv_z: Z locations :param x: Input X :param y: Input Y :param z: Input Z :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type x: float :type y: float :type z: float :returns: Index of closest point, -1 if no valid locations, or data is masked. :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Input X, Y and Z location VVs, and a location. Returns the index of the point in the `GXVV <geosoft.gxapi.GXVV>` closest to the input point. """ ret_val = gxapi_cy.WrapVVU._close_xyz(GXContext._get_tls_geo(), vv_x, vv_y, vv_z, x, y, z) return ret_val @classmethod def close_xyzm(cls, vv_x, vv_y, vv_z, vv_m, x, y, z): """ Find the closest point to an input point, with mask (XYZ). :param vv_x: X locations :param vv_y: Y locations :param vv_z: Z locations :param vv_m: Mask values :param x: Input X :param y: Input Y :param z: Input Z :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_m: GXVV :type x: float :type y: float :type z: float :returns: Index of closest point, -1 if no valid locations, or data is masked. :rtype: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Input X, Y and Z location VVs, and a location. Returns the index of the point in the `GXVV <geosoft.gxapi.GXVV>` closest to the input point. This skips points where the mask value is dummy. If no valid points are in the VVs, or all the mask `GXVV <geosoft.gxapi.GXVV>` values are dummy, the returned index is -1. """ ret_val = gxapi_cy.WrapVVU._close_xyzm(GXContext._get_tls_geo(), vv_x, vv_y, vv_z, vv_m, x, y, z) return ret_val @classmethod def dummy_back_tracks(cls, vv): """ Dummy all points that keep a `GXVV <geosoft.gxapi.GXVV>` from being monotonically increasing. 
:param vv: `GXVV <geosoft.gxapi.GXVV>` handle
        :type vv: GXVV

        :returns: The number of items dummied in order to render the
                  `GXVV <geosoft.gxapi.GXVV>` monotonically increasing.
        :rtype: int

        .. versionadded:: 7.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The `GXVV <geosoft.gxapi.GXVV>` length remains the same.
        Any point that is less than or equal to the previous (valid) point
        in the `GXVV <geosoft.gxapi.GXVV>` is dummied.
        """
        ret_val = gxapi_cy.WrapVVU._dummy_back_tracks(GXContext._get_tls_geo(), vv)
        return ret_val



    @classmethod
    def find_dummy(cls, vv, dir, type, start, end):
        """
        Find the first dummy|non-dummy value in `GXVV <geosoft.gxapi.GXVV>`

        :param vv: `GXVV <geosoft.gxapi.GXVV>` handle
        :param dir: 0 increasing order 1 decreasing order
        :param type: 0 to find the first dummy 1 find first non-dummy
        :param start: Start search range at element
        :param end: End search range at element (-1 for last)
        :type vv: GXVV
        :type dir: int
        :type type: int
        :type start: int
        :type end: int

        :returns: The index of the first dummy|non-dummy value in `GXVV <geosoft.gxapi.GXVV>`
                  -1 if not found or if length of `GXVV <geosoft.gxapi.GXVV>` is 0
        :rtype: int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Start and end of range are always defined lowest
        to largest even if decreasing search order. To search entire
        `GXVV <geosoft.gxapi.GXVV>` range, specify 0,-1.
        """
        ret_val = gxapi_cy.WrapVVU._find_dummy(GXContext._get_tls_geo(), vv, dir, type, start, end)
        return ret_val



    @classmethod
    def interp(cls, vv, input, output):
        """
        Replace all dummies by interpolating from valid data.

        :param vv: Input `GXVV <geosoft.gxapi.GXVV>`
        :param input: :ref:`VVU_INTERP`
        :param output: :ref:`VVU_INTERP_EDGE`
        :type vv: GXVV
        :type input: int
        :type output: int

        ..
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Edge behaviour Dummies at the ends are treated as follows for various combinations of the inside and outside interpolation choices: :: if ((iOutside==VV_INTERP_EDGE_NEAREST) || (iOutside==VV_INTERP_EDGE_SAME && iInside==VV_INTERP_NEAREST)) // -- Set dummies to the same value as the last defined element else if ((iOutside==VV_INTERP_EDGE_LINEAR) || (iOutside==VV_INTERP_EDGE_SAME && iInside==VV_INTERP_LINEAR)) // --- Set dummies using the slope of the last two defined elements endif In all other cases and combinations of the two interpolation choices, the dummies are left "as is". """ gxapi_cy.WrapVVU._interp(GXContext._get_tls_geo(), vv, input, output) @classmethod def qc_fill_gaps(cls, vvx, vvy, vvf, vvd, dist): """ Calculate fill in line segments :param vvx: Input/output X `GXVV <geosoft.gxapi.GXVV>` on which to operate Required in `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param vvy: Input/output Y `GXVV <geosoft.gxapi.GXVV>` on which to operate In `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param vvf: Input Flag `GXVV <geosoft.gxapi.GXVV>` Required in `GS_BYTE <geosoft.gxapi.GS_BYTE>` :param vvd: Input Gap `GXVV <geosoft.gxapi.GXVV>` to use for locating the fill inline segments In `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param dist: Min segment length (required) :type vvx: GXVV :type vvy: GXVV :type vvf: GXVV :type vvd: GXVV :type dist: float :returns: 1 if error, 0 if successful :rtype: int .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The X & Y VVs are returned as the calculated fill in line segments. 
""" ret_val = gxapi_cy.WrapVVU._qc_fill_gaps(GXContext._get_tls_geo(), vvx, vvy, vvf, vvd, dist) return ret_val @classmethod def search_text(cls, vv, text, case_sensitive, match, start, dir): """ Search for a text value in a `GXVV <geosoft.gxapi.GXVV>` :param vv: `GXVV <geosoft.gxapi.GXVV>` to search :param text: Text to match :param case_sensitive: :ref:`VVU_CASE` :param match: :ref:`VVU_MATCH` :param start: Index to begin search (-1 for full `GXVV <geosoft.gxapi.GXVV>`) :param dir: 1: forward search, -1: backward search :type vv: GXVV :type text: str :type case_sensitive: int :type match: int :type start: int :type dir: int :returns: Index of first matching text, -1 if not found. :rtype: int .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Search comparison is made on string comparison of the data. Returns index of first item matching the input string. If start index is -1 or dummy, then full `GXVV <geosoft.gxapi.GXVV>` is searched. Use `VVU_MATCH_INPUT_LENGTH <geosoft.gxapi.VVU_MATCH_INPUT_LENGTH>` to match the first part of a string. This is also recommended for matching numerical values, since the displayed value in the database may not be the same as the stored value. .. seealso:: sSearchReplace_VV """ ret_val = gxapi_cy.WrapVVU._search_text(GXContext._get_tls_geo(), vv, text.encode(), case_sensitive, match, start, dir) return ret_val @classmethod def mask(cls, vv_d, vv_m): """ Mask dummies in one `GXVV <geosoft.gxapi.GXVV>` onto another. :param vv_d: `GXVV <geosoft.gxapi.GXVV>` to be masked :param vv_m: Mask reference `GXVV <geosoft.gxapi.GXVV>` :type vv_d: GXVV :type vv_m: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** `GXVV <geosoft.gxapi.GXVV>` to mask will be resampled to reference `GXVV <geosoft.gxapi.GXVV>` if required. The returned length of the `GXVV <geosoft.gxapi.GXVV>` to mask will be the shorter of the reference `GXVV <geosoft.gxapi.GXVV>` or the mask `GXVV <geosoft.gxapi.GXVV>`. """ gxapi_cy.WrapVVU._mask(GXContext._get_tls_geo(), vv_d, vv_m) @classmethod def mask_and(cls, vv_a, vv_b, vv_c): """ Create mask from logical AND of two VVs. :param vv_a: `GXVV <geosoft.gxapi.GXVV>` A :param vv_b: `GXVV <geosoft.gxapi.GXVV>` B :param vv_c: `GXVV <geosoft.gxapi.GXVV>` C (returned) :type vv_a: GXVV :type vv_b: GXVV :type vv_c: GXVV .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If both values are non-dummies, then result is 1, else dummy. """ gxapi_cy.WrapVVU._mask_and(GXContext._get_tls_geo(), vv_a, vv_b, vv_c) @classmethod def mask_or(cls, vv_a, vv_b, vv_c): """ Create mask from logical OR of two VVs. :param vv_a: `GXVV <geosoft.gxapi.GXVV>` A :param vv_b: `GXVV <geosoft.gxapi.GXVV>` B :param vv_c: `GXVV <geosoft.gxapi.GXVV>` C (returned) :type vv_a: GXVV :type vv_b: GXVV :type vv_c: GXVV .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If either values is non-dummy, then result is 1, else dummy. """ gxapi_cy.WrapVVU._mask_or(GXContext._get_tls_geo(), vv_a, vv_b, vv_c) @classmethod def nl_filt(cls, vv_i, vv_o, fwid, pr_ftol): """ Applies a non-linear filter. :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` :param vv_o: Filtered `GXVV <geosoft.gxapi.GXVV>` :param fwid: Filter Width :param pr_ftol: Filter Tolerance, 0 for 1% of Std. Dev. 
:type vv_i: GXVV :type vv_o: GXVV :type fwid: int :type pr_ftol: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._nl_filt(GXContext._get_tls_geo(), vv_i, vv_o, fwid, pr_ftol) @classmethod def noise_check(cls, vv_i, vv_f, all_tol, num): """ Check on deviation of data from variable background in a `GXVV <geosoft.gxapi.GXVV>` :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` on which to apply quality control Required in `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param vv_f: Output flag `GXVV <geosoft.gxapi.GXVV>` with result 0 and 1. Required in `GS_BYTE <geosoft.gxapi.GS_BYTE>` :param all_tol: Allowed deviation over a number of data points in input `GXVV <geosoft.gxapi.GXVV>` (next parameter). Must be >= 0.0 :param num: Number of data points. Must be > 0 :type vv_i: GXVV :type vv_f: GXVV :type all_tol: float :type num: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This function checks vertical deviation of data in input `GXVV <geosoft.gxapi.GXVV>` against a moving straight line. The straight line at any time is defined by two extreme points of a data segment. Output `GXVV <geosoft.gxapi.GXVV>` will be 0 if data point in input `GXVV <geosoft.gxapi.GXVV>` falls within the deviation, otherwise, it will be 1. Output `GXVV <geosoft.gxapi.GXVV>` will be 0 if the straight line is vertical. """ gxapi_cy.WrapVVU._noise_check(GXContext._get_tls_geo(), vv_i, vv_f, all_tol, num) @classmethod def noise_check2(cls, vv_i, vv_f, vv_d, all_tol, num): """ Like `noise_check <geosoft.gxapi.GXVVU.noise_check>`, but returns maximum deviation at all points. 
:param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` on which to apply quality control Required in `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param vv_f: Output flag `GXVV <geosoft.gxapi.GXVV>` with result 0 and 1. Required in `GS_BYTE <geosoft.gxapi.GS_BYTE>` :param vv_d: Output maximum deviation `GXVV <geosoft.gxapi.GXVV>`. :param all_tol: Allowed deviation over a number of data points in input `GXVV <geosoft.gxapi.GXVV>` (next parameter). Must be >= 0.0 :param num: Number of data points in the line segment. Must be > 0 :type vv_i: GXVV :type vv_f: GXVV :type vv_d: GXVV :type all_tol: float :type num: int .. versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This function checks vertical deviation of data in an input `GXVV <geosoft.gxapi.GXVV>` against a moving straight line, where the X-axis value is taken to be the data index, and the Y-axis value is the input data `GXVV <geosoft.gxapi.GXVV>` value. The straight line is drawn between data points at the ends of the line segment, whose length is an input. The output flag `GXVV <geosoft.gxapi.GXVV>` is set to 0 if data point in input `GXVV <geosoft.gxapi.GXVV>` falls within the deviation for all the moving line segments of which it is a part, otherwise, it will be set to 1. The output maximum deviation `GXVV <geosoft.gxapi.GXVV>` contains the maximum deviation at each point for all the moving line segments that it is a part of. """ gxapi_cy.WrapVVU._noise_check2(GXContext._get_tls_geo(), vv_i, vv_f, vv_d, all_tol, num) @classmethod def normal_dist(cls, vv, seed, mean, var, length): """ Fill with normally (Gaussian) distributed values. 
        :param vv: `GXVV <geosoft.gxapi.GXVV>` object
        :param seed: Random number generator seed
        :param mean: Mean value of distribution
        :param var: Variance of the distribution
        :param length: Number of values (-1 for all)
        :type vv: GXVV
        :type seed: int
        :type mean: float
        :type var: float
        :type length: int

        .. versionadded:: 5.1.8

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** `GXVV <geosoft.gxapi.GXVV>` is set to input length (except for -1)

        See RAND for a short description of the random number generator used.
        """
        gxapi_cy.WrapVVU._normal_dist(GXContext._get_tls_geo(), vv, seed, mean, var, length)



    @classmethod
    def offset_circles(cls, vv_xi, vv_yi, offset, radius, vv_xo, vv_yo):
        """
        Get non-overlapping offset location for circular symbols.

        :param vv_xi: Input X locations
        :param vv_yi: Input Y locations
        :param offset: Minimum offset distance
        :param radius: Symbol radius
        :param vv_xo: Output (offset) X locations
        :param vv_yo: Output (offset) Y locations
        :type vv_xi: GXVV
        :type vv_yi: GXVV
        :type offset: float
        :type radius: float
        :type vv_xo: GXVV
        :type vv_yo: GXVV

        .. versionadded:: 5.0.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Often on maps plotted symbols and text overlap each other.
        This routine accepts a `GXVV <geosoft.gxapi.GXVV>` of locations
        and returns a new set of locations offset from the originals,
        and guaranteed not to overlap, given the size of the original
        symbols. The returned offset X, Y
        locations are offset from the original locations by
        the minimum of a) the input offset, b) the input symbol radius.
        This is to ensure that the original location is never covered by
        the offset symbol.
Care should be taken when choosing the symbol size, because if the point density is too high, all the points will get pushed to the outside edge and your plot will look like a hedgehog (it also takes a lot longer!). """ gxapi_cy.WrapVVU._offset_circles(GXContext._get_tls_geo(), vv_xi, vv_yi, offset, radius, vv_xo, vv_yo) @classmethod def offset_correct(cls, vv_xi, vv_yi, dist, heading, v_vxo, v_vyo): """ Correct locations based on heading and fixed offset. :param vv_xi: Input X :param vv_yi: Input Y :param dist: Offset distance :param heading: :ref:`VVU_OFFSET` :param v_vxo: Output X :param v_vyo: Output Y :type vv_xi: GXVV :type vv_yi: GXVV :type dist: float :type heading: int :type v_vxo: GXVV :type v_vyo: GXVV .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** In many applications, measurements are taken with an instrument which is towed behind, or pushed ahead of where the locations are recorded. Use this function to estimate the actual location of the instrument. The method determines the heading along the line, using a "thinned" version of the line. The degree of thinning is based on the size of the offset; the larger the offset, the greater the distance between sample locations used to construct the thinned lined used for determining headings. The thinned line is splined at a frequency greater than the sample frequency, and the heading at any given point is determined from the vector formed by the closest two points on the splined line. The correction (behind, in front, left or right) is determined with respect to the heading, and added to the original location. IF this method fails, no dummies, no duplicated locations, no reversals are produced. The algorithm: 1. Determine average distance between each point = D 2. Smoothing interval = MAX(2*D, Offset distance) = I 3. 
Thin input points to be at least the smoothing interval I apart from each other. 4. Smoothly re-interpolate the thinned points at five times the original average distance D. 5. For each input point, calculate the bearing using the nearest points on the smoothed curve """ gxapi_cy.WrapVVU._offset_correct(GXContext._get_tls_geo(), vv_xi, vv_yi, dist, heading, v_vxo, v_vyo) @classmethod def offset_correct2(cls, vv_xi, vv_yi, dist, azimuth, vv_xo, vv_yo): """ Same as `offset_correct <geosoft.gxapi.GXVVU.offset_correct>`, but for an arbitrary offset angle. :param vv_xi: Input X :param vv_yi: Input Y :param dist: Offset distance :param azimuth: Offset azimuth (degrees counter-clockwise from straight ahead) :param vv_xo: Output X :param vv_yo: Output Y :type vv_xi: GXVV :type vv_yi: GXVV :type dist: float :type azimuth: float :type vv_xo: GXVV :type vv_yo: GXVV .. versionadded:: 5.1.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._offset_correct2(GXContext._get_tls_geo(), vv_xi, vv_yi, dist, azimuth, vv_xo, vv_yo) @classmethod def offset_correct3(cls, vv_xi, vv_yi, dist, azimuth, interval, vv_xo, vv_yo): """ Same as `offset_correct2 <geosoft.gxapi.GXVVU.offset_correct2>`, but specify smoothing interval. :param vv_xi: Input X :param vv_yi: Input Y :param dist: Offset distance :param azimuth: Offset azimuth (degrees counter-clockwise from straight ahead) :param interval: Averaging interval - `rDUMMY <geosoft.gxapi.rDUMMY>` for default :param vv_xo: Output X :param vv_yo: Output Y :type vv_xi: GXVV :type vv_yi: GXVV :type dist: float :type azimuth: float :type interval: float :type vv_xo: GXVV :type vv_yo: GXVV .. versionadded:: 5.1.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See the algorithm note #2 above for the default smoothing interval. 
""" gxapi_cy.WrapVVU._offset_correct3(GXContext._get_tls_geo(), vv_xi, vv_yi, dist, azimuth, interval, vv_xo, vv_yo) @classmethod def offset_correct_xyz(cls, vv_xi, vv_yi, vv_zi, x_off, y_off, z_off, interval, v_vxo, v_vyo, v_vzo): """ Correct locations based on heading and fixed offset. :param vv_xi: Input X :param vv_yi: Input Y :param vv_zi: Input Z :param x_off: Offset along-track (+ve forward) :param y_off: Offset across-track (+ve to the right) :param z_off: Vertical Offset (+ve up) :param interval: Sampling interval - `rDUMMY <geosoft.gxapi.rDUMMY>` for default :param v_vxo: Output X :param v_vyo: Output Y :param v_vzo: Output Z :type vv_xi: GXVV :type vv_yi: GXVV :type vv_zi: GXVV :type x_off: float :type y_off: float :type z_off: float :type interval: float :type v_vxo: GXVV :type v_vyo: GXVV :type v_vzo: GXVV .. versionadded:: 9.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** In many applications, measurements are taken with an instrument which is towed behind, or pushed ahead of where the locations are recorded. Use this function to estimate the actual location of the instrument. The method determines the heading along the line, using a "thinned" version of the line. The default degree of thinning is based on the size of the offset; the larger the offset, the greater the distance between sample locations used to construct the thinned lined used for determining headings. The thinned line is splined at a frequency greater than the sample frequency, and the heading at any given point is determined from the vector formed by the closest two points on the splined line. The correction (behind, in front, left or right) is determined with respect to the heading, and added to the original location. IF this method fails, no dummies, no duplicated locations, no reversals are produced. The algorithm: 1. Determine average distance between each point = D 2. 
Default smoothing interval = MAX(2*D, Offset distance) = I 3. Thin input points to be at least the smoothing interval I apart from each other. 4. Smoothly re-interpolate the thinned points at five times the original average distance D. 5. For each input point, calculate the bearing using the nearest points on the smoothed curve """ gxapi_cy.WrapVVU._offset_correct_xyz(GXContext._get_tls_geo(), vv_xi, vv_yi, vv_zi, x_off, y_off, z_off, interval, v_vxo, v_vyo, v_vzo) @classmethod def offset_rectangles(cls, vv_xi, vv_yi, offset, size_x, size_y, vv_xo, vv_yo): """ Get non-overlapping offset location for rectangular symbols. :param vv_xi: Input X locations :param vv_yi: Input Y locations :param offset: Minimum offset distance :param size_x: Symbol X size (width) :param size_y: Symbol Y size (height) :param vv_xo: Output (offset) X locations :param vv_yo: Output (offset) Y locations :type vv_xi: GXVV :type vv_yi: GXVV :type offset: float :type size_x: float :type size_y: float :type vv_xo: GXVV :type vv_yo: GXVV .. versionadded:: 5.0.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Often on maps plotted symbols and text overlap each other. This routine accepts of `GXVV <geosoft.gxapi.GXVV>` of locations and returns a new set of locations offset from the originals, and guaranteed not to overlap, given the size of the original symbols. The returned offset X, Y locations are offset from the original locations by the minimum of a) the input offset, b) the input symbol X or Y size. This is to ensure that the original location is never covered by the offset symbol. In addition, the offset symbol is never place directly below the original location, to make it easier to draw a connecting line. 
Care should be taken when choosing the symbol size, because if the point density is too high, all the points will get pushed to the outside edge and your plot will look like a hedgehog (it also takes a lot longer!). """ gxapi_cy.WrapVVU._offset_rectangles(GXContext._get_tls_geo(), vv_xi, vv_yi, offset, size_x, size_y, vv_xo, vv_yo) @classmethod def pick_peak(cls, vv_i, vv_o, pr_tol, width): """ Find peaks in a `GXVV <geosoft.gxapi.GXVV>` - method one. :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` :param vv_o: Returned peak `GXVV <geosoft.gxapi.GXVV>`, all dummies except peak points. :param pr_tol: Minimum value to accept (0.0 to find all) :param width: Minimum width to accept (1 to find all) :type vv_i: GXVV :type vv_o: GXVV :type pr_tol: float :type width: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Peaks are the maximum point within a sequence of positive values in the input `GXVV <geosoft.gxapi.GXVV>`. The width is the number of points in the positive sequence. A `GXVV <geosoft.gxapi.GXVV>` may have to be pre-filtered before finding the peak values: Use `bp_filt <geosoft.gxapi.GXVVU.bp_filt>` to smooth the data as required. Use `filter <geosoft.gxapi.GXVVU.filter>` to apply a Laplace filter "-0.5,1.0,-0.5" to make curvature data. """ gxapi_cy.WrapVVU._pick_peak(GXContext._get_tls_geo(), vv_i, vv_o, pr_tol, width) @classmethod def pick_peak2(cls, vv_i, vv_o, pr_base_lvl, pr_ampl): """ Find peaks in a `GXVV <geosoft.gxapi.GXVV>` - method two. :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` :param vv_o: Returned peak `GXVV <geosoft.gxapi.GXVV>`, all dummies except peak points. :param pr_base_lvl: Base level to accept (0.0 to find all) :param pr_ampl: Minimum amplitude to accept :type vv_i: GXVV :type vv_o: GXVV :type pr_base_lvl: float :type pr_ampl: float .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Peaks are the maximum point within a sequence of values in the input `GXVV <geosoft.gxapi.GXVV>`. Maximum points must be above the base level and have a local amplitude greater than the minimum amplitude specified. A `GXVV <geosoft.gxapi.GXVV>` may have to be pre-filtered before finding the peak values. """ gxapi_cy.WrapVVU._pick_peak2(GXContext._get_tls_geo(), vv_i, vv_o, pr_base_lvl, pr_ampl) @classmethod def pick_peak3(cls, vv_i, vv_x, vv_y, pr_base_lvl, pr_ampl, v_vind, v_vamp, v_vwid, v_vhawid): """ Find peaks in a `GXVV <geosoft.gxapi.GXVV>` - method two, returning width and half-amplitude widths. :param vv_i: [i] data `GXVV <geosoft.gxapi.GXVV>` :param vv_x: [i] X `GXVV <geosoft.gxapi.GXVV>` used to calculate distance :param vv_y: [i] Y `GXVV <geosoft.gxapi.GXVV>` used to calculate distance :param pr_base_lvl: [i] minimum value to accept (0.0 to find all) :param pr_ampl: [i] amplitude :param v_vind: [o] Indices with peak locations :param v_vamp: [o] Amplitudes at the peaks :param v_vwid: [o] Anomaly widths :param v_vhawid: [o] Anomaly half-amplitude widths :type vv_i: GXVV :type vv_x: GXVV :type vv_y: GXVV :type pr_base_lvl: float :type pr_ampl: float :type v_vind: GXVV :type v_vamp: GXVV :type v_vwid: GXVV :type v_vhawid: GXVV .. versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Uses Method 2 above, but also returns the anomaly width (defined as the distance between the surrounding troughs), and the width at the half-amplitude. The half-amplitude width is calculated in two parts, individually for each side based on the distance from the maximum to the location where the amplitude is mid-way between the maximum and trough. The returned VVs are packed; no dummies. 
        Instead the indices of the peak locations are returned.
        """
        gxapi_cy.WrapVVU._pick_peak3(GXContext._get_tls_geo(), vv_i, vv_x, vv_y, pr_base_lvl, pr_ampl, v_vind, v_vamp, v_vwid, v_vhawid)



    @classmethod
    def poly_fill(cls, vv_d, order, vv_c):
        """
        Fill a `GXVV <geosoft.gxapi.GXVV>` with values from an n'th order polynomial, integral x.

        :param vv_d: `GXVV <geosoft.gxapi.GXVV>` with output data. (Preset length)
        :param order: Order of the polynomial 0-9
        :param vv_c: `GXVV <geosoft.gxapi.GXVV>` with polynomial coefficients (input)
        :type vv_d: GXVV
        :type order: int
        :type vv_c: GXVV

        .. versionadded:: 5.0.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The output `GXVV <geosoft.gxapi.GXVV>` length must be set
        as desired before calling.

        The X scale is unitless (1 per element), i.e. 0,1,2,3,...

        .. seealso:: `trend <geosoft.gxapi.GXVVU.trend>`, `trend2 <geosoft.gxapi.GXVVU.trend2>`, `poly_fill2 <geosoft.gxapi.GXVVU.poly_fill2>`
        """
        gxapi_cy.WrapVVU._poly_fill(GXContext._get_tls_geo(), vv_d, order, vv_c)



    @classmethod
    def poly_fill2(cls, vv_x, vv_d, order, vv_c):
        """
        Fill a `GXVV <geosoft.gxapi.GXVV>` with values from an n'th order polynomial, specified X

        :param vv_x: `GXVV <geosoft.gxapi.GXVV>` with x spacing (input)
        :param vv_d: `GXVV <geosoft.gxapi.GXVV>` with output data. (Preset length)
        :param order: Order of the polynomial 0-9
        :param vv_c: `GXVV <geosoft.gxapi.GXVV>` with polynomial coefficients (order+1 values)
        :type vv_x: GXVV
        :type vv_d: GXVV
        :type order: int
        :type vv_c: GXVV

        .. versionadded:: 5.0.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The output `GXVV <geosoft.gxapi.GXVV>` length must be set
        as desired before calling.

        The X scale is defined by a X `GXVV <geosoft.gxapi.GXVV>` (see Trend_VV for unitless X).

        ..
seealso:: `trend <geosoft.gxapi.GXVVU.trend>`, `trend2 <geosoft.gxapi.GXVVU.trend2>`, `poly_fill <geosoft.gxapi.GXVVU.poly_fill>` """ gxapi_cy.WrapVVU._poly_fill2(GXContext._get_tls_geo(), vv_x, vv_d, order, vv_c) @classmethod def polygon_mask(cls, vv_x, vv_y, vv_m, pply, mask): """ Mask a `GXVV <geosoft.gxapi.GXVV>` using XY data and a polygon. :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` :param vv_m: `GXVV <geosoft.gxapi.GXVV>` to be masked :param pply: `GXPLY <geosoft.gxapi.GXPLY>` object :param mask: :ref:`VVU_MASK` :type vv_x: GXVV :type vv_y: GXVV :type vv_m: GXVV :type pply: GXPLY :type mask: int .. versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The VVs have to be the same length """ gxapi_cy.WrapVVU._polygon_mask(GXContext._get_tls_geo(), vv_x, vv_y, vv_m, pply, mask) @classmethod def prune(cls, vv_p, vv_r, o): """ Prune values from a `GXVV <geosoft.gxapi.GXVV>` based on reference `GXVV <geosoft.gxapi.GXVV>` :param vv_p: `GXVV <geosoft.gxapi.GXVV>` to prune :param vv_r: Reference `GXVV <geosoft.gxapi.GXVV>` :param o: :ref:`VVU_PRUNE` :type vv_p: GXVV :type vv_r: GXVV :type o: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Pruning will shorten the `GXVV <geosoft.gxapi.GXVV>` by removing values that are either dummy or non-dummy in the reference `GXVV <geosoft.gxapi.GXVV>` """ gxapi_cy.WrapVVU._prune(GXContext._get_tls_geo(), vv_p, vv_r, o) @classmethod def qc(cls, vv_i, vv_d, v_vf, nominal, max_tol, all_tol, dist, qc): """ Quality control on deviation of data from norm in a `GXVV <geosoft.gxapi.GXVV>` :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` on which to apply quality control Required in `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param vv_d: Distance `GXVV <geosoft.gxapi.GXVV>` (NULL if criterion #2 does not apply). In `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param v_vf: Output flag `GXVV <geosoft.gxapi.GXVV>` with result 0,1,2,3,-1,-2,-3. Required in `GS_BYTE <geosoft.gxapi.GS_BYTE>` :param nominal: Nominal reading (required, must not be `GS_R8DM <geosoft.gxapi.GS_R8DM>`) :param max_tol: Maximum tolerance/deviation applied to a single reading (criterion #1). `GS_R8DM <geosoft.gxapi.GS_R8DM>` if criterion #1 does not apply. Otherwise, must be positive value including 0.0 :param all_tol: Allowed tolerance/deviation over a given distance (next parameter) (criterion #2). `GS_R8DM <geosoft.gxapi.GS_R8DM>` if criterion #2 does not apply. Otherwise, must be positive value including 0.0 :param dist: The specified distance. `GS_R8DM <geosoft.gxapi.GS_R8DM>` if criterion #2 does not apply. Otherwise, must be positive value excluding 0.0 :param qc: :ref:`QC_CRITERION` :type vv_i: GXVV :type vv_d: GXVV :type v_vf: GXVV :type nominal: float :type max_tol: float :type all_tol: float :type dist: float :type qc: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This function tests data in input `GXVV <geosoft.gxapi.GXVV>` against two separate criteria. Each element of the output `GXVV <geosoft.gxapi.GXVV>` will have one of the following indicators: ========= ============================================================== Indicator Meaning ========= ============================================================== 0 Input data passed both tests --------- -------------------------------------------------------------- 1 The input data and is greater than the nominal value plus maximum tolerance/deviation (Criterion #1) --------- -------------------------------------------------------------- 2 The input data over a specified distance is greater than the nominal value plus allowed tolerance (Criterion #2) --------- -------------------------------------------------------------- 3 The input data failed on above two tests --------- -------------------------------------------------------------- -1 The input data and is less than the nominal value minus maximum tolerance (Criterion #1) --------- -------------------------------------------------------------- -2 The input data over a specified distance is less than the nominal value minus allowed tolerance (Criterion #2) --------- -------------------------------------------------------------- -3 The input data failed on above two tests ========= ============================================================== """ gxapi_cy.WrapVVU._qc(GXContext._get_tls_geo(), vv_i, vv_d, v_vf, nominal, max_tol, all_tol, dist, qc) @classmethod def qc2(cls, vv_i, vv_d, v_vf, vv_drape, max_tol, all_tol, dist, qc): """ Quality control on deviation of data from norm in a `GXVV <geosoft.gxapi.GXVV>` :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` on which to apply quality control Required in `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT 
<geosoft.gxapi.GS_FLOAT>` :param vv_d: Distance `GXVV <geosoft.gxapi.GXVV>` (NULL if criterion #2 does not apply). In `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param v_vf: Output flag `GXVV <geosoft.gxapi.GXVV>` with result 0,1,2,3,-1,-2,-3. Required in `GS_BYTE <geosoft.gxapi.GS_BYTE>` :param vv_drape: Drape elevation `GXVV <geosoft.gxapi.GXVV>` which is used instead of a constant nominal terrain clearance Required in `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` or `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :param max_tol: Maximum tolerance/deviation applied to a single reading (criterion #1). `GS_R8DM <geosoft.gxapi.GS_R8DM>` if criterion #1 does not apply. Otherwise, must be positive value including 0.0 :param all_tol: Allowed tolerance/deviation over a given distance (next parameter) (criterion #2). `GS_R8DM <geosoft.gxapi.GS_R8DM>` if criterion #2 does not apply. Otherwise, must be positive value including 0.0 :param dist: The specified distance. `GS_R8DM <geosoft.gxapi.GS_R8DM>` if criterion #2 does not apply. Otherwise, must be positive value excluding 0.0 :param qc: :ref:`QC_CRITERION` :type vv_i: GXVV :type vv_d: GXVV :type v_vf: GXVV :type vv_drape: GXVV :type max_tol: float :type all_tol: float :type dist: float :type qc: int .. versionadded:: 2022.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This function tests data in input `GXVV <geosoft.gxapi.GXVV>` against two separate criteria. 
Each element of the output `GXVV <geosoft.gxapi.GXVV>` will have one of the following indicators: ========= ============================================================== Indicator Meaning ========= ============================================================== 0 Input data passed both tests --------- -------------------------------------------------------------- 1 The input data and is greater than the nominal value plus maximum tolerance/deviation (Criterion #1) --------- -------------------------------------------------------------- 2 The input data over a specified distance is greater than the nominal value plus allowed tolerance (Criterion #2) --------- -------------------------------------------------------------- 3 The input data failed on above two tests --------- -------------------------------------------------------------- -1 The input data and is less than the nominal value minus maximum tolerance (Criterion #1) --------- -------------------------------------------------------------- -2 The input data over a specified distance is less than the nominal value minus allowed tolerance (Criterion #2) --------- -------------------------------------------------------------- -3 The input data failed on above two tests ========= ============================================================== """ gxapi_cy.WrapVVU._qc2(GXContext._get_tls_geo(), vv_i, vv_d, v_vf, vv_drape, max_tol, all_tol, dist, qc) @classmethod def range_vector_mag(cls, vv1, vv2, min, max): """ Find the range of hypotenuse values of two VVs. :param vv1: First `GXVV <geosoft.gxapi.GXVV>` (X) :param vv2: First `GXVV <geosoft.gxapi.GXVV>` (Y) :param min: Min value (returned) :param max: Max value (returned) :type vv1: GXVV :type vv2: GXVV :type min: float_ref :type max: float_ref .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** For each value in the VVs, finds sqrt(dV1*dV1 + dV2*dV2) and returns the min and max values. """ min.value, max.value = gxapi_cy.WrapVVU._range_vector_mag(GXContext._get_tls_geo(), vv1, vv2, min.value, max.value) @classmethod def regress(cls, vv_x, vv_y, slp, intercept): """ Calculate linear regression through data :param vv_x: X data :param vv_y: Y data :param slp: Returns slope :param intercept: Returns intercept :type vv_x: GXVV :type vv_y: GXVV :type slp: float_ref :type intercept: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ slp.value, intercept.value = gxapi_cy.WrapVVU._regress(GXContext._get_tls_geo(), vv_x, vv_y, slp.value, intercept.value) @classmethod def rel_var_dup(cls, data_vv, sample_vv, rel_var, num_dup): """ Estimate relative variance of duplicate sample pairs from a database. :param data_vv: Data `GXVV <geosoft.gxapi.GXVV>` :param sample_vv: Sample Type `GXVV <geosoft.gxapi.GXVV>` :param rel_var: Returned relative variance :param num_dup: Returned number of duplicates used. :type data_vv: GXVV :type sample_vv: GXVV :type rel_var: float_ref :type num_dup: int_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Created for duplicate sample handling in `GXCHIMERA <geosoft.gxapi.GXCHIMERA>`. On input, a numeric or text `GXVV <geosoft.gxapi.GXVV>` containing data values, and a sample type `GXVV <geosoft.gxapi.GXVV>`. Sample pairs have types "1" and "2". 
This routine searches for types in order "1 2 1 2", and calulates the unnormalized relative variance, defined as the sum of the squared differences between duplicates divided by the sum of the squared mean values of the duplicates. (To get the true rel.var., divide by N-1, where N is the number of duplicate pairs used.) Samples out of order, unmatched pairs, or when the sample type does not equal "1" or "2" are ignored. """ rel_var.value, num_dup.value = gxapi_cy.WrapVVU._rel_var_dup(GXContext._get_tls_geo(), data_vv, sample_vv, rel_var.value, num_dup.value) @classmethod def remove_dummy(cls, vv): """ Remove dummy values from a `GXVV <geosoft.gxapi.GXVV>` :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapVVU._remove_dummy(GXContext._get_tls_geo(), vv) @classmethod def remove_dummy2(cls, vv1, vv2): """ Remove dummy values from 2 VVs. :param vv1: `GXVV <geosoft.gxapi.GXVV>` object :param vv2: `GXVV <geosoft.gxapi.GXVV>` object :type vv1: GXVV :type vv2: GXVV .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Removes all indices where either `GXVV <geosoft.gxapi.GXVV>` has a dummy, or is not defined (due to length differences). """ gxapi_cy.WrapVVU._remove_dummy2(GXContext._get_tls_geo(), vv1, vv2) @classmethod def remove_dummy3(cls, vv1, vv2, vv3): """ Remove dummy values from 3 VVs. :param vv1: `GXVV <geosoft.gxapi.GXVV>` object :param vv2: `GXVV <geosoft.gxapi.GXVV>` object :param vv3: `GXVV <geosoft.gxapi.GXVV>` object :type vv1: GXVV :type vv2: GXVV :type vv3: GXVV .. 
versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Removes all indices where any `GXVV <geosoft.gxapi.GXVV>` has a dummy, or is not defined (due to length differences). """ gxapi_cy.WrapVVU._remove_dummy3(GXContext._get_tls_geo(), vv1, vv2, vv3) @classmethod def remove_dummy4(cls, vv1, vv2, vv3, vv4): """ Remove dummy values from 4 VVs. :param vv1: `GXVV <geosoft.gxapi.GXVV>` object :param vv2: `GXVV <geosoft.gxapi.GXVV>` object :param vv3: `GXVV <geosoft.gxapi.GXVV>` object :param vv4: `GXVV <geosoft.gxapi.GXVV>` object :type vv1: GXVV :type vv2: GXVV :type vv3: GXVV :type vv4: GXVV .. versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Removes all indices where any `GXVV <geosoft.gxapi.GXVV>` has a dummy, or is not defined (due to length differences). """ gxapi_cy.WrapVVU._remove_dummy4(GXContext._get_tls_geo(), vv1, vv2, vv3, vv4) @classmethod def remove_dup(cls, data_vv, sample_vv, output): """ Remove/average duplicate sample pairs from a database. :param data_vv: Data `GXVV <geosoft.gxapi.GXVV>` :param sample_vv: Sample Type `GXVV <geosoft.gxapi.GXVV>` :param output: :ref:`VV_DUP` :type data_vv: GXVV :type sample_vv: GXVV :type output: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Created for duplicate sample handling in `GXCHIMERA <geosoft.gxapi.GXCHIMERA>`. On input, a numeric or text `GXVV <geosoft.gxapi.GXVV>` containing data values, and a sample type `GXVV <geosoft.gxapi.GXVV>`. Sample pairs have types "1" and "2". This routine searches for types in order "1 2 1 2", and replaces the pair of values in the data `GXVV <geosoft.gxapi.GXVV>` according to the :ref:`VV_DUP` value. 
Results for samples out of order, for unmatched pairs, or when the sample type does not equal "1" or "2" remain unchanged. """ gxapi_cy.WrapVVU._remove_dup(GXContext._get_tls_geo(), data_vv, sample_vv, output) @classmethod def remove_xy_dup(cls, xvv, yvv, zvv, xy_dup): """ Remove/average duplicate samples with the same (X, Y). :param xvv: X `GXVV <geosoft.gxapi.GXVV>` :param yvv: Y `GXVV <geosoft.gxapi.GXVV>` :param zvv: (optional) Z `GXVV <geosoft.gxapi.GXVV>` :param xy_dup: :ref:`VV_XYDUP` :type xvv: GXVV :type yvv: GXVV :type zvv: GXVV :type xy_dup: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Searches for duplicated (X, Y) locations and removes the duplicates (can be more than just a pair). The "Z" values, if defined, are treated according to the value of :ref:`VV_XYDUP`. The returned VVs are shortened to the new length, without duplicates. The Z `GXVV <geosoft.gxapi.GXVV>` can be set to NULL on input, in which case it is ignored. """ gxapi_cy.WrapVVU._remove_xy_dup(GXContext._get_tls_geo(), xvv, yvv, zvv, xy_dup) @classmethod def remove_xy_dup_index(cls, xvv, yvv, index_vv): """ Remove duplicate samples with the same (X, Y) and update index. :param xvv: X `GXVV <geosoft.gxapi.GXVV>` :param yvv: Y `GXVV <geosoft.gxapi.GXVV>` :param index_vv: Index `GXVV <geosoft.gxapi.GXVV>` :type xvv: GXVV :type yvv: GXVV :type index_vv: GXVV .. versionadded:: 7.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Searches for duplicated (X, Y) locations and removes the duplicates (can be more than just a pair). The Index `GXVV <geosoft.gxapi.GXVV>` is updated accordingly .i.e if (X,Y) location of Index[0] == Index[1] Index[1] is removed. 
""" gxapi_cy.WrapVVU._remove_xy_dup_index(GXContext._get_tls_geo(), xvv, yvv, index_vv) @classmethod def rolling_stats(cls, vv_i, vv_o, stat, window, shrink): """ Calculate a statistic in a rolling window. :param vv_i: Input `GXVV <geosoft.gxapi.GXVV>` :param vv_o: Output `GXVV <geosoft.gxapi.GXVV>` :param stat: :ref:`ST_INFO` :param window: Window size (>0, increased to nearest odd value) :param shrink: Shrink window at ends (1:Yes, 0:No) :type vv_i: GXVV :type vv_o: GXVV :type stat: int :type window: int :type shrink: int .. versionadded:: 5.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the input VVs are not REAL, copies are made to temporary REALs for processing. If the window size is even, it is increased by 1 so that the output value is put at the exact center index of the window. Statistics are calculated on the values in a window surrounding the individual data points. By shrinking the window at the ends, one-sided effects can be eliminated. For instance, if the data is linear to begin with, a rolling mean will not alter the original data. However, if the window size is kept constant, then values near the ends tend to be pulled up or down. With shrinking, the window is shrunk so that it always has the same width on both sides of the data point under analysis; at the end points the window width is 1, at the next point in it is 3, and so on, until the full width is reached. The median value is calculated by sorting the valid data in the window, then selecting the middle value. If the number of valid data points is even, then the average of the two central values is returned. The mode value is defined as the value which occurs most frequently in the data. This value may not even exist, or may not be unique. In this implementation, the following algorithm is used: The valid data in the window is sorted in ascending order. 
The number of occurrences of each data value is tracked, and if it occurs more times than any value, it becomes the modal value. If all values are different, this procedure returns the smallest value. If two or more values each have the same (maximum) number of occurrences, then the smallest of these values is returned. """ gxapi_cy.WrapVVU._rolling_stats(GXContext._get_tls_geo(), vv_i, vv_o, stat, window, shrink) @classmethod def search_replace(cls, vv, val, rpl): """ Search and replace numeric values in a `GXVV <geosoft.gxapi.GXVV>`. :param val: Value to replace :param rpl: Replacement :type vv: GXVV :type val: float :type rpl: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Search comparison is made on double comparison of the data. .. seealso:: SearchReplaceText_VV """ gxapi_cy.WrapVVU._search_replace(GXContext._get_tls_geo(), vv, val, rpl) @classmethod def search_replace_text(cls, vv, format, decimal, val, rpl, mode): """ Search and replace text values in a `GXVV <geosoft.gxapi.GXVV>` :param format: String format for numeric `GXVV <geosoft.gxapi.GXVV>` :param decimal: Decimals for formating numeric `GXVV <geosoft.gxapi.GXVV>` :param val: Formatted string to replace :param rpl: Replacement :param mode: :ref:`VVU_SRCHREPL_CASE` :type vv: GXVV :type format: int :type decimal: int :type val: str :type rpl: str :type mode: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Search comparison is made on string comparison of the data. .. 
seealso:: SearchReplace_VV """ gxapi_cy.WrapVVU._search_replace_text(GXContext._get_tls_geo(), vv, format, decimal, val.encode(), rpl.encode(), mode) @classmethod def search_replace_text_ex(cls, vv, format, decimal, val, rpl, mode, items): """ Search and replace text values in a `GXVV <geosoft.gxapi.GXVV>`, count items changed. :param format: String format for numeric `GXVV <geosoft.gxapi.GXVV>` :param decimal: Decimals for formating numeric `GXVV <geosoft.gxapi.GXVV>` :param val: Formatted string to replace :param rpl: Replacement :param mode: :ref:`VVU_SRCHREPL_CASE` :param items: Number of items replaced (returned) :type vv: GXVV :type format: int :type decimal: int :type val: str :type rpl: str :type mode: int :type items: int_ref .. versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Search comparison is made on a string comparison of the data. .. seealso:: SearchReplaceText_VV """ items.value = gxapi_cy.WrapVVU._search_replace_text_ex(GXContext._get_tls_geo(), vv, format, decimal, val.encode(), rpl.encode(), mode, items.value) @classmethod def spline(cls, vv_x, vv_y, vv_o, length, start, incr, gap, ext, type): """ Spline a Y `GXVV <geosoft.gxapi.GXVV>` onto an X `GXVV <geosoft.gxapi.GXVV>`. :param vv_x: X (no dummies) :param vv_y: Y to be splined (no dummies) :param vv_o: Y output :param length: Output Length :param start: Starting Location :param incr: Separation Distance :param gap: Maximum gap to interpolate across :param ext: Number of elements to extend :param type: :ref:`VVU_SPL` :type vv_x: GXVV :type vv_y: GXVV :type vv_o: GXVV :type length: int :type start: float :type incr: float :type gap: float :type ext: int :type type: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._spline(GXContext._get_tls_geo(), vv_x, vv_y, vv_o, length, start, incr, gap, ext, type) @classmethod def spline2(cls, vv_x, vv_y, vv_x2, vv_o, type): """ Spline a Y `GXVV <geosoft.gxapi.GXVV>` onto an X `GXVV <geosoft.gxapi.GXVV>`. Uses specified values of X in X2 :param vv_x: X (no dummies) :param vv_y: Y to be splined (no dummies) :param vv_x2: X2 (no dummies) :param vv_o: Y output :param type: :ref:`VVU_SPL` :type vv_x: GXVV :type vv_y: GXVV :type vv_x2: GXVV :type vv_o: GXVV :type type: int .. versionadded:: 5.1.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVVU._spline2(GXContext._get_tls_geo(), vv_x, vv_y, vv_x2, vv_o, type) @classmethod def tokenize_to_values(cls, vv, str_val): """ Tokenize a string based on any characters. :param vv: `GXVV <geosoft.gxapi.GXVV>` to place values in :param str_val: Str - String to parse :type vv: GXVV :type str_val: str :returns: Number of tokens (length of `GXVV <geosoft.gxapi.GXVV>`) :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Parses a series of space, tab or comma-delimited values to a `GXVV <geosoft.gxapi.GXVV>`. """ ret_val = gxapi_cy.WrapVVU._tokenize_to_values(GXContext._get_tls_geo(), vv, str_val.encode()) return ret_val @classmethod def translate(cls, vv, base, mult): """ Translate values in a `GXVV <geosoft.gxapi.GXVV>` :param base: Base :param mult: Scale :type vv: GXVV :type base: float :type mult: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** (new `GXVV <geosoft.gxapi.GXVV>`) = ((old `GXVV <geosoft.gxapi.GXVV>`) + base) * scale """ gxapi_cy.WrapVVU._translate(GXContext._get_tls_geo(), vv, base, mult) @classmethod def trend(cls, vv_d, order, vv_c): """ Calculate an n'th order best-fit polynomial, integral x. :param vv_d: `GXVV <geosoft.gxapi.GXVV>` with input data :param order: Order of the polynomial 0-9 :param vv_c: `GXVV <geosoft.gxapi.GXVV>` to hold polynomial coefficients (returned). :type vv_d: GXVV :type order: int :type vv_c: GXVV .. versionadded:: 5.0.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Returns coefficients c[0] .. c[n] Y(x) = c[0] + c[1]x + c[2](x**2) + ... + c[n](x**n) The X scale is unitless (1 per element), i.e. 0,1,2,3,... The polynomial `GXVV <geosoft.gxapi.GXVV>` length is set to the number of coefficients (order + 1) .. seealso:: `poly_fill <geosoft.gxapi.GXVVU.poly_fill>`, `trend2 <geosoft.gxapi.GXVVU.trend2>`, `poly_fill2 <geosoft.gxapi.GXVVU.poly_fill2>` """ gxapi_cy.WrapVVU._trend(GXContext._get_tls_geo(), vv_d, order, vv_c) @classmethod def trend2(cls, vv_x, vv_d, order, vv_c): """ Calculate an n'th order best-fit polynomial, specified X :param vv_x: `GXVV <geosoft.gxapi.GXVV>` with x spacing (input) :param vv_d: `GXVV <geosoft.gxapi.GXVV>` with input data :param order: Order of the polynomial 0-9 :param vv_c: `GXVV <geosoft.gxapi.GXVV>` to hold polynomial coefficients (returned) :type vv_x: GXVV :type vv_d: GXVV :type order: int :type vv_c: GXVV .. versionadded:: 5.0.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Returns coefficients c[0] .. c[n] Y(x) = c[0] + c[1]x + c[2](x**2) + ... 
+ c[n](x**n) The X scale is defined by a X `GXVV <geosoft.gxapi.GXVV>` (see Trend_VV for unitless X). The polynomial `GXVV <geosoft.gxapi.GXVV>` length is set to the number of coefficients (order + 1) .. seealso:: `poly_fill <geosoft.gxapi.GXVVU.poly_fill>`, `trend2 <geosoft.gxapi.GXVVU.trend2>`, `poly_fill2 <geosoft.gxapi.GXVVU.poly_fill2>` """ gxapi_cy.WrapVVU._trend2(GXContext._get_tls_geo(), vv_x, vv_d, order, vv_c) @classmethod def uniform_dist(cls, vv, seed, min, max, length): """ Fill with uniformly distributed values. :param vv: `GXVV <geosoft.gxapi.GXVV>` object :param seed: Random number generator seed :param min: Minimum of range :param max: Maximum of range :param length: Number of values (-1 for all) :type vv: GXVV :type seed: int :type min: float :type max: float :type length: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** `GXVV <geosoft.gxapi.GXVV>` is set to input length (except for -1) See rand.gxh for a short discription of the random number generator used. 
""" gxapi_cy.WrapVVU._uniform_dist(GXContext._get_tls_geo(), vv, seed, min, max, length) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/Geosoft Project/get_data_files.py import geosoft.gxpy.gx as gx import geosoft.gxpy.utility as gxu gxc = gx.GXpy() url = 'https://github.com/GeosoftInc/gxpy/raw/9.3/examples/tutorial/Geosoft%20Project/' gxu.url_retrieve(url + 'example.gpf') gxu.url_retrieve(url + 'geosoft_project.gpf') gxu.url_retrieve(url + 'TMI.GRD') gxu.url_retrieve(url + 'TMI.GRD.gi') gxu.url_retrieve(url + 'TMI.GRD.xml')<file_sep>/geosoft/gxpy/tests/test_va.py import numpy as np import unittest import os import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.va as gxva import geosoft.gxpy.utility as gxu from base import GXPYTest class Test(GXPYTest): def test_va(self): self.start() self.assertEqual(gxva.__version__, geosoft.__version__) with gxva.GXva(width=12, dtype=np.float) as va: self.assertTrue(isinstance(va.gxva, gxapi.GXVA)) self.assertEqual(va.fid, (0.0,1.0)) self.assertEqual(va.width, 12) fid = (10.1,0.99) with gxva.GXva(width=7, dtype=np.float, fid=fid) as va: self.assertEqual(va.fid, fid) self.assertEqual(va.width, 7) fid = (-45,7) va.fid = fid self.assertEqual(va.fid,fid) va.refid((-40,8),4) self.assertEqual(va.fid,(-40,8)) self.assertEqual(va.length,4) self.assertEqual(va.dimensions, (4,7)) self.assertEqual(va.gxtype, gxu.gx_dtype(np.float)) self.assertEqual(va.np.shape, (4, 7)) va.length = 16 self.assertEqual(va.fid,(-40,8)) self.assertEqual(va.length,16) self.assertEqual(va.dimensions, (16,7)) self.assertEqual(va.gxtype, gxu.gx_dtype(np.float)) self.assertEqual(va.np.shape, (16, 7)) def test_exceptions(self): self.start() self.assertRaises(gxva.VAException, gxva.GXva, np.array([["bones", "queens", 
"geology"], ["a", "b", "c"]])) with gxva.GXva([[1, 2, 3, 4, 5, 6, 7]], width=7, dtype=np.float) as va: self.assertRaises(gxva.VAException, va.get_data, dtype="U7") with gxva.GXva(np.array(range(45)).reshape((9, 5))) as va: self.assertRaises(gxva.VAException, va.get_data, n=0) with gxva.GXva(np.array(range(45)).reshape((9, 5))) as va: self.assertRaises(gxva.VAException, va.get_data, n_col=0) with gxva.GXva(np.array(range(40)).reshape((20, 2))) as va: self.assertRaises(gxva.VAException, va.set_data, np.array(range(3))) def test_np(self): self.start() fid = (99,0.1) npdata = np.array(range(45)).reshape((9,5)) with gxva.GXva(npdata, fid=fid) as va: self.assertEqual(va.fid, fid) self.assertEqual(va.length, npdata.shape[0]) self.assertEqual(va.width, npdata.shape[1]) np2 = va.get_data(va.dtype) self.assertEqual(np2[0].shape, npdata.shape) np2,fid2 = va.get_data(dtype=va.dtype, start=1) self.assertEqual(fid2,(99.1,.1)) self.assertEqual(np2.shape, (8, 5)) self.assertEqual(va.get_data(start=6)[0].shape, (3, 5)) try: self.assertEqual(va.get_data(dtype=va.dtype, start=50)[0].shape, (0,)) self.assertTrue(False) except gxva.VAException: pass np3,fid3 = va.get_data(np.int) self.assertEqual(fid3,fid) self.assertEqual(np3[0, 0], 0) self.assertEqual(np3[1, 4], 9) np3, fid3 = va.get_data(np.float64) self.assertEqual(fid3, fid) self.assertEqual(np3[0, 0], 0.0) self.assertEqual(np3[1, 4], 9.0) np3, fid3 = va.get_data(np.float64, n=2) self.assertEqual(fid3, fid) self.assertEqual(np3.shape[0], 2) self.assertEqual(np3[0, 0], 0.0) self.assertEqual(np3[1, 4], 9.0) np3, fid3 = va.get_data(np.float64, n=99) self.assertEqual(fid3, fid) self.assertEqual(np3.shape[0], va.length) np3, fid3 = va.get_data(np.float64, n_col=3) self.assertEqual(fid3, fid) self.assertEqual(np3.shape[1], 3) np3, fid3 = va.get_data(np.float64, n_col=99) self.assertEqual(fid3, fid) self.assertEqual(np3.shape[1], va.width) npdata = np.array(range(64), dtype=np.int).reshape(4, 16) npdata[1, 2] = gxapi.iDUMMY with 
gxva.GXva(npdata, fid=fid) as va: np3, fid = va.get_data(dtype=np.int64) self.assertEqual(np3[0, 0], 0.) self.assertEqual(np3[2, 11], 43) self.assertEqual(np3[1, 2], gxapi.GS_S8DM) np3, fid = va.get_data(dtype=np.int32) self.assertEqual(np3[0, 0], 0.) self.assertEqual(np3[2, 11], 43) self.assertEqual(np3[1, 2], gxapi.GS_S4DM) self.assertEqual(np3[1, 2], gxapi.iDUMMY) np3, fid = va.get_data(np.float) self.assertEqual(np3[0, 0], 0.) self.assertEqual(np3[2, 11], 43.) self.assertTrue(np.isnan(np3[1, 2])) d = np.array(range(32), dtype=np.int).reshape(-1, va.width) d[0,3] = gxu.gx_dummy(d.dtype) va.set_data(d) np3, fid = va.get_data(dtype=np.int32) self.assertEqual(np3.shape[0], 2) self.assertEqual(np3[0,0], 0) self.assertEqual(np3[1,15], 31) self.assertEqual(np3[0, 3], gxapi.GS_S4DM) def test_iterator(self): self.start() npdata = np.array(range(45)).reshape((9, 5)) with gxva.GXva(npdata) as va: self.assertEqual(tuple(va[0][0]), (0, 1, 2, 3, 4)) self.assertEqual(va[4][1], 4) list2d = [v[0] for v in va] self.assertEqual(tuple(list2d[1]), (5, 6, 7, 8, 9)) self.assertEqual(va.np.shape, (9, 5)) def test_strings(self): self.start() fidva = (99,0.1) npdata = np.array(["name","maki","neil","macleod"]).reshape(2,2) self.assertRaises(gxva.VAException, gxva.GXva, npdata, fid=fidva) def test_uom(self): self.start() npdata = np.array(range(45)).reshape((9, 5)) with gxva.GXva(npdata, unit_of_measure='maki') as va: self.assertEqual(va.unit_of_measure, 'maki') va.unit_of_measure = 'nT' self.assertEqual(va.unit_of_measure, 'nT') def test_dummy_nan(self): self.start() npdata = np.array(range(45), dtype=np.float32).reshape((9, 5)) npdata[0,1] = np.nan with gxva.GXva(npdata) as va: self.assertTrue(npdata[0,1]) self.assertEqual(tuple(va[0][0])[4], 4.) 
self.assertTrue(np.isnan(va[0][0][1])) def test_empty(self): self.start() empty = np.array([[],[]]) va = gxva.GXva(empty, width=2) self.assertEqual(len(va), 0) self.assertEqual(va.np.size, 0) va.set_data(empty) self.assertEqual(len(va), 0) self.assertEqual(va.np.size, 0) ############################################################################################## if __name__ == '__main__': unittest.main()<file_sep>/geosoft/gxpy/spatialdata.py """ Geosoft spatial data base class. Spatial datasets are collections of geometric objects that have associated data, typically persisting in a named file. Examples are Geosoft databases, grids, voxels, geosoft_surfaces. :Classes: ============= =================================================================================== `SpatialData` base class for Geosoft spatial data, inherits from `geosoft.gxpy.geometry.Geometry` ============= =================================================================================== **Constants** `mode=` file open mode constants ============== == ==================================================== FILE_READ 0 open for read, files are not changed FILE_READWRITE 1 open for read and write, files can be changed FILE_NEW 2 new file, accompanied by `overwrite=` parameter ============== == ==================================================== """ import os import geosoft import geosoft.gxapi as gxapi from . import gx as gx from . import utility as gxu from . import geometry as gxgm from . import coordinate_system as gxcs __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class SpatialException(geosoft.GXRuntimeError): """ Exceptions from `geosoft.gxpy.spatial_data`. """ pass def delete_files(file_name): """ Delete file and xml file :param file_name: base file name .. 
versionadded:: 9.3.1 """ def df(fn): try: os.remove(fn) except OSError: pass if file_name: df(file_name) df(file_name + '.xml') MODE_READ = 0 MODE_READWRITE = 1 MODE_NEW = 2 def find_meta_branch(meta, item): """ Return the lowest branch in the meta dictionary that contains the item. .. versionadded:: 9.3.1 """ if item in meta: return meta for key, value in meta.items(): if isinstance(value, dict): if item in value: return value for key, value in meta.items(): if isinstance(value, dict): return find_meta_branch(value, item) return None def coordinate_system_from_metadata(meta): """ Return a `geosoft.gxpy.coordinate_system.Coordinate_system` instance from metadata. :param meta: metadata dictionary :return: `geosoft.gxpy.coordinate_system.Coordinate_system`, or None """ try: geometa = find_meta_branch(meta, 'geosoft') if geometa: projection = find_meta_branch(geometa['geosoft'], 'projection') if projection: return gxcs.Coordinate_system(projection['projection']) except: pass return None def coordinate_system_from_metadata_file(file_name): """ Return a `geosoft.gxpy.coordinate_system.Coordinate_system` instance from metadata. :param file_name: spatial dataset name. :return: `geosoft.gxpy.coordinate_system.Coordinate_system`, or None """ return coordinate_system_from_metadata(gxu.geosoft_metadata(file_name)) def extent_from_metadata(meta): """ Return spatial dataset extent from geosoft metadata. :param meta: metadata dictionary :return: `geosoft.gxpy.geometry.Point2` instance .. 
versionadded:: 9.3.1 """ meta = find_meta_branch(meta, 'geosoft') if meta: cs = coordinate_system_from_metadata(meta) try: ex = meta['geosoft']['dataset']['georeference']['dataextents']['extent3d'] minp = gxgm.Point((float(ex['@minx']), float(ex['@miny']), float(ex['@minz']))) maxp = gxgm.Point((float(ex['@maxx']), float(ex['@maxy']), float(ex['@maxz']))) return gxgm.Point2((minp, maxp), cs) except KeyError: pass return None def extent_from_metadata_file(file_name): """ Return spatial dataset extent from file metadata .xml file :param file_name: spatial dataset file :return: `geosoft.gxpy.geometry.Point2` instance .. versionadded:: 9.3.1 """ return extent_from_metadata(gxu.geosoft_metadata(file_name)) class SpatialData(gxgm.Geometry): """ Base class for spatial datasets. :param name: dataset name. :param file_name: file name for this dataset. :param mode: file mode, MODE_READ, MODE_READWRITE or MODE_NEW. The default is MODE_NEW. :param overwrite: Default is False. If True will raise an error if MODE_NEW and file_name exists. :param gxobj: Base GXAPI spatial dataset object, default is None. If passed the base object is used to resolve common named methods like *`get_ipj()`*. :Properties: properties of `geosoft.gxpy.geometry.Geometry` plus: ================== ============================================================= `file_name` file name `metadata` metadata dictionary `unit_of_measure` primary data unit of measurement ================== ============================================================= .. 
versionadded:: 9.3.1 """ def __enter__(self): return self def __exit__(self, _type, value, traceback): self.__del__() def __del__(self): if hasattr(self, '_close'): self._close() def _close(self): if hasattr(self, '_open'): if self._open: gx.pop_resource(self._open) self._open = None if self.file_name and self._metadata_changed and self._mode != MODE_READ: with open(self._file_name + '.xml', 'w+') as f: f.write(gxu.xml_from_dict(self._metadata)) self._metadata = None self._gxobj = None def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return self.name def __init__(self, name=None, file_name=None, mode=MODE_NEW, overwrite=False, **kwargs): if name is None: if file_name: name = os.path.splitext(os.path.basename(file_name))[0] super().__init__(name=name, **kwargs) if file_name is None: if mode != MODE_NEW: raise SpatialException(_t('Cannot read from an unnammed dataset')) else: if mode == MODE_NEW: if self.gxobj is None and not overwrite and os.path.exists(file_name): raise SpatialException(_t('\'{}\' exists. Use overwrite=True to overwrite existing dataset file.'). format(file_name)) else: if not os.path.exists(file_name): raise SpatialException(_t('Cannot find dataset file \'{}\''). format(file_name)) self._file_name = file_name self._mode = mode self._metadata = None self._metadata_changed = False self._metadata_root = '' self._open = gx.track_resource(self.__class__.__name__, self._name) def _init_metadata(self): if not self._metadata: self._metadata = gxu.geosoft_metadata(self._file_name) self._metadata_root = tuple(self._metadata.items())[0][0] @property def file_name(self): """dataset primary file name""" return self._file_name def close(self): """close the dataset.""" self._close() @property def dataset_mode(self): """Dataset open mode""" return self._mode @property def metadata(self): """ Return the dataset metadata as a dictionary. 
Can be set, in which case the dictionary items passed will be added to, or replace existing metadata. .. seealso:: Geosoft metadata `Schema <https://geosoftgxdev.atlassian.net/wiki/spaces/GXD93/pages/78184638/Geosoft+Metadata+Schema>` .. versionadded:: 9.3.1 """ if self._open is not None: self._init_metadata() return self._metadata[self._metadata_root] else: return None @metadata.setter def metadata(self, meta): self._init_metadata() self._metadata[self._metadata_root] = gxu.merge_dict(self._metadata[self._metadata_root], meta) self._metadata_changed = True @property def unit_of_measure(self): """ Units of measurement (a string) for the primary scalar data associated with this dataset. Can be set. .. versionadded:: 9.3.1 """ try: uom = self.metadata['geosoft']['dataset']['geo:unitofmeasurement']['#text'] except KeyError: uom = '' return uom @unit_of_measure.setter def unit_of_measure(self, uom): self.metadata = {'geosoft': {'@xmlns': 'http://www.geosoft.com/schema/geo', 'dataset': {'geo:unitofmeasurement': {'@xmlns:geo': 'http://www.geosoft.com/schema/geo', '#text': str(uom)}}}} <file_sep>/geosoft/gxpy/tests/run_all.py import multiprocessing import subprocess import glob import timeit import os import sys import inspect def work(test): this_file = os.path.join(os.getcwd(), inspect.getfile(work)) this_folder = os.path.split(this_file)[0] nosetests = os.path.join(os.path.split(sys.executable)[0], 'scripts', 'nosetests') return (test, subprocess.call([nosetests, '-s', '-v', test], cwd=this_folder)) _exit_code = 0 def run_all_tests(): tests = glob.glob('test_*.py') pool = multiprocessing.Pool(processes=6) return pool.map_async(work, tests).get(999999) if __name__ == '__main__': start_time = timeit.default_timer() results = run_all_tests() print('======================================================================') print('Completed {} test fixtures in {}s'.format( len(results), round(timeit.default_timer() - start_time, 3)) ) failed_tests = [f[0] for f in results if 
f[1] != 0] if failed_tests: print('FAILED (fixtures={})'.format(','.join(failed_tests)), file=sys.stderr) exit(1) else: exit(0) <file_sep>/geosoft/gxapi/GXCHIMERA.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXCHIMERA(gxapi_cy.WrapCHIMERA): """ GXCHIMERA class. `GXCHIMERA <geosoft.gxapi.GXCHIMERA>` GX function library. """ def __init__(self, handle=0): super(GXCHIMERA, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXCHIMERA <geosoft.gxapi.GXCHIMERA>` :returns: A null `GXCHIMERA <geosoft.gxapi.GXCHIMERA>` :rtype: GXCHIMERA """ return GXCHIMERA() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def bar_plot(cls, mview, data_group, offset_group, xvv, yvv, dvv, cvv, col, offset, offset_size, width): """ Plot a Bar plot of up to 8 channels. :param mview: View object to plot to :param data_group: Data group name :param offset_group: Offset group name :param xvv: X locations :param yvv: Y locations :param dvv: Data handles, stored as INT values :param cvv: Colors :param col: Color for edges :param offset: Offset symbols (0: No, 1: Yes) :param offset_size: Offset symbol size :param width: Single bar width in data units. :type mview: GXMVIEW :type data_group: str :type offset_group: str :type xvv: GXVV :type yvv: GXVV :type dvv: GXVV :type cvv: GXVV :type col: int :type offset: int :type offset_size: float :type width: float .. 
versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The number of channels is taken from the Data handles `GXVV <geosoft.gxapi.GXVV>`. Plots a bar plot with the center of the "X" axis at the symbol location. See the note on offset symbols in `rose_plot <geosoft.gxapi.GXCHIMERA.rose_plot>` """ gxapi_cy.WrapCHIMERA._bar_plot(GXContext._get_tls_geo(), mview, data_group.encode(), offset_group.encode(), xvv, yvv, dvv, cvv, col, offset, offset_size, width) @classmethod def categorize_by_value(cls, vv_r, vv_i, vv_o): """ Transform values to the index of input data ranges. :param vv_r: Input range minima :param vv_i: Input data `GXVV <geosoft.gxapi.GXVV>`. (REAL) :param vv_o: Output (altered) `GXVV <geosoft.gxapi.GXVV>`.(REAL) :type vv_r: GXVV :type vv_i: GXVV :type vv_o: GXVV .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** A list of minima (e.g. M1, M2, M3, M4, M5) is input. A list of values V is input and transformed to outputs N in the following manner: if(V) >= M5) N = 5 else if(V) >= M4) N = 4 ... ... else if(V) >= M1) N = 1 else N = 0 """ gxapi_cy.WrapCHIMERA._categorize_by_value(GXContext._get_tls_geo(), vv_r, vv_i, vv_o) @classmethod def categorize_by_value_det_limit(cls, vv_r, vv_i, det_limit, vv_o): """ Transform values to the index of input data ranges, with detection limit. :param vv_r: Input range minima :param vv_i: Input data `GXVV <geosoft.gxapi.GXVV>`. (REAL) :param det_limit: Detection limit (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param vv_o: Output (altered) `GXVV <geosoft.gxapi.GXVV>`.(REAL) :type vv_r: GXVV :type vv_i: GXVV :type det_limit: float :type vv_o: GXVV .. 
versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Same as `categorize_by_value <geosoft.gxapi.GXCHIMERA.categorize_by_value>`, but if the input value is less than the detection limit, the output value is set to zero. """ gxapi_cy.WrapCHIMERA._categorize_by_value_det_limit(GXContext._get_tls_geo(), vv_r, vv_i, det_limit, vv_o) @classmethod def clip_to_detect_limit(cls, vv, det_limit, conv): """ Apply detection limit clipping of data. :param vv: Input data vv (altered). :param det_limit: Detection limit :param conv: Auto-convert negatives? :type vv: GXVV :type det_limit: float :type conv: int .. versionadded:: 5.0.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Flow: 1. If auto-converting negatives, then all negative values are replaced by -0.5*value, and detection limit is ignored. 2. If not auto-converting negatives, and the detection limit is not `rDUMMY <geosoft.gxapi.rDUMMY>`, then values less than the detection limit are converted to one-half the detection limit. """ gxapi_cy.WrapCHIMERA._clip_to_detect_limit(GXContext._get_tls_geo(), vv, det_limit, conv) @classmethod def draw_circle_offset_markers(cls, mview, vv_xi, vv_yi, vv_xo, vv_yo, off_size): """ Plots location marker and joining line for circle offset symbols :param mview: View :param vv_xi: Original (marker) X location :param vv_yi: Original (marker) Y location :param vv_xo: Offset (new) X location :param vv_yo: Offset (new) Y location :param off_size: Marker symbol radius :type mview: GXMVIEW :type vv_xi: GXVV :type vv_yi: GXVV :type vv_xo: GXVV :type vv_yo: GXVV :type off_size: float .. 
versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Draws black filled circle (symbols.gfn #7) and a joining line. """ gxapi_cy.WrapCHIMERA._draw_circle_offset_markers(GXContext._get_tls_geo(), mview, vv_xi, vv_yi, vv_xo, vv_yo, off_size) @classmethod def draw_rectangle_offset_markers(cls, mview, vv_xi, vv_yi, vv_xo, vv_yo, off_size, x_size, y_size): """ Plots location marker and joining line for rectangle offset symbols :param mview: View :param vv_xi: Original (marker) X location :param vv_yi: Original (marker) Y location :param vv_xo: Offset (new) X location :param vv_yo: Offset (new) Y location :param off_size: Offset symbol width :param x_size: Offset symbol height :param y_size: Marker symbol radius :type mview: GXMVIEW :type vv_xi: GXVV :type vv_yi: GXVV :type vv_xo: GXVV :type vv_yo: GXVV :type off_size: float :type x_size: float :type y_size: float .. versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Draws black filled circle (symbols.gfn #7) and a joining line. """ gxapi_cy.WrapCHIMERA._draw_rectangle_offset_markers(GXContext._get_tls_geo(), mview, vv_xi, vv_yi, vv_xo, vv_yo, off_size, x_size, y_size) @classmethod def duplicate_chem(cls, mview, vv, log, det_lim, old, vv_tol, title, unit, x0, y0, xs, ys): """ Plot an ASSAY Duplicate result in a graph window. 
:param mview: View :param vv: Duplicate data :param log: Log-transform: 0 - linear, 1 - log :param det_lim: Detect Limit :param old: Number of old samples in the `GXVV <geosoft.gxapi.GXVV>` :param vv_tol: Tolerances (1-5 values) :param title: Title :param unit: Unit :param x0: X location (bottom left corner of graph) :param y0: Y location :param xs: Graph width :param ys: Graph height :type mview: GXMVIEW :type vv: GXVV :type log: int :type det_lim: float :type old: int :type vv_tol: GXVV :type title: str :type unit: str :type x0: float :type y0: float :type xs: float :type ys: float .. versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapCHIMERA._duplicate_chem(GXContext._get_tls_geo(), mview, vv, log, det_lim, old, vv_tol, title.encode(), unit.encode(), x0, y0, xs, ys) @classmethod def duplicate_chem_view(cls, map, view, group, ipj, vv, log, det_lim, old, vv_tol, title, unit, vvx, vv_line, vv_fid, db, min_y, max_y): """ Plot an ASSAY Duplicate result in a new view. :param map: Map :param view: New view name :param group: New group name :param vv: Duplicate data :param log: Log-transform: 0 - linear, 1 - log :param det_lim: Detect Limit :param old: Number of old samples in the `GXVV <geosoft.gxapi.GXVV>` :param vv_tol: Tolerances (1-5 values) :param title: Title :param unit: Unit :param vvx: `GXVV <geosoft.gxapi.GXVV>` X :param vv_line: `GXVV <geosoft.gxapi.GXVV>` Line :param vv_fid: `GXVV <geosoft.gxapi.GXVV>` Fid :param db: Database :param min_y: Returned MinY :param max_y: Returned MaxY :type map: GXMAP :type view: str :type group: str :type ipj: GXIPJ :type vv: GXVV :type log: int :type det_lim: float :type old: int :type vv_tol: GXVV :type title: str :type unit: str :type vvx: GXVV :type vv_line: GXVV :type vv_fid: GXVV :type db: GXDB :type min_y: float_ref :type max_y: float_ref .. 
versionadded:: 8.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ min_y.value, max_y.value = gxapi_cy.WrapCHIMERA._duplicate_chem_view(GXContext._get_tls_geo(), map, view.encode(), group.encode(), ipj, vv, log, det_lim, old, vv_tol, title.encode(), unit.encode(), vvx, vv_line, vv_fid, db, min_y.value, max_y.value) @classmethod def get_expression_data_vv(cls, db, line, stage, exp, ini, gvv): """ Get data from a line using a channel expression. :param db: Database :param line: Line to read :param stage: Geochem stage (just "raw data stage" for now). :param exp: Channel expression :param ini: INI file name with required units (e.g. PARAMETER.CU="ppm") (optional) :param gvv: Returned data :type db: GXDB :type line: int :type stage: str :type exp: str :type ini: str :type gvv: GXVV .. versionadded:: 6.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Input a channel expression. Units for individual channels are stored in the input INI. Returns a `GXVV <geosoft.gxapi.GXVV>` for the given line with the calculated expression values. """ gxapi_cy.WrapCHIMERA._get_expression_data_vv(GXContext._get_tls_geo(), db, line, stage.encode(), exp.encode(), ini.encode(), gvv) @classmethod def get_lithogeochem_data(cls, db, lst, m_ch, vv_trans, remove_dummy_rows, vv_dummy, warn, vv_d, vv_line, vv_n, vv_used, vv_index, vv_fids, vv_fidi): """ Get all rows of non-dummy data in a database. :param db: [i] database handle :param lst: [i] channels of data to get :param m_ch: [i] mask channel (can be `NULLSYMB <geosoft.gxapi.NULLSYMB>`) :param vv_trans: [i] transforms to apply :param remove_dummy_rows: [i] remove dummy rows? :param vv_dummy: [i] dummy row if this channel value is dummy (0:No, 1:Yes)? 
Effective only if "remove dummy rows" value is TRUE :param warn: [i] warn if rows removed because of dummy data items? :param vv_d: [o] (INT) returned data - one `GXVV <geosoft.gxapi.GXVV>` handle per channel :param vv_line: [o] line symbols selected :param vv_n: [o] number of original data items in each line :param vv_used: [o] number of non-dummy rows :param vv_index: [o] indices into original data :param vv_fids: [o] Fid Starts (REAL) :param vv_fidi: [o] Fid Increments (REAL) :type db: GXDB :type lst: GXLST :type m_ch: int :type vv_trans: GXVV :type remove_dummy_rows: int :type vv_dummy: GXVV :type warn: int :type vv_d: GXVV :type vv_line: GXVV :type vv_n: GXVV :type vv_used: GXVV :type vv_index: GXVV :type vv_fids: GXVV :type vv_fidi: GXVV .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This function is a quick way to get all rows of data, guaranteeing no dummy items. Book-keeping VVs returned let you easily write back results to new channels in the correct locations. Set the "Dummy Row" `GXVV <geosoft.gxapi.GXVV>` to 1 if you wish to remove any row where a value for the corresponding channel is a dummy. Transforms to apply: -1 - Channel default (will be either raw or log) 0 - Raw Transform 1 - Log transform: base e with log min = CHIMERA_LOG_MIN 2 - Lambda transform """ gxapi_cy.WrapCHIMERA._get_lithogeochem_data(GXContext._get_tls_geo(), db, lst, m_ch, vv_trans, remove_dummy_rows, vv_dummy, warn, vv_d, vv_line, vv_n, vv_used, vv_index, vv_fids, vv_fidi) @classmethod def get_transform(cls, db, chan, trans_opt, trans, lda): """ Get channel transform options and lambda values. 
:param db: `GXDB <geosoft.gxapi.GXDB>` handle :param chan: Channel name :param trans_opt: Transform option: -1, 0, 1 or 2 :param trans: Returned transform used :param lda: Returned lambda value for option==2 :type db: GXDB :type chan: str :type trans_opt: int :type trans: int_ref :type lda: float_ref .. versionadded:: 6.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the lambda transform is requested, the channel must have the lambda value defined. Input Transform options -1 - Channel default (will be either raw or log) 0 - Raw Transform 1 - Log transform: base e with log min = CHIMERA_LOG_MIN 2 - Lambda transform """ trans.value, lda.value = gxapi_cy.WrapCHIMERA._get_transform(GXContext._get_tls_geo(), db, chan.encode(), trans_opt, trans.value, lda.value) @classmethod def is_acquire_chan(cls, input_chan, chan, units, factor, oxide): """ Is this channel in acQuire format (e.g. "Ag_ppm_4AWR") :param input_chan: String to test :param chan: Returned channel name :param units: Returned units :param factor: Buffer factor (e.g. ppm = 1.e-6) :param oxide: is this an oxide? :type input_chan: str :type chan: str_ref :type units: str_ref :type factor: float_ref :type oxide: bool_ref :rtype: bool .. versionadded:: 7.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Expressions can take acQuire-type named channels if the exact element/oxide is not found. This function extracts the channel name, and units from an acQuire-formatted channel name. 
""" ret_val, chan.value, units.value, factor.value, oxide.value = gxapi_cy.WrapCHIMERA._is_acquire_chan(GXContext._get_tls_geo(), input_chan.encode(), chan.value.encode(), units.value.encode(), factor.value, oxide.value) return ret_val @classmethod def is_element(cls, chan, case_sensitive): """ Tests a string to see if it is an element symbol :param chan: String to test :param case_sensitive: :ref:`STR_CASE` :type chan: str :type case_sensitive: int :rtype: bool .. versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Suggested use - testing to see if a channel name is an element so that the "ASSAY" class can be set. """ ret_val = gxapi_cy.WrapCHIMERA._is_element(GXContext._get_tls_geo(), chan.encode(), case_sensitive) return ret_val @classmethod def launch_histogram(cls, db, chan): """ Launch histogram tool on a database. :param db: Database name :param chan: First chan name :type db: str :type chan: str .. versionadded:: 5.0.6 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The database should be a currently open database. This function supercedes `GXEDB.launch_histogram <geosoft.gxapi.GXEDB.launch_histogram>`, (which now just gets the name of the `GXEDB <geosoft.gxapi.GXEDB>` and calls this function). """ gxapi_cy.WrapCHIMERA._launch_histogram(GXContext._get_tls_geo(), db.encode(), chan.encode()) @classmethod def launch_probability(cls, db, chan): """ Launch probability tool on a database. :param db: Database name :param chan: First chan name :type db: str :type chan: str .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The database should be a currently open database. 
""" gxapi_cy.WrapCHIMERA._launch_probability(GXContext._get_tls_geo(), db.encode(), chan.encode()) @classmethod def launch_scatter(cls, db): """ Launch scatter tool on a database. :param db: Database name :type db: str .. versionadded:: 5.0.6 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The scatter tool uses the following INI parameters ================ =============================================== SCATTER.STM name of the scatter template, "none" for none ---------------- ----------------------------------------------- SCATTER.STM_NAME name of last template section, "" for none. ---------------- ----------------------------------------------- SCATTER.X name of channel to display in X ---------------- ----------------------------------------------- SCATTER.Y name of channel to display in Y ---------------- ----------------------------------------------- SCATTER.MASK name of channel to use for mask ================ =============================================== The database should be a currently open database. This function supercedes `GXEDB.launch_scatter <geosoft.gxapi.GXEDB.launch_scatter>`, (which now just gets the name of the `GXEDB <geosoft.gxapi.GXEDB>` and calls this function). """ gxapi_cy.WrapCHIMERA._launch_scatter(GXContext._get_tls_geo(), db.encode()) @classmethod def launch_triplot(cls, db): """ Launch Triplot tool on a database. :param db: Database name :type db: str .. 
versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The Triplot tool uses the following INI parameters ================ =============================================== TRIPLOT.TTM name of the triplot template, "none" for none ---------------- ----------------------------------------------- TRIPLOT.TTM_NAME name of last template section, "" for none. ---------------- ----------------------------------------------- TRIPLOT.X name of channel to display in X ---------------- ----------------------------------------------- TRIPLOT.Y name of channel to display in Y ---------------- ----------------------------------------------- TRIPLOT.Z name of channel to display in Z ---------------- ----------------------------------------------- TRIPLOT.MASK name of channel to use for mask ================ =============================================== The database should be a currently open database. """ gxapi_cy.WrapCHIMERA._launch_triplot(GXContext._get_tls_geo(), db.encode()) @classmethod def mask_chan_lst(cls, db, lst): """ Load a `GXLST <geosoft.gxapi.GXLST>` with mask channels. :param db: hDB - Database Object :param lst: `GXLST <geosoft.gxapi.GXLST>` object to populate :type db: GXDB :type lst: GXLST .. versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Loads a `GXLST <geosoft.gxapi.GXLST>` with all channels with CLASS "MASK", as well as all channels containing the string "MASK", as long as the CLASS for these channels is not set to something other than "" or "MASK". This function has been duplicated by `GXDB.mask_chan_lst <geosoft.gxapi.GXDB.mask_chan_lst>`, which is safe to use in applications which do not have `GXCHIMERA <geosoft.gxapi.GXCHIMERA>` loaded. 
""" gxapi_cy.WrapCHIMERA._mask_chan_lst(GXContext._get_tls_geo(), db, lst) @classmethod def ordered_channel_lst(cls, db, lst): """ Fill a list with the channels in the preferred order. :param db: hDB - Database Object :param lst: `GXLST <geosoft.gxapi.GXLST>` object to populate [recommended 2*`STR_DB_SYMBOL <geosoft.gxapi.STR_DB_SYMBOL>`] :type db: GXDB :type lst: GXLST .. versionadded:: 5.1.8 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Loads a `GXLST <geosoft.gxapi.GXLST>` with all channels in the preferred order: First: Sample, E, N, assay channels, Middle: Data from survey (other channels), Last: Duplicate, Standard, Chemmask (and other masks), weight, lab, batch If the input `GXLST <geosoft.gxapi.GXLST>` object has values, it is used as the channel `GXLST <geosoft.gxapi.GXLST>`, otherwise, get all the database channels. (This allows you to pass in the currently displayed channels and only reload those). """ gxapi_cy.WrapCHIMERA._ordered_channel_lst(GXContext._get_tls_geo(), db, lst) @classmethod def pie_plot(cls, mview, data_group, offset_group, xvv, yvv, dvv, cvv, col, offset, offset_size, radius): """ Plot a Pie plot of up to 8 channels. :param mview: View object to plot to :param data_group: Data group name :param offset_group: Offset group name :param xvv: X locations :param yvv: Y locations :param dvv: Data handles, stored as INT values :param cvv: Colors :param col: Color for edges :param offset: Offset symbols (0: No, 1: Yes) :param offset_size: Offset symbol size :param radius: Pie plot radius in data units. :type mview: GXMVIEW :type data_group: str :type offset_group: str :type xvv: GXVV :type yvv: GXVV :type dvv: GXVV :type cvv: GXVV :type col: int :type offset: int :type offset_size: float :type radius: float .. 
versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The number of channels is taken from the Data handles `GXVV <geosoft.gxapi.GXVV>`. The values in each data `GXVV <geosoft.gxapi.GXVV>` are summed and the pie arc is is given by the percent contribution of each constituent. See the note on offset symbols in `rose_plot <geosoft.gxapi.GXCHIMERA.rose_plot>` """ gxapi_cy.WrapCHIMERA._pie_plot(GXContext._get_tls_geo(), mview, data_group.encode(), offset_group.encode(), xvv, yvv, dvv, cvv, col, offset, offset_size, radius) @classmethod def pie_plot2(cls, mview, data_group, offset_group, xvv, yvv, dvv, cvv, col, offset, offset_size, radius, start_angle): """ Same as `pie_plot <geosoft.gxapi.GXCHIMERA.pie_plot>`, with a starting angle. :param mview: View object to plot to :param data_group: Data group name :param offset_group: Offset group name :param xvv: X locations :param yvv: Y locations :param dvv: Data handles, stored as INT values :param cvv: Colors :param col: Color for edges :param offset: Offset symbols (0: No, 1: Yes) :param offset_size: Offset symbol size :param radius: Pie plot radius in data units. :param start_angle: Starting angle in degrees CCW from horizontal (`rDUMMY <geosoft.gxapi.rDUMMY>` gives 0.0) :type mview: GXMVIEW :type data_group: str :type offset_group: str :type xvv: GXVV :type yvv: GXVV :type dvv: GXVV :type cvv: GXVV :type col: int :type offset: int :type offset_size: float :type radius: float :type start_angle: float .. versionadded:: 5.1.5 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The starting angle is the location of the edge of the first pie slice, counted in degrees counter-clockwise from horizontal (3 o'clock). Zero degrees gives the same plot as `pie_plot <geosoft.gxapi.GXCHIMERA.pie_plot>`. 
""" gxapi_cy.WrapCHIMERA._pie_plot2(GXContext._get_tls_geo(), mview, data_group.encode(), offset_group.encode(), xvv, yvv, dvv, cvv, col, offset, offset_size, radius, start_angle) @classmethod def plot_string_classified_symbols_legend_from_class_file(cls, mview, title, x, y_min, y_max, class_file, index_vv): """ Plot legend for the string classified symbols :param mview: Map view object :param title: Title :param x: Left side X location :param y_min: Bottom Y bound :param y_max: Top Y bound :param class_file: Class file name (`GXTPAT <geosoft.gxapi.GXTPAT>`) :param index_vv: Class indices (INT `GXVV <geosoft.gxapi.GXVV>`) :type mview: GXMVIEW :type title: str :type x: float :type y_min: float :type y_max: float :type class_file: str :type index_vv: GXVV .. versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** Plot in a legend the classes in the class file found in the input class indices. """ gxapi_cy.WrapCHIMERA._plot_string_classified_symbols_legend_from_class_file(GXContext._get_tls_geo(), mview, title.encode(), x, y_min, y_max, class_file.encode(), index_vv) @classmethod def atomic_weight(cls, element): """ Return the atomic weight of a particular element. :param element: Element name (case insensitive) :type element: str :returns: The atomic weight of the given element. :rtype: float .. versionadded:: 6.4.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the input string is not an element symbol (elements in the range 1-92, "H" to "U"), then returns a dummy (`GS_R8DM <geosoft.gxapi.GS_R8DM>`). 
""" ret_val = gxapi_cy.WrapCHIMERA._atomic_weight(GXContext._get_tls_geo(), element.encode()) return ret_val @classmethod def rose_plot(cls, mview, data_group, offset_group, xvv, yvv, dvv, cvv, col, offset, offset_size): """ Plot a Rose plot of up to 8 channels. :param mview: View object to plot to :param data_group: Data group name :param offset_group: Offset group name :param xvv: X locations :param yvv: Y locations :param dvv: Data handles, stored as INT values :param cvv: Colors :param col: Color for edges :param offset: Offset symbols (0: No, 1: Yes) :param offset_size: Offset symbol size :type mview: GXMVIEW :type data_group: str :type offset_group: str :type xvv: GXVV :type yvv: GXVV :type dvv: GXVV :type cvv: GXVV :type col: int :type offset: int :type offset_size: float .. versionadded:: 5.0.7 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The number of channels is taken from the Data handles `GXVV <geosoft.gxapi.GXVV>`. The values in each data `GXVV <geosoft.gxapi.GXVV>` give the radius, in view units, of the sector arc to plots. Values <=0 or dummies are not plotted. Offset symbols: When selected, the symbols plot without overlap, away from the original locations. The original location is marked with a small symbol and a line joins the original position and the relocated symbol. Care should be taken when choosing the symbol size, because if the point density is too high, all the points will get pushed to the outside edge and your plot will look like a hedgehog (it also takes a lot longer!). 
""" gxapi_cy.WrapCHIMERA._rose_plot(GXContext._get_tls_geo(), mview, data_group.encode(), offset_group.encode(), xvv, yvv, dvv, cvv, col, offset, offset_size) @classmethod def rose_plot2(cls, mview, data_group, offset_group, xvv, yvv, dvv, cvv, col, offset, offset_size, start_angle): """ Same as `rose_plot <geosoft.gxapi.GXCHIMERA.rose_plot>`, with a starting angle. :param mview: View object to plot to :param data_group: Data group name :param offset_group: Offset group name :param xvv: X locations :param yvv: Y locations :param dvv: Data handles, stored as INT values :param cvv: Colors :param col: Color for edges :param offset: Offset symbols (0: No, 1: Yes) :param offset_size: Offset symbol size :param start_angle: Starting angle in degrees CCW from horizontal (`rDUMMY <geosoft.gxapi.rDUMMY>` gives 0.0) :type mview: GXMVIEW :type data_group: str :type offset_group: str :type xvv: GXVV :type yvv: GXVV :type dvv: GXVV :type cvv: GXVV :type col: int :type offset: int :type offset_size: float :type start_angle: float .. versionadded:: 5.1.5 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The starting angle is the location of the edge of the first pie slice, counted in degrees counter-clockwise from horizontal (3 o'clock). Zero degrees gives the same plot as `rose_plot <geosoft.gxapi.GXCHIMERA.rose_plot>`. """ gxapi_cy.WrapCHIMERA._rose_plot2(GXContext._get_tls_geo(), mview, data_group.encode(), offset_group.encode(), xvv, yvv, dvv, cvv, col, offset, offset_size, start_angle) @classmethod def scatter2(cls, mview, title, x1, y1, width, height, horz_vv, vert_vv, sym_font, sym_num_vv, sym_siz_vv, sym_col_vv, annot_style, h_chan, v_chan, h_units, v_units, h_min, h_max, v_min, v_max, hr_min, hr_max, vr_min, vr_max, use_hr_min, use_hr_max, use_vr_min, use_vr_max, h_scaling, v_scaling): """ Plot the scatter plot on a map using symbol number, size and color VVs. 
:param mview: View :param title: Title :param x1: X location (bottom left corner of box) :param y1: Y location :param width: Box width :param height: Box height :param horz_vv: Horizontal channel :param vert_vv: Vertical channel :param sym_font: Decorated font name, "" for default symbol font (normally symbols.gfn) :param sym_num_vv: Symbol numbers :param sym_siz_vv: Symbol sizes :param sym_col_vv: Colors if symbol number or Color == 0, do not plot :param annot_style: Annotation style 0 - outside, 1 - inside :param h_chan: Horizontal channel name :param v_chan: Vertical channel name :param h_units: Horizontal channel units :param v_units: Vertical channel units :param h_min: Min. Horizontal value, `rDUMMY <geosoft.gxapi.rDUMMY>` for default :param h_max: Max. Horizontal value :param v_min: Min. Vertical value :param v_max: Max. Vertical value :param hr_min: Min. Horizontal range value :param hr_max: Max. Horizontal range value :param vr_min: Min. Vertical range value :param vr_max: Max. Vertical range value :param use_hr_min: Use Min Horz. Range selection? :param use_hr_max: Use Max Horz. Range selection? :param use_vr_min: Use Min Vert. Range selection? :param use_vr_max: Use Max Vert. Range selection? :param h_scaling: Horizontal axis scaling: 0 - linear, 1 - log :param v_scaling: Vertical axis scaling: 0 - linear, 1 - log :type mview: GXMVIEW :type title: str :type x1: float :type y1: float :type width: float :type height: float :type horz_vv: GXVV :type vert_vv: GXVV :type sym_font: str :type sym_num_vv: GXVV :type sym_siz_vv: GXVV :type sym_col_vv: GXVV :type annot_style: int :type h_chan: str :type v_chan: str :type h_units: str :type v_units: str :type h_min: float :type h_max: float :type v_min: float :type v_max: float :type hr_min: float :type hr_max: float :type vr_min: float :type vr_max: float :type use_hr_min: int :type use_hr_max: int :type use_vr_min: int :type use_vr_max: int :type h_scaling: int :type v_scaling: int .. 
        versionadded:: 5.0.7

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The view scaling is not altered with any projection. The base view is best as the input.
        """
        # Thin generated wrapper: encode str args to bytes and delegate to the C API.
        gxapi_cy.WrapCHIMERA._scatter2(GXContext._get_tls_geo(), mview, title.encode(), x1, y1, width, height, horz_vv, vert_vv, sym_font.encode(), sym_num_vv, sym_siz_vv, sym_col_vv, annot_style, h_chan.encode(), v_chan.encode(), h_units.encode(), v_units.encode(), h_min, h_max, v_min, v_max, hr_min, hr_max, vr_min, vr_max, use_hr_min, use_hr_max, use_vr_min, use_vr_max, h_scaling, v_scaling)



    @classmethod
    def fixed_symbol_scatter_plot(cls, mview, title, x1, y1, width, height, x_vv, y_vv, m_vv, mask_col, symbol_font, symbol_number, symbol_size, symbol_angle, symbol_color, symbol_fill, db, line_vv, fid_vv, annotn, x_chan, y_chan, x_units, y_units, x_min, x_max, y_min, y_max, x_lin, y_lin, overlay):
        """
        Plot a scatter plot using a single fixed symbol.
        Optional data masking with masking Color.
        Optional database linking.

        :param mview:          View
        :param title:          Title
        :param x1:             X location (bottom left corner of box)
        :param y1:             Y location
        :param width:          Box width
        :param height:         Box height
        :param x_vv:           Horizontal channel data
        :param y_vv:           Vertical channel data
        :param m_vv:           Mask channel data (can be NULL)
        :param mask_col:       Mask Color; overrides symbol Color where mask data is not dummy.
                               Pass an empty string to `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` for no plot.
        :param symbol_font:    Decorated font name, "" for default symbol font (normally symbols.gfn)
        :param symbol_number:  Symbol number (>=0)
        :param symbol_size:    Symbol size ( >=0)
        :param symbol_angle:   Symbol angle (-360 to 360)
        :param symbol_color:   Symbol Color
        :param symbol_fill:    Symbol fill Color
        :param db:             Database (source of data)
        :param line_vv:        Line handles for data
        :param fid_vv:         Fid values for data
        :param annotn:         Annotation style 0 - outside, 1 - inside
        :param x_chan:         Horizontal channel name
        :param y_chan:         Vertical channel name
        :param x_units:        Horizontal channel units
        :param y_units:        Vertical channel units
        :param x_min:          Min. Horizontal value, `rDUMMY <geosoft.gxapi.rDUMMY>` for default
        :param x_max:          Max. Horizontal value
        :param y_min:          Min. Vertical value
        :param y_max:          Max. Vertical value
        :param x_lin:          Horizontal axis scaling: 0 - linear, 1 - log
        :param y_lin:          Vertical axis scaling
        :param overlay:        Plot overlay ("" for none)
        :type mview:           GXMVIEW
        :type title:           str
        :type x1:              float
        :type y1:              float
        :type width:           float
        :type height:          float
        :type x_vv:            GXVV
        :type y_vv:            GXVV
        :type m_vv:            GXVV
        :type mask_col:        int
        :type symbol_font:     str
        :type symbol_number:   int
        :type symbol_size:     float
        :type symbol_angle:    float
        :type symbol_color:    int
        :type symbol_fill:     int
        :type db:              GXDB
        :type line_vv:         GXVV
        :type fid_vv:          GXVV
        :type annotn:          int
        :type x_chan:          str
        :type y_chan:          str
        :type x_units:         str
        :type y_units:         str
        :type x_min:           float
        :type x_max:           float
        :type y_min:           float
        :type y_max:           float
        :type x_lin:           int
        :type y_lin:           int
        :type overlay:         str

        .. versionadded:: 8.0.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Plot a scatter plot using a single fixed symbol.
        """
        gxapi_cy.WrapCHIMERA._fixed_symbol_scatter_plot(GXContext._get_tls_geo(), mview, title.encode(), x1, y1, width, height, x_vv, y_vv, m_vv, mask_col, symbol_font.encode(), symbol_number, symbol_size, symbol_angle, symbol_color, symbol_fill, db, line_vv, fid_vv, annotn, x_chan.encode(), y_chan.encode(), x_units.encode(), y_units.encode(), x_min, x_max, y_min, y_max, x_lin, y_lin, overlay.encode())



    @classmethod
    def zone_coloured_scatter_plot(cls, mview, title, x1, y1, width, height, x_vv, y_vv, m_vv, mask_col, zone_data_vv, zone_file, symbol_font, symbol_number, symbol_size, symbol_angle, symbol_color, symbol_fill, fix_edge_color, db, line_vv, fid_vv, annotn, x_chan, y_chan, x_units, y_units, x_min, x_max, y_min, y_max, x_lin, y_lin, overlay):
        """
        Plot a scatter plot using colors based on a zone file.
        Optional data masking with masking color.
        Optional database linking.

        :param mview:           View
        :param title:           Title
        :param x1:              X location (bottom left corner of box)
        :param y1:              Y location
        :param width:           Box width
        :param height:          Box height
        :param x_vv:            Horizontal channel data
        :param y_vv:            Vertical channel data
        :param m_vv:            Mask channel data (can be NULL)
        :param mask_col:        Mask color; overrides symbol color where mask data is not dummy.
                                Pass an empty string to `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` for no plot.
        :param zone_data_vv:    Zone channel data
        :param zone_file:       Zone file name
        :param symbol_font:     Decorated font name, "" for default symbol font (normally symbols.gfn)
        :param symbol_number:   Symbol number (>=0)
        :param symbol_size:     Symbol size ( >=0)
        :param symbol_angle:    Symbol angle (-360 to 360)
        :param symbol_color:    Symbol color
        :param symbol_fill:     Symbol fill color
        :param fix_edge_color:  Fix symbol edge color?
        :param db:              Database (source of data)
        :param line_vv:         Line handles for data
        :param fid_vv:          Fid values for data
        :param annotn:          Annotation style 0 - outside, 1 - inside
        :param x_chan:          Horizontal channel name
        :param y_chan:          Vertical channel name
        :param x_units:         Horizontal channel units
        :param y_units:         Vertical channel units
        :param x_min:           Min. Horizontal value, `rDUMMY <geosoft.gxapi.rDUMMY>` for default
        :param x_max:           Max. Horizontal value
        :param y_min:           Min. Vertical value
        :param y_max:           Max. Vertical value
        :param x_lin:           Horizontal axis scaling: 0 - linear, 1 - log
        :param y_lin:           Vertical axis scaling
        :param overlay:         Plot overlay ("" for none)
        :type mview:            GXMVIEW
        :type title:            str
        :type x1:               float
        :type y1:               float
        :type width:            float
        :type height:           float
        :type x_vv:             GXVV
        :type y_vv:             GXVV
        :type m_vv:             GXVV
        :type mask_col:         int
        :type zone_data_vv:     GXVV
        :type zone_file:        str
        :type symbol_font:      str
        :type symbol_number:    int
        :type symbol_size:      float
        :type symbol_angle:     float
        :type symbol_color:     int
        :type symbol_fill:      int
        :type fix_edge_color:   int
        :type db:               GXDB
        :type line_vv:          GXVV
        :type fid_vv:           GXVV
        :type annotn:           int
        :type x_chan:           str
        :type y_chan:           str
        :type x_units:          str
        :type y_units:          str
        :type x_min:            float
        :type x_max:            float
        :type y_min:            float
        :type y_max:            float
        :type x_lin:            int
        :type y_lin:            int
        :type overlay:          str

        .. versionadded:: 8.0.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Plot a scatter plot using colors based on a zone file.
        """
        gxapi_cy.WrapCHIMERA._zone_coloured_scatter_plot(GXContext._get_tls_geo(), mview, title.encode(), x1, y1, width, height, x_vv, y_vv, m_vv, mask_col, zone_data_vv, zone_file.encode(), symbol_font.encode(), symbol_number, symbol_size, symbol_angle, symbol_color, symbol_fill, fix_edge_color, db, line_vv, fid_vv, annotn, x_chan.encode(), y_chan.encode(), x_units.encode(), y_units.encode(), x_min, x_max, y_min, y_max, x_lin, y_lin, overlay.encode())



    @classmethod
    def string_classified_scatter_plot(cls, mview, title, x1, y1, width, height, x_vv, y_vv, m_vv, mask_col, class_vv, class_file, symbol_size_override, db, line_vv, fid_vv, annotn, x_chan, y_chan, x_units, y_units, x_min, x_max, y_min, y_max, x_lin, y_lin, overlay):
        """
        Plot a scatter plot using symbols based on a symbol class file.
        Optional data masking with masking color.
        Optional database linking.

        :param mview:                 View
        :param title:                 Title
        :param x1:                    X location (bottom left corner of box)
        :param y1:                    Y location
        :param width:                 Box width
        :param height:                Box height
        :param x_vv:                  Horizontal channel data
        :param y_vv:                  Vertical channel data
        :param m_vv:                  Mask channel data
        :param mask_col:              Mask color; overrides symbol color.
                                      Pass an empty string to `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` for no plot.
        :param class_vv:              Class channel data
        :param class_file:            Class file (`GXTPAT <geosoft.gxapi.GXTPAT>`) name.
        :param symbol_size_override:  Symbol size override. Set to 0.0 to use class file symbol sizes.
        :param db:                    Database (source of data)
        :param line_vv:               Line handles for data
        :param fid_vv:                Fid values for data
        :param annotn:                Annotation style 0 - outside, 1 - inside
        :param x_chan:                Horizontal channel name
        :param y_chan:                Vertical channel name
        :param x_units:               Horizontal channel units
        :param y_units:               Vertical channel units
        :param x_min:                 Min. Horizontal value, `rDUMMY <geosoft.gxapi.rDUMMY>` for default
        :param x_max:                 Max. Horizontal value
        :param y_min:                 Min. Vertical value
        :param y_max:                 Max. Vertical value
        :param x_lin:                 Horizontal axis scaling: 0 - linear, 1 - log
        :param y_lin:                 Vertical axis scaling
        :param overlay:               Plot overlay ("" for none)
        :type mview:                  GXMVIEW
        :type title:                  str
        :type x1:                     float
        :type y1:                     float
        :type width:                  float
        :type height:                 float
        :type x_vv:                   GXVV
        :type y_vv:                   GXVV
        :type m_vv:                   GXVV
        :type mask_col:               int
        :type class_vv:               GXVV
        :type class_file:             str
        :type symbol_size_override:   float
        :type db:                     GXDB
        :type line_vv:                GXVV
        :type fid_vv:                 GXVV
        :type annotn:                 int
        :type x_chan:                 str
        :type y_chan:                 str
        :type x_units:                str
        :type y_units:                str
        :type x_min:                  float
        :type x_max:                  float
        :type y_min:                  float
        :type y_max:                  float
        :type x_lin:                  int
        :type y_lin:                  int
        :type overlay:                str

        .. versionadded:: 8.0.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Plot a scatter plot using symbols based on a symbol class file.
        """
        gxapi_cy.WrapCHIMERA._string_classified_scatter_plot(GXContext._get_tls_geo(), mview, title.encode(), x1, y1, width, height, x_vv, y_vv, m_vv, mask_col, class_vv, class_file.encode(), symbol_size_override, db, line_vv, fid_vv, annotn, x_chan.encode(), y_chan.encode(), x_units.encode(), y_units.encode(), x_min, x_max, y_min, y_max, x_lin, y_lin, overlay.encode())



    @classmethod
    def set_lithogeochem_data(cls, db, lst, vv_d, vv_line, vv_n, vv_used, vv_index, vv_fids, vv_fidi, vv_dummy):
        """
        Set data back into a database.

        :param db:        [i] database handle
        :param lst:       [i] channels of data to set
        :param vv_d:      [i] (INT) input data - one `GXVV <geosoft.gxapi.GXVV>` handle per channel
        :param vv_line:   [i] line symbols selected
        :param vv_n:      [i] number of original data items in each line
        :param vv_used:   [i] number of non-dummy rows
        :param vv_index:  [i] indices into original data
        :param vv_fids:   [i] Fid Starts (REAL)
        :param vv_fidi:   [i] Fid Increments (REAL)
        :param vv_dummy:  [i] init channel values to dummies first (0:No, 1:Yes)?
        :type db:        GXDB
        :type lst:       GXLST
        :type vv_d:      GXVV
        :type vv_line:   GXVV
        :type vv_n:      GXVV
        :type vv_used:   GXVV
        :type vv_index:  GXVV
        :type vv_fids:   GXVV
        :type vv_fidi:   GXVV
        :type vv_dummy:  GXVV

        .. versionadded:: 6.2

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** This function would normally be called after AAGetLithogeochemData_CHIMERA
        to write processed values back into a database, in the correct lines,
        and in the correct fiducial locations wrt the other data. The
        book-keeping VVs would all be set up in AAGetLithogeochemData_CHIMERA.
        Values NOT in the data (missing indices) will be initialized to
        dummy if the channel is new, or if the value in the last `GXVV <geosoft.gxapi.GXVV>`
        below is set to 1.
        New channel types will be set using the data `GXVV <geosoft.gxapi.GXVV>` type.
        Any metadata (CLASS, display formats) should be set separately.
        """
        gxapi_cy.WrapCHIMERA._set_lithogeochem_data(GXContext._get_tls_geo(), db, lst, vv_d, vv_line, vv_n, vv_used, vv_index, vv_fids, vv_fidi, vv_dummy)



    @classmethod
    def stacked_bar_plot(cls, mview, data_group, offset_group, xvv, yvv, dvv, cvv, col, offset, offset_size, width):
        """
        Plot a Bar plot of up to 8 channels, bars stacked on each other.

        :param mview:         View object to plot to
        :param data_group:    Data group name
        :param offset_group:  Offset group name
        :param xvv:           X locations
        :param yvv:           Y locations
        :param dvv:           Data handles, stored as INT values
        :param cvv:           Colors
        :param col:           Color for edges
        :param offset:        Offset symbols (0: No, 1: Yes)
        :param offset_size:   Offset symbol size
        :param width:         Single bar width in data units.
        :type mview:          GXMVIEW
        :type data_group:     str
        :type offset_group:   str
        :type xvv:            GXVV
        :type yvv:            GXVV
        :type dvv:            GXVV
        :type cvv:            GXVV
        :type col:            int
        :type offset:         int
        :type offset_size:    float
        :type width:          float

        .. versionadded:: 5.1.8

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The number of channels is taken from the Data handles `GXVV <geosoft.gxapi.GXVV>`.
        Plots a bar plot with the center of the "X" axis at the symbol location.
        See the note on offset symbols in `rose_plot <geosoft.gxapi.GXCHIMERA.rose_plot>`
        """
        gxapi_cy.WrapCHIMERA._stacked_bar_plot(GXContext._get_tls_geo(), mview, data_group.encode(), offset_group.encode(), xvv, yvv, dvv, cvv, col, offset, offset_size, width)



    @classmethod
    def standard(cls, mview, vv, old, tol, min, max, title, unit, x0, y0, xs, ys):
        """
        Plot ASSAY Standard result in a graph window.

        :param mview:  View
        :param vv:     Standard data
        :param old:    Number of old samples in the `GXVV <geosoft.gxapi.GXVV>`
        :param tol:    Tolerance as a function of std dev
        :param min:    Minimum acceptable value
        :param max:    Maximum acceptable value
        :param title:  Title
        :param unit:   Unit
        :param x0:     X location (bottom left corner of graph)
        :param y0:     Y location
        :param xs:     Graph width
        :param ys:     Graph height
        :type mview:   GXMVIEW
        :type vv:      GXVV
        :type old:     int
        :type tol:     float
        :type min:     float
        :type max:     float
        :type title:   str
        :type unit:    str
        :type x0:      float
        :type y0:      float
        :type xs:      float
        :type ys:      float

        .. versionadded:: 5.0.7

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** If the tolerance is `rDUMMY <geosoft.gxapi.rDUMMY>`, then the minimum and maximum
        values are used, and must be specified.
        """
        gxapi_cy.WrapCHIMERA._standard(GXContext._get_tls_geo(), mview, vv, old, tol, min, max, title.encode(), unit.encode(), x0, y0, xs, ys)



    @classmethod
    def standard_view(cls, map, view, group, ipj, vvy, old, tol, min, max, title, unit, xs, vvx, vv_line, vv_fid, db, min_y, max_y):
        """
        Plot ASSAY Standard result in a graph window.
        :param map:      Map
        :param view:     New view name
        :param group:    New group name
        :param ipj:      `GXIPJ <geosoft.gxapi.GXIPJ>` object (undocumented in generated source;
                         presumably the projection placed on the new view - TODO confirm)
        :param vvy:      Standard data (`GXVV <geosoft.gxapi.GXVV>` Y)
        :param old:      Number of old samples in the `GXVV <geosoft.gxapi.GXVV>`
        :param tol:      Tolerance as a function of std dev
        :param min:      Minimum acceptable value
        :param max:      Maximum acceptable value
        :param title:    Title
        :param unit:     Unit
        :param xs:       Size X
        :param vvx:      `GXVV <geosoft.gxapi.GXVV>` X
        :param vv_line:  `GXVV <geosoft.gxapi.GXVV>` Line
        :param vv_fid:   `GXVV <geosoft.gxapi.GXVV>` Fid
        :param db:       Database
        :param min_y:    Returned MinY
        :param max_y:    Returned MaxY
        :type map:       GXMAP
        :type view:      str
        :type group:     str
        :type ipj:       GXIPJ
        :type vvy:       GXVV
        :type old:       int
        :type tol:       float
        :type min:       float
        :type max:       float
        :type title:     str
        :type unit:      str
        :type xs:        float
        :type vvx:       GXVV
        :type vv_line:   GXVV
        :type vv_fid:    GXVV
        :type db:        GXDB
        :type min_y:     float_ref
        :type max_y:     float_ref

        .. versionadded:: 8.3

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Same as `standard <geosoft.gxapi.GXCHIMERA.standard>` but plot in a new view.
        """
        # Output references are passed by value and written back on return.
        min_y.value, max_y.value = gxapi_cy.WrapCHIMERA._standard_view(GXContext._get_tls_geo(), map, view.encode(), group.encode(), ipj, vvy, old, tol, min, max, title.encode(), unit.encode(), xs, vvx, vv_line, vv_fid, db, min_y.value, max_y.value)



    @classmethod
    def tri_plot2(cls, mview, title, x1, y1, width, height, x_vv, y_vv, z_vv, m_vv, sym_font, sym_num_vv, sym_siz_vv, sym_col_vv, x_chan, y_chan, z_chan, xr_min, xr_max, yr_min, yr_max, zr_min, zr_max, use_xr_min, use_xr_max, use_yr_min, use_yr_max, use_zr_min, use_zr_max, grid, tic, grid_inc):
        """
        Plot the TriPlot on a map using symbol number, size and color VVs.

        :param mview:       View
        :param title:       Title
        :param x1:          X location (bottom left corner of box)
        :param y1:          Y location
        :param width:       Box width
        :param height:      Box height
        :param x_vv:        X channel
        :param y_vv:        Y channel
        :param z_vv:        Z channel
        :param m_vv:        Mask channel
        :param sym_font:    Decorated font name, "" for default symbol font (normally symbols.gfn)
        :param sym_num_vv:  Symbol numbers
        :param sym_siz_vv:  Symbol sizes
        :param sym_col_vv:  Colors if symbol number or color == 0, do not plot
        :param x_chan:      X channel name
        :param y_chan:      Y channel name
        :param z_chan:      Z channel name
        :param xr_min:      Min. X range value
        :param xr_max:      Max. X range value
        :param yr_min:      Min. Y range value
        :param yr_max:      Max. Y range value
        :param zr_min:      Min. Z range value
        :param zr_max:      Max. Z range value
        :param use_xr_min:  Use Min X Range selection?
        :param use_xr_max:  Use Max X Range selection?
        :param use_yr_min:  Use Min Y Range selection?
        :param use_yr_max:  Use Max Y Range selection?
        :param use_zr_min:  Use Min Z Range selection?
        :param use_zr_max:  Use Max Z Range selection?
        :param grid:        Plot Grid lines? (0: Just outside edge tics, 1: Grid lines).
        :param tic:         Tic Increment (in percent)
        :param grid_inc:    Grid increment (in percent)
        :type mview:        GXMVIEW
        :type title:        str
        :type x1:           float
        :type y1:           float
        :type width:        float
        :type height:       float
        :type x_vv:         GXVV
        :type y_vv:         GXVV
        :type z_vv:         GXVV
        :type m_vv:         GXVV
        :type sym_font:     str
        :type sym_num_vv:   GXVV
        :type sym_siz_vv:   GXVV
        :type sym_col_vv:   GXVV
        :type x_chan:       str
        :type y_chan:       str
        :type z_chan:       str
        :type xr_min:       float
        :type xr_max:       float
        :type yr_min:       float
        :type yr_max:       float
        :type zr_min:       float
        :type zr_max:       float
        :type use_xr_min:   int
        :type use_xr_max:   int
        :type use_yr_min:   int
        :type use_yr_max:   int
        :type use_zr_min:   int
        :type use_zr_max:   int
        :type grid:         int
        :type tic:          float
        :type grid_inc:     float

        .. versionadded:: 5.1.6

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** The mask channel `GXVV <geosoft.gxapi.GXVV>` is used for plotting precedence; those points
        with mask = dummy are plotted first, then overwritten with the
        non-masked values, so you don't get "good" points being covered
        up by masked values.
        The view scaling is not altered with any projection. The base view
        is best as the input.
        """
        gxapi_cy.WrapCHIMERA._tri_plot2(GXContext._get_tls_geo(), mview, title.encode(), x1, y1, width, height, x_vv, y_vv, z_vv, m_vv, sym_font.encode(), sym_num_vv, sym_siz_vv, sym_col_vv, x_chan.encode(), y_chan.encode(), z_chan.encode(), xr_min, xr_max, yr_min, yr_max, zr_min, zr_max, use_xr_min, use_xr_max, use_yr_min, use_yr_max, use_zr_min, use_zr_max, grid, tic, grid_inc)



    @classmethod
    def fixed_symbol_tri_plot(cls, mview, title, x1, y1, side, x_vv, y_vv, z_vv, m_vv, mask_col, symbol_font, symbol_number, symbol_size, symbol_angle, symbol_color, symbol_fill, db, line_vv, fid_vv, x_chan, y_chan, z_chan, grid, tic, grid_inc, overlay):
        """
        Plot a tri-plot using a single fixed symbol.
        Optional data masking with masking color.
        Optional database linking.

        :param mview:     View
        :param title:     Title
        :param x1:        X location (bottom left corner of box)
        :param y1:        Y location
        :param side:      Triangle side length
        :param x_vv:      X channel data
        :param y_vv:      Y channel data
        :param z_vv:      Z channel data
        :param m_vv:      Mask channel data
        :param mask_col:  Mask color; overrides symbol color where mask data is not dummy.
                          Pass an empty string to `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` for no plot.
        :param symbol_font:    Decorated font name, "" for default symbol font (normally symbols.gfn)
        :param symbol_number:  Symbol number (>=0)
        :param symbol_size:    Symbol size ( >=0)
        :param symbol_angle:   Symbol angle (-360 to 360)
        :param symbol_color:   Symbol color
        :param symbol_fill:    Symbol fill color
        :param db:             Database (source of data)
        :param line_vv:        Line handles for data
        :param fid_vv:         Fid values for data
        :param x_chan:         X channel name
        :param y_chan:         Y channel name
        :param z_chan:         Z channel name
        :param grid:           Plot Grid lines? (0: Just outside edge tics, 1: Grid lines).
        :param tic:            Tic Increment (in percent)
        :param grid_inc:       Grid increment (in percent)
        :param overlay:        Plot overlay ("" for none)
        :type mview:           GXMVIEW
        :type title:           str
        :type x1:              float
        :type y1:              float
        :type side:            float
        :type x_vv:            GXVV
        :type y_vv:            GXVV
        :type z_vv:            GXVV
        :type m_vv:            GXVV
        :type mask_col:        int
        :type symbol_font:     str
        :type symbol_number:   int
        :type symbol_size:     float
        :type symbol_angle:    float
        :type symbol_color:    int
        :type symbol_fill:     int
        :type db:              GXDB
        :type line_vv:         GXVV
        :type fid_vv:          GXVV
        :type x_chan:          str
        :type y_chan:          str
        :type z_chan:          str
        :type grid:            int
        :type tic:             float
        :type grid_inc:        float
        :type overlay:         str

        .. versionadded:: 8.0.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Plot a tri plot using a single fixed symbol.
        """
        gxapi_cy.WrapCHIMERA._fixed_symbol_tri_plot(GXContext._get_tls_geo(), mview, title.encode(), x1, y1, side, x_vv, y_vv, z_vv, m_vv, mask_col, symbol_font.encode(), symbol_number, symbol_size, symbol_angle, symbol_color, symbol_fill, db, line_vv, fid_vv, x_chan.encode(), y_chan.encode(), z_chan.encode(), grid, tic, grid_inc, overlay.encode())



    @classmethod
    def zone_coloured_tri_plot(cls, mview, title, x1, y1, side, x_vv, y_vv, z_vv, m_vv, mask_col, zone_data_vv, zone_file, symbol_font, symbol_number, symbol_size, symbol_angle, symbol_color, symbol_fill, fix_edge_color, db, line_vv, fid_vv, x_chan, y_chan, z_chan, grid, tic, grid_inc, overlay):
        """
        Plot a tri-plot using colors based on a zone file.
        Optional data masking with masking color.
        Optional database linking.

        :param mview:           View
        :param title:           Title
        :param x1:              X location (bottom left corner of box)
        :param y1:              Y location
        :param side:            Triangle side length
        :param x_vv:            X channel data
        :param y_vv:            Y channel data
        :param z_vv:            Z channel data
        :param m_vv:            Mask channel data
        :param mask_col:        Mask color; overrides symbol color where mask data is not dummy.
                                Pass an empty string to `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` for no plot.
        :param zone_data_vv:    Zone channel data
        :param zone_file:       Zone file name
        :param symbol_font:     Decorated font name, "" for default symbol font (normally symbols.gfn)
        :param symbol_number:   Symbol number (>=0)
        :param symbol_size:     Symbol size ( >=0)
        :param symbol_angle:    Symbol angle (-360 to 360)
        :param symbol_color:    Symbol color
        :param symbol_fill:     Symbol fill color
        :param fix_edge_color:  Fix symbol edge color?
        :param db:              Database (source of data)
        :param line_vv:         Line handles for data
        :param fid_vv:          Fid values for data
        :param x_chan:          X channel name
        :param y_chan:          Y channel name
        :param z_chan:          Z channel name
        :param grid:            Plot Grid lines? (0: Just outside edge tics, 1: Grid lines).
        :param tic:             Tic Increment (in percent)
        :param grid_inc:        Grid increment (in percent)
        :param overlay:         Plot overlay ("" for none)
        :type mview:            GXMVIEW
        :type title:            str
        :type x1:               float
        :type y1:               float
        :type side:             float
        :type x_vv:             GXVV
        :type y_vv:             GXVV
        :type z_vv:             GXVV
        :type m_vv:             GXVV
        :type mask_col:         int
        :type zone_data_vv:     GXVV
        :type zone_file:        str
        :type symbol_font:      str
        :type symbol_number:    int
        :type symbol_size:      float
        :type symbol_angle:     float
        :type symbol_color:     int
        :type symbol_fill:      int
        :type fix_edge_color:   int
        :type db:               GXDB
        :type line_vv:          GXVV
        :type fid_vv:           GXVV
        :type x_chan:           str
        :type y_chan:           str
        :type z_chan:           str
        :type grid:             int
        :type tic:              float
        :type grid_inc:         float
        :type overlay:          str

        .. versionadded:: 8.0.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Plot a tri plot using colors based on a zone file.
        """
        gxapi_cy.WrapCHIMERA._zone_coloured_tri_plot(GXContext._get_tls_geo(), mview, title.encode(), x1, y1, side, x_vv, y_vv, z_vv, m_vv, mask_col, zone_data_vv, zone_file.encode(), symbol_font.encode(), symbol_number, symbol_size, symbol_angle, symbol_color, symbol_fill, fix_edge_color, db, line_vv, fid_vv, x_chan.encode(), y_chan.encode(), z_chan.encode(), grid, tic, grid_inc, overlay.encode())



    @classmethod
    def string_classified_tri_plot(cls, mview, title, x1, y1, side, x_vv, y_vv, z_vv, m_vv, mask_col, class_vv, class_file, symbol_size_override, db, line_vv, fid_vv, x_chan, y_chan, z_chan, grid, tic, grid_inc, overlay):
        """
        Plot a tri-plot using symbols based on a symbol class file.
        Optional data masking with masking color.
        Optional database linking.

        :param mview:                 View
        :param title:                 Title
        :param x1:                    X location (bottom left corner of box)
        :param y1:                    Y location
        :param side:                  Triangle side length
        :param x_vv:                  X channel data
        :param y_vv:                  Y channel data
        :param z_vv:                  Z channel data
        :param m_vv:                  Mask channel data
        :param mask_col:              Mask color; overrides symbol color.
                                      Pass an empty string to `GXMVIEW.color <geosoft.gxapi.GXMVIEW.color>` for no plot.
        :param class_vv:              Class channel data
        :param class_file:            Class file (`GXTPAT <geosoft.gxapi.GXTPAT>`) name.
        :param symbol_size_override:  Symbol size override. Set to 0.0 to use class file symbol sizes.
        :param db:                    Database (source of data)
        :param line_vv:               Line handles for data
        :param fid_vv:                Fid values for data
        :param x_chan:                X channel name
        :param y_chan:                Y channel name
        :param z_chan:                Z channel name
        :param grid:                  Plot Grid lines? (0: Just outside edge tics, 1: Grid lines).
        :param tic:                   Tic Increment (in percent)
        :param grid_inc:              Grid increment (in percent)
        :param overlay:               Plot overlay ("" for none)
        :type mview:                  GXMVIEW
        :type title:                  str
        :type x1:                     float
        :type y1:                     float
        :type side:                   float
        :type x_vv:                   GXVV
        :type y_vv:                   GXVV
        :type z_vv:                   GXVV
        :type m_vv:                   GXVV
        :type mask_col:               int
        :type class_vv:               GXVV
        :type class_file:             str
        :type symbol_size_override:   float
        :type db:                     GXDB
        :type line_vv:                GXVV
        :type fid_vv:                 GXVV
        :type x_chan:                 str
        :type y_chan:                 str
        :type z_chan:                 str
        :type grid:                   int
        :type tic:                    float
        :type grid_inc:               float
        :type overlay:                str

        .. versionadded:: 8.0.1

        **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_

        **Note:** Plot a tri-plot using symbols based on a symbol class file.
        """
        gxapi_cy.WrapCHIMERA._string_classified_tri_plot(GXContext._get_tls_geo(), mview, title.encode(), x1, y1, side, x_vv, y_vv, z_vv, m_vv, mask_col, class_vv, class_file.encode(), symbol_size_override, db, line_vv, fid_vv, x_chan.encode(), y_chan.encode(), z_chan.encode(), grid, tic, grid_inc, overlay.encode())



### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend
### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer<file_sep>/geosoft/gxapi/GXDATALINKD.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
### endblock ClassImports
### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header
### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXDATALINKD(gxapi_cy.WrapDATALINKD):
    """
    GXDATALINKD class.

    DATALINK Display object.
    """

    def __init__(self, handle=0):
        super(GXDATALINKD, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXDATALINKD <geosoft.gxapi.GXDATALINKD>`

        :returns: A null `GXDATALINKD <geosoft.gxapi.GXDATALINKD>`
        :rtype:   GXDATALINKD
        """
        return GXDATALINKD()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    @classmethod
    def create_arc_lyr(cls, arc_lyr_file):
        """
        Create an `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` object from a ArcGIS LYR file

        :param arc_lyr_file:  Arc LYR file name
        :type arc_lyr_file:   str

        :returns:             `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` handle, terminates if creation fails
        :rtype:               GXDATALINKD

        ..
        versionadded:: 6.4

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Needs ArcEngine licence.
        """
        ret_val = gxapi_cy.WrapDATALINKD._create_arc_lyr(GXContext._get_tls_geo(), arc_lyr_file.encode())
        return GXDATALINKD(ret_val)



    @classmethod
    def create_arc_lyr_ex(cls, arc_lyr_file, o3d_group):
        """
        Create an `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` object from a ArcGIS LYR file

        :param arc_lyr_file:  Arc LYR file name
        :param o3d_group:     Display as 3D Group? (as opposed to bitmap on plane)
        :type arc_lyr_file:   str
        :type o3d_group:      int

        :returns:             `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` handle, terminates if creation fails
        :rtype:               GXDATALINKD

        .. versionadded:: 9.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Needs ArcEngine licence.
        """
        ret_val = gxapi_cy.WrapDATALINKD._create_arc_lyr_ex(GXContext._get_tls_geo(), arc_lyr_file.encode(), o3d_group)
        return GXDATALINKD(ret_val)



    @classmethod
    def create_arc_lyr_from_tmp(cls, arc_lyr_file):
        """
        Create an `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` object from a temporary ArcGIS LYR file

        :param arc_lyr_file:  Arc LYR file name
        :type arc_lyr_file:   str

        :returns:             `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` handle, terminates if creation fails
        :rtype:               GXDATALINKD

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Needs ArcEngine licence.
        """
        ret_val = gxapi_cy.WrapDATALINKD._create_arc_lyr_from_tmp(GXContext._get_tls_geo(), arc_lyr_file.encode())
        return GXDATALINKD(ret_val)



    @classmethod
    def create_arc_lyr_from_tmp_ex(cls, arc_lyr_file, o3d_group):
        """
        Create an `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` object from a temporary ArcGIS LYR file

        :param arc_lyr_file:  Arc LYR file name
        :param o3d_group:     Display as 3D Group? (as opposed to bitmap on plane)
        :type arc_lyr_file:   str
        :type o3d_group:      int

        :returns:             `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` handle, terminates if creation fails
        :rtype:               GXDATALINKD

        .. versionadded:: 9.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Needs ArcEngine licence.
        """
        ret_val = gxapi_cy.WrapDATALINKD._create_arc_lyr_from_tmp_ex(GXContext._get_tls_geo(), arc_lyr_file.encode(), o3d_group)
        return GXDATALINKD(ret_val)



    @classmethod
    def create_bing(cls, layer):
        """
        Create an `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` object for a BING dataset

        :param layer:  0 = Aerial, 1 = Road
        :type layer:   int

        :returns:      `GXDATALINKD <geosoft.gxapi.GXDATALINKD>` handle, terminates if creation fails
        :rtype:        GXDATALINKD

        .. versionadded:: 8.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        ret_val = gxapi_cy.WrapDATALINKD._create_bing(GXContext._get_tls_geo(), layer)
        return GXDATALINKD(ret_val)



    def get_extents(self, min_x, max_x, min_y, max_y):
        """
        Get the data extents of the DATALINK Display object.

        :param min_x:  Min X
        :param max_x:  Max X
        :param min_y:  Min Y
        :param max_y:  Max Y
        :type min_x:   float_ref
        :type max_x:   float_ref
        :type min_y:   float_ref
        :type max_y:   float_ref

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        # Output references are passed by value and written back on return.
        min_x.value, max_x.value, min_y.value, max_y.value = self._get_extents(min_x.value, max_x.value, min_y.value, max_y.value)



    def get_ipj(self, ipj):
        """
        Get the projection of the DATALINK Display object.

        :param ipj:  `GXIPJ <geosoft.gxapi.GXIPJ>` object to set the projection to
        :type ipj:   GXIPJ

        .. versionadded:: 6.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._get_ipj(ipj)



### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend
### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer<file_sep>/geosoft/gxapi/GXTRND.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
### endblock ClassImports
### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header
### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXTRND(gxapi_cy.WrapTRND):
    """
    GXTRND class.

    The `GXTRND <geosoft.gxapi.GXTRND>` methods are used to determine trend directions in database
    data by locating maxima and minima along lines and joining them in a
    specified direction. The resulting trend lines are appended to the database
    and used by gridding methods such as Bigrid and Rangrid to enforce features
    in the specified direction.
    """

    def __init__(self, handle=0):
        super(GXTRND, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXTRND <geosoft.gxapi.GXTRND>`

        :returns: A null `GXTRND <geosoft.gxapi.GXTRND>`
        :rtype:   GXTRND
        """
        return GXTRND()

    def is_null(self):
        """
        Check if this is a null (undefined) instance

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0



# Miscellaneous


    @classmethod
    def get_max_min(cls, vv_x, vv_y, vv_z, vv_xm, v_vym, v_vzm, window, trnd):
        """
        Find the max/min nodes in a line.
:param vv_x: X Channel :param vv_y: Y Channel :param vv_z: Data Channel :param vv_xm: X MaxMin (returned) :param v_vym: Y MaxMin (returned) :param v_vzm: Data MaxMin (returned) :param window: MaxMin Window :param trnd: :ref:`TRND_NODE` :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_xm: GXVV :type v_vym: GXVV :type v_vzm: GXVV :type window: float :type trnd: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Trend lines positions consist of X and Y VVs """ gxapi_cy.WrapTRND._get_max_min(GXContext._get_tls_geo(), vv_x, vv_y, vv_z, vv_xm, v_vym, v_vzm, window, trnd) @classmethod def get_mesh(cls, db, chan, window, max_length, mesh_vv, trnd): """ Get the lines in a trend mesh. :param db: Database :param chan: Selected channel :param window: MaxMin Window :param max_length: Maximum join length :param mesh_vv: `GXVV <geosoft.gxapi.GXVV>` of type GS_D2POINT (returned) :param trnd: :ref:`TRND_NODE` :type db: GXDB :type chan: str :type window: float :type max_length: float :type mesh_vv: GXVV :type trnd: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapTRND._get_mesh(GXContext._get_tls_geo(), db, chan.encode(), window, max_length, mesh_vv, trnd) @classmethod def trnd_db(cls, db, chan, window, angle, deviation, max_length, deflection, min_length, resample, br_angle): """ Uses a selected channel to find data trends in a database. 
:param db: Database handle :param chan: Selected channel :param window: MaxMin Window :param angle: Preferred angle, degrees CCW from X :param deviation: Allowed deviation :param max_length: Longest join :param deflection: Maximum deflection in join (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param min_length: Minimum length for trend lines (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param resample: Resampling distance (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param br_angle: Breaking angle, degrees CCW from X (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :type db: GXDB :type chan: str :type window: float :type angle: float :type deviation: float :type max_length: float :type deflection: float :type min_length: float :type resample: float :type br_angle: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapTRND._trnd_db(GXContext._get_tls_geo(), db, chan.encode(), window, angle, deviation, max_length, deflection, min_length, resample, br_angle) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXIMG.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXPG import GXPG ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXIMG(gxapi_cy.WrapIMG): """ GXIMG class. The `GXIMG <geosoft.gxapi.GXIMG>` class performs read and write operations on grid file data. 
When efficient access along both rows and columns is desired the `GXPG <geosoft.gxapi.GXPG>` class is recommended (see `GXPG <geosoft.gxapi.GXPG>` and `GXPGU <geosoft.gxapi.GXPGU>`); the `GXIMG <geosoft.gxapi.GXIMG>` is first created, then the `GXPG <geosoft.gxapi.GXPG>` is obtained from the `GXIMG <geosoft.gxapi.GXIMG>` using `get_pg <geosoft.gxapi.GXIMG.get_pg>`. **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` methods use the XGD DATs to access grid files in different formats. The characteristics of a grid can be controlled using decorations on a grid file name. For example: `create_new_file <geosoft.gxapi.GXIMG.create_new_file>`(`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`,1,100,100,"mag.grd"); -> creates a new grid file "mag.grd" with all defaults. `create_new_file <geosoft.gxapi.GXIMG.create_new_file>`(`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`,1,100,100,"mag.grd(GRD;comp=none)"); -> creates a new grid file "mag.grd" with no compression. `create_new_file <geosoft.gxapi.GXIMG.create_new_file>`(`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`,1,100,100,"mag.grd(GRD;comp=size;type=short"); -> creates a new grid file "mag.grd" compressed for size, numbers stored as 2-byte integers.. See :ref:`DAT_XGD`.DOC for information about file name decorations available for all `GXDAT <geosoft.gxapi.GXDAT>` types. Different grid types support different features. For example, not all grid types support projection information. Geosoft will always create a ``*.gi`` file that is used to store all such information that we require from a grid. If the grid does support this information, both the grid and the ``*.gi`` file will contain the information. 
""" def __init__(self, handle=0): super(GXIMG, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXIMG <geosoft.gxapi.GXIMG>` :returns: A null `GXIMG <geosoft.gxapi.GXIMG>` :rtype: GXIMG """ return GXIMG() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def average2(cls, grid_in, grid_out): """ Reduce the dimensions in a 2D pager by a factor of 2 :param grid_in: Name of source Grid :param grid_out: Name of output Grid :type grid_in: str :type grid_out: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method is useful for reducing the dimensions in a 2D pager by a factor of 2. The output pager retains the same origin, but the X and Y spacing is double that of the original. Essentially, the process removes all the even-indexed rows and columns, while leaving the locations of all the remaining data points in the "odd" rows and columns unchanged. The output values at the output data locations are created by performing an average of the original data point and its valid surrounding data points; what is essentially a 3x3 smoothing filter. """ gxapi_cy.WrapIMG._average2(GXContext._get_tls_geo(), grid_in.encode(), grid_out.encode()) def copy(self, im_go): """ Copy IMGs. :param im_go: Target `GXIMG <geosoft.gxapi.GXIMG>` :type im_go: GXIMG .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(im_go) @classmethod def create(cls, type, kx, width, height): """ Creates an `GXIMG <geosoft.gxapi.GXIMG>` not tied to a file at all :param type: Data type :ref:`GS_TYPES` :param kx: Grid orientation (KX): 1 (rows in X) -1 (rows in Y) :param width: Grid width :param height: Grid height :type type: int :type kx: int :type width: int :type height: int :returns: `GXIMG <geosoft.gxapi.GXIMG>` object :rtype: GXIMG .. versionadded:: 5.0.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Once destroyed all the data in this `GXIMG <geosoft.gxapi.GXIMG>` is lost. """ ret_val = gxapi_cy.WrapIMG._create(GXContext._get_tls_geo(), type, kx, width, height) return GXIMG(ret_val) @classmethod def create_file(cls, type, grid, mode): """ Creates an Image object tied to a grid file. :param type: Data type, :ref:`GS_TYPES` or `GS_TYPE_DEFAULT <geosoft.gxapi.GS_TYPE_DEFAULT>` to use native `GXDAT <geosoft.gxapi.GXDAT>` type. :param grid: Name of the Grid to link to :param mode: Grid file open mode :ref:`IMG_FILE` :type type: int :type grid: str :type mode: int :returns: `GXIMG <geosoft.gxapi.GXIMG>` object :rtype: GXIMG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** When the `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` data type is chosen the actual on-disk type of the input image will be used instead of `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` if the on-disk values represent color data as opposed to real numbers. 
""" ret_val = gxapi_cy.WrapIMG._create_file(GXContext._get_tls_geo(), type, grid.encode(), mode) return GXIMG(ret_val) @classmethod def create_mem(cls, type, kx, width, height): """ Creates an `GXIMG <geosoft.gxapi.GXIMG>` object that is backed only by memory. :param type: Data type, :ref:`GS_TYPES` :param kx: Grid orientation (KX): 1 (rows in X) -1 (rows in Y) :param width: Grid width :param height: Grid height :type type: int :type kx: int :type width: int :type height: int :returns: `GXIMG <geosoft.gxapi.GXIMG>` object :rtype: GXIMG .. versionadded:: 5.0.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Once destroyed all the data is lost. This is temporary. """ ret_val = gxapi_cy.WrapIMG._create_mem(GXContext._get_tls_geo(), type, kx, width, height) return GXIMG(ret_val) @classmethod def create_new_file(cls, type, kx, width, height, grid): """ Creates a new image file :param type: Data type, :ref:`GS_TYPES` Cannot be `GS_TYPE_DEFAULT <geosoft.gxapi.GS_TYPE_DEFAULT>` :param kx: Grid orientation (KX): 1 (rows in X) -1 (rows in Y) :param width: Grid width :param height: Grid height :param grid: Name of the Grid to link to :type type: int :type kx: int :type width: int :type height: int :type grid: str :returns: `GXIMG <geosoft.gxapi.GXIMG>` object :rtype: GXIMG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIMG._create_new_file(GXContext._get_tls_geo(), type, kx, width, height, grid.encode()) return GXIMG(ret_val) @classmethod def create_out_file(cls, type, grid, img): """ Creates an output image file using input image info. 
:param type: Data type, :ref:`GS_TYPES` or `GS_TYPE_DEFAULT <geosoft.gxapi.GS_TYPE_DEFAULT>` :param grid: Name of the Grid to link to :param img: Input Image for new image creation :type type: int :type grid: str :type img: GXIMG :returns: `GXIMG <geosoft.gxapi.GXIMG>` object :rtype: GXIMG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** When the `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` data type is chosen the actual on-disk type of the input image will be used instead of `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` if the on-disk values represent color data as opposed to real numbers. """ ret_val = gxapi_cy.WrapIMG._create_out_file(GXContext._get_tls_geo(), type, grid.encode(), img) return GXIMG(ret_val) def create_projected(self, ipj): """ Applies a projection to an image. :param ipj: Projection to apply :type ipj: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` now appears to be in the projected coordinate system space. """ self._create_projected(ipj) def create_projected2(self, ipj, cell_size): """ Applies a projection to an image, specify cell size. :param ipj: Projection to apply :param cell_size: Cell size :type ipj: GXIPJ :type cell_size: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` now appears to be in the projected coordinate system space, with the specified cell size. If the cell size is `rDUMMY <geosoft.gxapi.rDUMMY>` (`GS_R8DM <geosoft.gxapi.GS_R8DM>`), one is automatically calculated, as with `create_projected <geosoft.gxapi.GXIMG.create_projected>`. 
""" self._create_projected2(ipj, cell_size) def create_projected3(self, ipj, cell_size, exp_pct): """ Same as `create_projected2 <geosoft.gxapi.GXIMG.create_projected2>`, but set expansion of bounds. :param ipj: Projection to apply :param cell_size: Cell size :param exp_pct: Expansion percent (>=0). :type ipj: GXIPJ :type cell_size: float :type exp_pct: float .. versionadded:: 6.3.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXIMG <geosoft.gxapi.GXIMG>` now appears to be in the projected coordinate system space, with the specified cell size. If the cell size is `rDUMMY <geosoft.gxapi.rDUMMY>` (`GS_R8DM <geosoft.gxapi.GS_R8DM>`), one is automatically calculated, as with `create_projected <geosoft.gxapi.GXIMG.create_projected>`. The expansion percent expands the bounds of the projected grid in order to allow for the curving of bounding edges. Normally, edges are sampled in order to allow for curving, but this parameter is set to 1.0 (for 1 percent) in the `create_projected <geosoft.gxapi.GXIMG.create_projected>` and `create_projected2 <geosoft.gxapi.GXIMG.create_projected2>` wrappers, and will generally create a white/dummy border around the new grid. This new method allows you to specify the expansion, or turn it off (by setting it to 0). If the value is set to `rDUMMY <geosoft.gxapi.rDUMMY>`, then expansion is left at 1.0, the legacy behaviour. """ self._create_projected3(ipj, cell_size, exp_pct) def extent(self, min_x, min_y, max_x, max_y): """ Get the img extents :param min_x: X minimum :param min_y: Y minimum :param max_x: X maximum :param max_y: Y maximum :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref .. 
versionadded:: 9.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min_x.value, min_y.value, max_x.value, max_y.value = self._extent(min_x.value, min_y.value, max_x.value, max_y.value) def geth_pg(self): """ Get the actual pager of a grid. :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `get_pg <geosoft.gxapi.GXIMG.get_pg>` to get just a copy of the grid's pager. """ ret_val = self._geth_pg() return GXPG(ret_val) def get_info(self, dx, dy, xo, yo, rot): """ Retrieves location information about this image. :param dx: X element separation :param dy: Y element separation :param xo: X location of first point :param yo: Y location of first point :param rot: Grid X axis rotation deg. CCW from reference X :type dx: float_ref :type dy: float_ref :type xo: float_ref :type yo: float_ref :type rot: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ dx.value, dy.value, xo.value, yo.value, rot.value = self._get_info(dx.value, dy.value, xo.value, yo.value, rot.value) def get_ipj(self, ipj): """ Get the projection of a grid. :param ipj: Projection of the grid :type ipj: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_ipj(ipj) def get_meta(self, meta): """ Get the metadata of a grid. :param meta: Metadata of the grid :type meta: GXMETA .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_meta(meta) def get_pg(self, pg): """ Get a copy of the pager of a grid. 
:param pg: `GXPG <geosoft.gxapi.GXPG>` object to hold pager of the grid :type pg: GXPG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `geth_pg <geosoft.gxapi.GXIMG.geth_pg>` to get the actual pager of the grid. """ self._get_pg(pg) def get_projected_cell_size(self, ipj, cell): """ Returns default cell size from projected image. :param ipj: Projection to apply :param cell: Returned cell size :type ipj: GXIPJ :type cell: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns the cell size calculated by CreateProjected_PJIMG, or by `create_projected2 <geosoft.gxapi.GXIMG.create_projected2>` when `GS_R8DM <geosoft.gxapi.GS_R8DM>` is entered as the optional cell size. No inheritance is actually performed to the input `GXIMG <geosoft.gxapi.GXIMG>`. """ cell.value = self._get_projected_cell_size(ipj, cell.value) def get_tr(self, tr): """ Get the trend information from a grid. :param tr: Trend information from the grid :type tr: GXTR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_tr(tr) def element_type(self, xg_dor_img): """ Returns the element type. :param xg_dor_img: 0 for XGD, 1 for `GXIMG <geosoft.gxapi.GXIMG>` :type xg_dor_img: int :returns: Element type :rtype: int .. versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._element_type(xg_dor_img) return ret_val def e_type(self): """ Returns the element type. :returns: Element type :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Same as sElementType_IMG(img,1) """ ret_val = self._e_type() return ret_val def get_def_itr(self, itr): """ Get default transform, if it exists :param itr: Transform :type itr: GXITR :returns: 0 - Okay 1 - No default possible/available :rtype: int .. versionadded:: 5.0.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_def_itr(itr) return ret_val def is_colour(self): """ Is this a Geosoft color grid? :rtype: bool .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_colour() return ret_val @classmethod def is_valid_img_file(cls, file): """ Is this a valid `GXIMG <geosoft.gxapi.GXIMG>` file? :param file: File to check :type file: str :rtype: bool .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIMG._is_valid_img_file(GXContext._get_tls_geo(), file.encode()) return ret_val @classmethod def is_valid_img_file_ex(cls, file, err_msg): """ Is this a valid `GXIMG <geosoft.gxapi.GXIMG>` file? Returns error message if it cannot be opened for any reason. :param file: File to check :param err_msg: Error message registered if unable to open :type file: str :type err_msg: str_ref :rtype: bool .. versionadded:: 8.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, err_msg.value = gxapi_cy.WrapIMG._is_valid_img_file_ex(GXContext._get_tls_geo(), file.encode(), err_msg.value.encode()) return ret_val def ne(self): """ Gets the # of elements in the optimal KX direction. 
:returns: # of elements in the optimal KX direction :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._ne() return ret_val def inherit(self, ipj, cell): """ Inherit a projection/new cell size on the `GXIMG <geosoft.gxapi.GXIMG>`. :param ipj: Projection :param cell: Optional cell size :type ipj: GXIPJ :type cell: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If cell size is `GS_R8DM <geosoft.gxapi.GS_R8DM>`, then "nice" values for the cell size of the new projected grid will be determined so that the new grid has about the same number of cells as the old. If the cell size is specified, the inheritance will always work, even if the input `GXIPJ <geosoft.gxapi.GXIPJ>` is identical to the original `GXIPJ <geosoft.gxapi.GXIPJ>`, and the cell boundaries will be forced to be aligned with the new cell size. """ self._inherit(ipj, cell) def inherit_img(self, im_gs): """ Make a grids match in size and coordinate system :param im_gs: Source `GXIMG <geosoft.gxapi.GXIMG>` :type im_gs: GXIMG .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._inherit_img(im_gs) def nv(self): """ Gets the # of vectors in the optimal KX direction. :returns: # of vectors in the optimal KX direction :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._nv() return ret_val def nx(self): """ Gets the # of X elements. :returns: # of X elements. :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._nx() return ret_val def ny(self): """ Gets the # of Y elements. :returns: # of Y elements. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._ny() return ret_val def query_int(self, query): """ Query information about the `GXIMG <geosoft.gxapi.GXIMG>` :param query: :ref:`IMG_QUERY` :type query: int :returns: Information requested, dummy if unknown or invalid. :rtype: int .. versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** You can call either funtion to retrieve any data, int or real. """ ret_val = self._query_int(query) return ret_val def query_kx(self): """ Asks the `GXIMG <geosoft.gxapi.GXIMG>` for the most efficient way to access the data. :returns: -1 by columns, 1 by rows, 0 rows and columns are equally efficient. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._query_kx() return ret_val def set_def_itr(self, itr): """ Set default transform :param itr: Transform :type itr: GXITR :returns: 0 - Okay 1 - No default possible/available :rtype: int .. versionadded:: 5.0.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._set_def_itr(itr) return ret_val @classmethod def user_preference_to_plot_as_colour_shaded_grid(cls): """ Returns the global setting. :returns: 0 - User wishes to plot grids as regular (flat) grid 1 - User wishes to plot grids as color-shaded grids :rtype: int .. 
versionadded:: 7.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIMG._user_preference_to_plot_as_colour_shaded_grid(GXContext._get_tls_geo()) return ret_val def load_img(self, im_gi): """ Loads an `GXIMG <geosoft.gxapi.GXIMG>` into a master `GXIMG <geosoft.gxapi.GXIMG>`. :param im_gi: `GXIMG <geosoft.gxapi.GXIMG>` to load :type im_gi: GXIMG .. versionadded:: 5.0.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The cell sizes and projections must be the same. """ self._load_img(im_gi) def load_into_pager(self): """ Load `GXIMG <geosoft.gxapi.GXIMG>` data from file into a pager to increase access time. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._load_into_pager() def opt_kx(self, kx): """ Force optimal KX as desired. :param kx: KX -1 by column 1 by row :type kx: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This will force loading an image into a `GXPG <geosoft.gxapi.GXPG>` if it is not already accessible in the direction requested. Subsequent calls to methods that use the optimal KX will use the KX set here. """ self._opt_kx(kx) def read_v(self, v, be, ne, vv): """ Read a vector in the optimal KX direction. :param v: Vector to Read :param be: Begining element # to read (0 is the first) :param ne: # elements to read (0 for whole vector) :param vv: `GXVV <geosoft.gxapi.GXVV>` handle :type v: int :type be: int :type ne: int :type vv: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_v(v, be, ne, vv) def read_x(self, bx, by, ny, vv): """ Read a column (constant X) :param bx: X column :param by: Start Y to read :param ny: # Y to read (0 for whole vector) :type bx: int :type by: int :type ny: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_x(bx, by, ny, vv) def read_y(self, by, bx, nx, vv): """ Read a row (constant Y) :param by: Y row :param bx: Start X to read :param nx: # X to read (0 for whole vector) :type by: int :type bx: int :type nx: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._read_y(by, bx, nx, vv) @classmethod def refresh_gi(cls, grid): """ Refresh the GI of a grid after it has moved or changed. :param grid: Grid name :type grid: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapIMG._refresh_gi(GXContext._get_tls_geo(), grid.encode()) def relocate(self, min_x, min_y, max_x, max_y, asp): """ Re-locate a grid image. :param min_x: Area X minimum :param min_y: Area Y minimum :param max_x: Area X maximum :param max_y: Area Y maximum :param asp: :ref:`IMG_RELOCATE` :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type asp: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._relocate(min_x, min_y, max_x, max_y, asp) @classmethod def report(cls, grid, wa, force, decimals, title): """ Writes grid info report to a file :param grid: Grid name :param wa: Text file to write to :param force: Recalc statistics (0 - no; 1 - yes) :param decimals: Number of decimals to put in results :param title: Title for report :type grid: str :type wa: GXWA :type force: int :type decimals: int :type title: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapIMG._report(GXContext._get_tls_geo(), grid.encode(), wa, force, decimals, title.encode()) @classmethod def report_csv(cls, grid, wa, force, decimals, header): """ Writes grid info as a line to a CSV file :param grid: Grid name :param wa: Text file to write to :param force: Recalc statistics (0 - no; 1 - yes) :param decimals: Number of decimals to put in results :param header: Write header line (0 - no; 1 - yes)? :type grid: str :type wa: GXWA :type force: int :type decimals: int :type header: int .. versionadded:: 6.4.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Appends the stats as a CSV line to the input text file. The header line should only be written to a new text file. """ gxapi_cy.WrapIMG._report_csv(GXContext._get_tls_geo(), grid.encode(), wa, force, decimals, header) def get_display_property(self, query): """ Gets display information about this image. :param query: :ref:`IMG_DISPLAY_PROPERTY` :type query: int :rtype: float .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** These values do not necessarily match the grid as currently displayed; they are retrieved and used whenever the grid is displayed in a viewer or map, and are used (for instance) to set the default values in the Display Grid GX. Some values return only values of 0 or 1. """ ret_val = self._get_display_property(query) return ret_val def get_z(self, x, y): """ Gets the grid value at a point :param x: X location in the grid projection :param y: Y location in the grid projection :type x: float :type y: float :returns: Grid value :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_z(x, y) return ret_val def query_double(self, query): """ Query information about the `GXIMG <geosoft.gxapi.GXIMG>` :param query: :ref:`IMG_QUERY` :type query: int :returns: Information requested, dummy if unknown or invalid. :rtype: float .. versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** You can call either funtion to retrieve any data, int or real. """ ret_val = self._query_double(query) return ret_val def set_display_property(self, query, value): """ Sets display information about this image. :param query: :ref:`IMG_DISPLAY_PROPERTY` :param value: Value :type query: int :type value: float .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** These values do NOT take immediate effect; they are retrieved and used whenever the grid is displayed in a viewer or map, and are used (for instance) to set the default values in the Display Grid GX. Some values take on only values of 0 or 1. 
""" self._set_display_property(query, value) def get_shadow_grid_path(self, path): """ Gets the name of a view. :param path: Shadow grid path name returned :type path: str_ref .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ path.value = self._get_shadow_grid_path(path.value.encode()) def set_shadow_grid_path(self, path): """ Sets display information about this image. :param path: Grid path for the shadow grid :type path: str .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This values does NOT take immediate effect; it is retrieved and used whenever the grid is displayed in a viewer or map, and is used (for instance) to set the default value in the Display Grid GX. """ self._set_shadow_grid_path(path.encode()) def set_grid_unchanged(self): """ Mark the grid as unchanged so it will not output lineage .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_grid_unchanged() def set_info(self, dx, dy, xo, yo, rot): """ Sets location information about this image. :param dx: X element separation :param dy: Y element separation :param xo: X location of first point :param yo: Y location of first point :param rot: Grid X axis rotation deg. CCW from reference X :type dx: float :type dy: float :type xo: float :type yo: float :type rot: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Calls to this function should be made BEFORE calls to `set_ipj <geosoft.gxapi.GXIMG.set_ipj>`, as the latter function sets up the bounding rectangle in the metadata. 
""" self._set_info(dx, dy, xo, yo, rot) def set_ipj(self, ipj): """ Set the projection of a grid. :param ipj: Projection :type ipj: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Calls to this function should be made AFTER calls to `set_info <geosoft.gxapi.GXIMG.set_info>`, as `set_ipj <geosoft.gxapi.GXIMG.set_ipj>` sets up the bounding rectangle in the metadata. """ self._set_ipj(ipj) def set_meta(self, meta): """ Set the metadata of a grid. :param meta: Metadata to add to the grid :type meta: GXMETA .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta(meta) def set_pg(self, pg): """ Copy a pager into the pager of a grid. :param pg: Pager object to copy into the pager of the grid :type pg: GXPG .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_pg(pg) def set_tr(self, tr): """ Set the trend information to a grid. :param tr: Trend information to set for the grid :type tr: GXTR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_tr(tr) @classmethod def sync(cls, grid): """ Syncronize the Metadata for this Grid :param grid: Grid name :type grid: str .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapIMG._sync(GXContext._get_tls_geo(), grid.encode()) def write_v(self, v, be, ne, vv): """ Write a vector in the optimal KX direction. 
        :param v:   Vector to write
        :param be:  Beginning element to write (0 is the first)
        :param ne:  # elements to write (0 for whole vector)
        :param vv:  `GXVV <geosoft.gxapi.GXVV>` handle
        :type  v:   int
        :type  be:  int
        :type  ne:  int
        :type  vv:  GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._write_v(v, be, ne, vv)

    def write_x(self, bx, by, ny, vv):
        """
        Write a column (constant X)

        :param bx:  X column
        :param by:  Start Y to write
        :param ny:  # Y to write (0 for whole vector)
        :type  bx:  int
        :type  by:  int
        :type  ny:  int
        :type  vv:  GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._write_x(bx, by, ny, vv)

    def write_y(self, by, bx, nx, vv):
        """
        Write a row (constant Y)

        :param by:  Y row
        :param bx:  Start X to write
        :param nx:  # X to write (0 for whole vector)
        :type  by:  int
        :type  bx:  int
        :type  nx:  int
        :type  vv:  GXVV

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._write_y(by, bx, nx, vv)

    def set_double_parameter(self, name, value):
        """
        Store a real parameter in an `GXIMG <geosoft.gxapi.GXIMG>` object

        :param name:   Parameter name (case insensitive)
        :param value:  Parameter value to store
        :type  name:   str
        :type  value:  float

        .. versionadded:: 8.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_double_parameter(name.encode(), value)

    def get_double_parameter(self, name):
        """
        Retrieve a real parameter stored in an `GXIMG <geosoft.gxapi.GXIMG>` object

        :param name:  Parameter name (case insensitive)
        :type  name:  str

        :returns:     Parameter value, `rDUMMY <geosoft.gxapi.rDUMMY>` if not found.
        :rtype:       float

        ..
versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_double_parameter(name.encode()) return ret_val # Faults def number_of_faults(self): """ Returns the number of individual fault traces stored in the IMG :rtype: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Faults are paths of (X, Y) locations, and are either polylines or polygons. """ ret_val = self._number_of_faults() return ret_val def get_fault(self, index, xVV, yVV, type): """ Retrieve a fault trace :param index: Fault index (0 to N-1) :param xVV: X locations `GXVV <geosoft.gxapi.GXVV>` (REAL) :param yVV: Y locations `GXVV <geosoft.gxapi.GXVV>` (REAL) :param type: Fault type :ref:`IMG_FAULT` :type index: int :type xVV: GXVV :type yVV: GXVV :type type: int_ref .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ type.value = self._get_fault(index, xVV, yVV, type.value) def add_fault(self, xVV, yVV, type): """ Add a fault trace :param xVV: X locations `GXVV <geosoft.gxapi.GXVV>` :param yVV: Y locations `GXVV <geosoft.gxapi.GXVV>` :param type: Fault type :ref:`IMG_FAULT` :type xVV: GXVV :type yVV: GXVV :type type: int :returns: index of the added fault :rtype: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The X and Y values cannot be dummies. Polygons require at least three locations, Polylines require at least two. """ ret_val = self._add_fault(xVV, yVV, type) return ret_val def delete_fault(self, index): """ Delete a fault trace :param index: Fault index (0 to N-1) :type index: int .. 
versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_fault(index) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXMESHUTIL.rst .. _GXMESHUTIL: GXMESHUTIL class ================================== .. autoclass:: geosoft.gxapi.GXMESHUTIL :members: .. _SURFACE_CLIP_MODE: SURFACE_CLIP_MODE constants ----------------------------------------------------------------------- Surface Clip Mode .. autodata:: geosoft.gxapi.SURFACE_CLIP_ABOVE :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_CLIP_ABOVE .. autodata:: geosoft.gxapi.SURFACE_CLIP_BELOW :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_CLIP_BELOW .. autodata:: geosoft.gxapi.SURFACE_CLIP_BOTH :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_CLIP_BOTH .. _SURFACE_CLIP_STATUS: SURFACE_CLIP_STATUS constants ----------------------------------------------------------------------- Surface Clip Return Code .. autodata:: geosoft.gxapi.SURFACE_CLIP_SUCCESS :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_CLIP_SUCCESS .. autodata:: geosoft.gxapi.SURFACE_CLIP_SUCCESS_EMPTY :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_CLIP_SUCCESS_EMPTY .. autodata:: geosoft.gxapi.SURFACE_CLIP_FAIL :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_CLIP_FAIL .. _SURFACE_PROJECTION_METHOD: SURFACE_PROJECTION_METHOD constants ----------------------------------------------------------------------- Surface projection method .. autodata:: geosoft.gxapi.SURFACE_PROJECTION_MAXIMUM :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_PROJECTION_MAXIMUM .. autodata:: geosoft.gxapi.SURFACE_PROJECTION_MINIMUM :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_PROJECTION_MINIMUM .. 
autodata:: geosoft.gxapi.SURFACE_PROJECTION_AVERAGE :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_PROJECTION_AVERAGE .. _SURFACE_TRANSFORMATION_METHOD: SURFACE_TRANSFORMATION_METHOD constants ----------------------------------------------------------------------- Surface transformation method .. autodata:: geosoft.gxapi.SURFACE_TRANSFORMATION_METHOD_SHIFT :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_TRANSFORMATION_METHOD_SHIFT .. autodata:: geosoft.gxapi.SURFACE_TRANSFORMATION_METHOD_SCALE :annotation: .. autoattribute:: geosoft.gxapi.SURFACE_TRANSFORMATION_METHOD_SCALE <file_sep>/geosoft/gxdesktop/hello_world.py import geosoft.gxapi as gxapi import geosoft.gxpy as gxpy # a python script must have a rungx(), which is executed by OM when the script is run def rungx(): # get the current gx context gxp = gxpy.gx.gx() # say hello to the user identified by gxp.gid. gxapi.GXSYS.display_message("GX Python", "Hello {}".format(gxp.gid)) if __name__ == "__main__": gxc = gxpy.gx.GXpy() print('Hello {}'.format(gxc.gid))<file_sep>/geosoft/gxapi/GXPROJ.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPROJ(gxapi_cy.WrapPROJ): """ GXPROJ class. Project functions """ def __init__(self, handle=0): super(GXPROJ, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPROJ <geosoft.gxapi.GXPROJ>` :returns: A null `GXPROJ <geosoft.gxapi.GXPROJ>` :rtype: GXPROJ """ return GXPROJ() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Drag-and-drop methods @classmethod def drop_map_clip_data(cls, hglobal): """ Drop Map clipboard data in the current project (workspace background) :param hglobal: Handle to Global Clipboard data :type hglobal: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapPROJ._drop_map_clip_data(GXContext._get_tls_geo(), hglobal) # Miscellaneous @classmethod def add_document(cls, name, type, display): """ Adds (and opens) a document file in the current project. :param name: Document name :param type: Type of document to add :param display: :ref:`PROJ_DISPLAY` :type name: str :type type: str :type display: int :returns: 0 - Ok 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The passed file name must be a valid file name complete with an extension and qualifiers (if applicable). The type string can be one of the following: Database Grid Map 3DView Geosurface Voxel VoxelInversion GMS3D GMS2D """ ret_val = gxapi_cy.WrapPROJ._add_document(GXContext._get_tls_geo(), name.encode(), type.encode(), display) return ret_val @classmethod def add_document_without_opening(cls, name, type): """ Adds (and opens) a document file in the current project. :param name: Document name :param type: Type of document to add :type name: str :type type: str :returns: 0 - Ok 1 - Error :rtype: int .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** The passed file name must be a valid file name complete with an extension and qualifiers (if applicable). The type string can be one of the following: Database Grid Map 3DView Geosurface Voxel VoxelInversion GMS3D GMS2D """ ret_val = gxapi_cy.WrapPROJ._add_document_without_opening(GXContext._get_tls_geo(), name.encode(), type.encode()) return ret_val @classmethod def add_document_include_meta(cls, name, type, meta, display): """ Adds (and opens) a document file in the current project. :param name: Document name :param type: Type of document to add :param meta: Meta file to load :param display: :ref:`PROJ_DISPLAY` :type name: str :type type: str :type meta: str :type display: int :returns: 0 - Ok 1 - Error :rtype: int .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The passed file name must be a valid file name complete with an extension and qualifiers (if applicable). The type string can be one of the following: Database Grid Map 3DView Geosurface Voxel VoxelInversion GMS3D GMS2D """ ret_val = gxapi_cy.WrapPROJ._add_document_include_meta(GXContext._get_tls_geo(), name.encode(), type.encode(), meta.encode(), display) return ret_val @classmethod def add_grid_document(cls, name, colors, method, display): """ Adds (and opens) a grid document file in the current project with a particular colour distribution and colour file. :param name: Document name :param colors: Colour zone file to use :param method: Colour method to use - one of the ITR_ZONE_XXXX values :param display: :ref:`PROJ_DISPLAY` :type name: str :type colors: str :type method: int :type display: int :returns: 0 - Ok 1 - Error :rtype: int .. 
versionadded:: 9.7 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The passed file name must be a valid Grid document with an extension and qualifiers (if applicable). """ ret_val = gxapi_cy.WrapPROJ._add_grid_document(GXContext._get_tls_geo(), name.encode(), colors.encode(), method, display) return ret_val @classmethod def get_command_environment(cls): """ The current command environment :returns: :ref:`COMMAND_ENV` Notes We are moving towards embedded tools and menus and this setting can be queried from the project to determine how specific commands should react. Only 3D viewer is currently making use of this. If new Command environment enum values are added, then update the iGetCommandEnvironment_PROJ() function in geogxgui\\gxx_app.cpp and in the COMMAND_ENVIRONMENT enums in geoengine.core\\include\\obj\\objgp.h. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapPROJ._get_command_environment(GXContext._get_tls_geo()) return ret_val @classmethod def list_documents(cls, gvv, type): """ Fills a `GXVV <geosoft.gxapi.GXVV>` with documents of a certain type. :param gvv: `GXVV <geosoft.gxapi.GXVV>` of type -`STR_FILE <geosoft.gxapi.STR_FILE>` :param type: Type of document to obtain :type gvv: GXVV :type type: str :returns: The number of documents listed in the `GXVV <geosoft.gxapi.GXVV>`. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** The type string can be one of the following: Database List Databases. Grid List Grids. Map List Maps. 3DView List 3D Views. Geosurface List Geosurfaces. Voxel List Voxels. VoxelInversion List VOXI Documents. `GXMXD <geosoft.gxapi.GXMXD>` List ArcGIS MXDs. GMS3D List GM-SYS 3D Models. GMS2D List GM-SYS 2D Models. All Lists all files. """ ret_val = gxapi_cy.WrapPROJ._list_documents(GXContext._get_tls_geo(), gvv, type.encode()) return ret_val @classmethod def list_loaded_documents(cls, gvv, type): """ Fills a `GXVV <geosoft.gxapi.GXVV>` with loaded documents of a certain type. :param gvv: `GXVV <geosoft.gxapi.GXVV>` of type -`STR_FILE <geosoft.gxapi.STR_FILE>`> :param type: Type of document to obtain :type gvv: GXVV :type type: str :returns: The number of loaded documents listed in the `GXVV <geosoft.gxapi.GXVV>`. :rtype: int .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The type string can be one of the following: Database List Databases. Grid List Grids. Map List Maps. 3DView List 3D Views. Geosurface List Geosurfaces. Voxel List Voxels. VoxelInversion List VOXI Documents. `GXMXD <geosoft.gxapi.GXMXD>` List ArcGIS MXDs. GMS3D List GM-SYS 3D Models. GMS2D List GM-SYS 2D Models. All Lists all files. """ ret_val = gxapi_cy.WrapPROJ._list_loaded_documents(GXContext._get_tls_geo(), gvv, type.encode()) return ret_val @classmethod def current_document(cls, name, type): """ Get the name and type of the loaded document with focus. :param name: Name (empty if none currently loaded) :param type: Type :type name: str_ref :type type: str_ref .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" name.value, type.value = gxapi_cy.WrapPROJ._current_document(GXContext._get_tls_geo(), name.value.encode(), type.value.encode()) @classmethod def current_document_of_type(cls, name, type): """ Get the name of a loaded document of a specific type. :param name: Name (empty if none currently loaded) :param type: Type :type name: str_ref :type type: str .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ name.value = gxapi_cy.WrapPROJ._current_document_of_type(GXContext._get_tls_geo(), name.value.encode(), type.encode()) @classmethod def list_tools(cls, lst, type): """ Fills an `GXLST <geosoft.gxapi.GXLST>` object with tools of a certain type and notes the current visibility setting. :param lst: `GXLST <geosoft.gxapi.GXLST>` object to hold list :param type: :ref:`TOOL_TYPE` :type lst: GXLST :type type: int :returns: The number of tools found. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** GX will terminate if there is an error. `GXLST <geosoft.gxapi.GXLST>` object will hold the tool name in the name column and include whether the tool is currently visible in the value column (1=visible, 0-hidden). """ ret_val = gxapi_cy.WrapPROJ._list_tools(GXContext._get_tls_geo(), lst, type) return ret_val @classmethod def remove_document(cls, name): """ Removes (and closes if visible) a document from the current project. :param name: Document name :type name: str :returns: 0 - Ok 1 - Document not found in project :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The passed file name must be a valid file name complete with an extension and qualifiers (if applicable). """ ret_val = gxapi_cy.WrapPROJ._remove_document(GXContext._get_tls_geo(), name.encode()) return ret_val @classmethod def remove_tool(cls, name): """ Removes (and closes if visible) a auxiliary tool from the current project. :param name: Tool name :type name: str :returns: 0 - Ok 1 - Tool not found in project :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Nothing """ ret_val = gxapi_cy.WrapPROJ._remove_tool(GXContext._get_tls_geo(), name.encode()) return ret_val @classmethod def save_close_documents(cls, type): """ Saves and closes (if visible) documents contained in the current project. :param type: Type of document to save / close :type type: str :returns: 0 - Ok -1 - User hit cancel in save dialog 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This wrapper brings up the save dialog tool to allow the user to save the modified documents for this project. Only documents that have actually changed will be listed. The type string can be one of the following: Database Grid Map 3DView Geosurface Voxel VoxelInversion GMS3D GMS2D All """ ret_val = gxapi_cy.WrapPROJ._save_close_documents(GXContext._get_tls_geo(), type.encode()) return ret_val @classmethod def get_name(cls, name): """ Return the name of the project file. 
:param name: name :type name: str_ref .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Return the name of the project file. """ name.value = gxapi_cy.WrapPROJ._get_name(GXContext._get_tls_geo(), name.value.encode()) @classmethod def get_server_and_project_guid(cls, server_id, project_id): """ Return the unique identifier of the project and server. :param server_id: Server ID :param project_id: Project ID :type server_id: str_ref :type project_id: str_ref .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Return the unique identifier of the project and server. """ server_id.value, project_id.value = gxapi_cy.WrapPROJ._get_server_and_project_guid(GXContext._get_tls_geo(), server_id.value.encode(), project_id.value.encode()) @classmethod def set_central_project_information(cls, server_guid, project_guid, branch_id, revision_id, cs_info): """ Set Central project information. :param server_guid: server guid :param project_guid: project guid :param branch_id: branch ID :param revision_id: revision ID :param cs_info: Coordinate system information, either EPSG or WKT :type server_guid: str :type project_guid: str :type branch_id: int :type revision_id: int :type cs_info: str .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Set Central project information. 
""" gxapi_cy.WrapPROJ._set_central_project_information(GXContext._get_tls_geo(), server_guid.encode(), project_guid.encode(), branch_id, revision_id, cs_info.encode()) @classmethod def get_central_project_information(cls, instance, project, crs, branch, rev_id, rev_date, rev_note, rev_stage, rev_author, rev_server_url, rev_proj_url): """ Get Central project information. :param instance: Instance name :param project: Project name :param crs: Coordinate Reference System :param branch: Branch name :param rev_id: Revision Id :param rev_date: Revision date :param rev_note: Revision note :param rev_stage: Revision stage :param rev_author: Revision author :param rev_server_url: Revision server url :param rev_proj_url: Revision project url :type instance: str_ref :type project: str_ref :type crs: str_ref :type branch: str_ref :type rev_id: str_ref :type rev_date: str_ref :type rev_note: str_ref :type rev_stage: str_ref :type rev_author: str_ref :type rev_server_url: str_ref :type rev_proj_url: str_ref .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Get Central project information. """ instance.value, project.value, crs.value, branch.value, rev_id.value, rev_date.value, rev_note.value, rev_stage.value, rev_author.value, rev_server_url.value, rev_proj_url.value = gxapi_cy.WrapPROJ._get_central_project_information(GXContext._get_tls_geo(), instance.value.encode(), project.value.encode(), crs.value.encode(), branch.value.encode(), rev_id.value.encode(), rev_date.value.encode(), rev_note.value.encode(), rev_stage.value.encode(), rev_author.value.encode(), rev_server_url.value.encode(), rev_proj_url.value.encode()) @classmethod def save_document_view(cls, name, meta_file): """ Save document view to a file. 
:param name: Document name :param meta_file: save meta to file :type name: str :type meta_file: str .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Save document view to a file. """ gxapi_cy.WrapPROJ._save_document_view(GXContext._get_tls_geo(), name.encode(), meta_file.encode()) @classmethod def get_default_project_path(cls, folder): """ Get default project folder. :param folder: Returned default path :type folder: str_ref .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Get default project folder. """ folder.value = gxapi_cy.WrapPROJ._get_default_project_path(GXContext._get_tls_geo(), folder.value.encode()) @classmethod def set_default_project_path(cls, folder): """ Set default project folder. :param folder: Default path :type folder: str .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Set default project folder. """ gxapi_cy.WrapPROJ._set_default_project_path(GXContext._get_tls_geo(), folder.encode()) @classmethod def has_pending_central_publish_event(cls): """ Checks if there is a pending publish event. :rtype: bool .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Checks if there is a pending publish event. 
""" ret_val = gxapi_cy.WrapPROJ._has_pending_central_publish_event(GXContext._get_tls_geo()) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXHTTP.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXHTTP(gxapi_cy.WrapHTTP): """ GXHTTP class. Connect to an Internet Server using `GXHTTP <geosoft.gxapi.GXHTTP>`. **Note:** References: 1. http://www.w3.org/Protocols/`GXHTTP <geosoft.gxapi.GXHTTP>`/HTTP2.html 2. http://www.w3.org/Addressing/URL/5_BNF.html Note that path and search must conform be xalpha string (ref 2.). Special characters can be specified with a %xx, where xx is the hex ASCII number. For example, a search string "This one" should be specified as "This%20one" """ def __init__(self, handle=0): super(GXHTTP, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXHTTP <geosoft.gxapi.GXHTTP>` :returns: A null `GXHTTP <geosoft.gxapi.GXHTTP>` :rtype: GXHTTP """ return GXHTTP() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, url, user_name, password, purpose): """ This method creates a connection to an `GXHTTP <geosoft.gxapi.GXHTTP>` server :param url: URL of the server :param user_name: User name, "" for none :param password: <PASSWORD>, "" for none :param purpose: Purpose of communication (for user verification) :type url: str :type user_name: str :type password: str :type purpose: str :returns: `GXHTTP <geosoft.gxapi.GXHTTP>` Object :rtype: GXHTTP .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** An OM user has the ability to control access and verification of access to servers over the Internet. A GX Developer has no way to change the users choice of access. This is to prevent the creation of GX's that may be dangerous or may be used to collect information without the knowledgede of the user. If the specified URL is restricted from access by the user, the create function will fail. If the specified URL has not been accessed by this user before, or if the user has this site on "Verify", the user will be presented with a dialog requiring verification before communication can begin. The user may choose to change the server site to a full "Trust" relationship, in which case the verification message will not reappear unless the site is explicitly changed back to verify or is restricted. If you intend your GX to communicate with a server without verification, you must instruct your user to change their trust relationship with your server to "Trusted". Your user will have the opportunity to do so the first time a script is run. 
""" ret_val = gxapi_cy.WrapHTTP._create(GXContext._get_tls_geo(), url.encode(), user_name.encode(), password.encode(), purpose.encode()) return GXHTTP(ret_val) def download(self, file, bf, dynamic): """ Download file from the internet to a `GXBF <geosoft.gxapi.GXBF>`. :param file: File Name on the `GXHTTP <geosoft.gxapi.GXHTTP>` site :param bf: `GXBF <geosoft.gxapi.GXBF>` in which to place the file :param dynamic: Dynamic content (0 - no, 1 - yes) :type file: str :type bf: GXBF :type dynamic: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The file will be written starting at the current location in the `GXBF <geosoft.gxapi.GXBF>` """ self._download(file.encode(), bf, dynamic) def silent_download(self, file, bf, dynamic): """ Download file from the internet to a `GXBF <geosoft.gxapi.GXBF>` with no prompt for proxy authentication. :param file: File Name on the `GXHTTP <geosoft.gxapi.GXHTTP>` site :param bf: `GXBF <geosoft.gxapi.GXBF>` in which to place the file :param dynamic: Dynamic content (0 - no, 1 - yes) :type file: str :type bf: GXBF :type dynamic: int .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The file will be written starting at the current location in the `GXBF <geosoft.gxapi.GXBF>`. No prompt for proxy authentication """ self._silent_download(file.encode(), bf, dynamic) def get(self, cl, method, bf, ret_bf): """ Get data from a server. :param cl: Http path (file or an ISAPI DLL), no spaces :param method: Http search string, no spaces :param bf: Data to send :param ret_bf: Data returned :type cl: str :type method: str :type bf: GXBF :type ret_bf: GXBF .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Full contents of the `GXBF <geosoft.gxapi.GXBF>` are sent in an `GXHTTP <geosoft.gxapi.GXHTTP>` GET message. `GXBF <geosoft.gxapi.GXBF>` pointer is returned to location before the call. request URL will be: http://server/path?search """ self._get(cl.encode(), method.encode(), bf, ret_bf) def post(self, cl, method, bf): """ Post data to the server. :param cl: Http path (file or an ISAPI DLL) :param method: Http search string, no spaces :param bf: Data to post :type cl: str :type method: str :type bf: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Full contents of the `GXBF <geosoft.gxapi.GXBF>` are sent as an `GXHTTP <geosoft.gxapi.GXHTTP>` POST message. request URL will be: http://server/path?search """ self._post(cl.encode(), method.encode(), bf) def set_proxy_credentials(self, username, password): """ Assigns the proxy username and password so that user is not prompted when the first download fails :param username: Username :param password: <PASSWORD> :type username: str :type password: str .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_proxy_credentials(username.encode(), password.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXIPGUI.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXIPGUI(gxapi_cy.WrapIPGUI): """ GXIPGUI class. This class is used in the `GXIP <geosoft.gxapi.GXIP>` System for `GXGUI <geosoft.gxapi.GXGUI>` functions such as defining parameters for pseudo-section plots. """ def __init__(self, handle=0): super(GXIPGUI, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXIPGUI <geosoft.gxapi.GXIPGUI>` :returns: A null `GXIPGUI <geosoft.gxapi.GXIPGUI>` :rtype: GXIPGUI """ return GXIPGUI() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def modify_job(cls, ip, db, ini, plot_type, page): """ Modify parameters for an `GXIP <geosoft.gxapi.GXIP>` plot. :param ip: `GXDH <geosoft.gxapi.GXDH>` Handle :param db: `GXDB <geosoft.gxapi.GXDB>` Handle :param ini: Job Name (``*.inp``) :param plot_type: Job type :ref:`IP_PLOT` :param page: Page to open `GXGUI <geosoft.gxapi.GXGUI>` on :type ip: GXIP :type db: GXDB :type ini: str :type plot_type: int :type page: int_ref :returns: 0 - Ok -1 - User Cancelled :rtype: int .. versionadded:: 6.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val, page.value = gxapi_cy.WrapIPGUI._modify_job(GXContext._get_tls_geo(), ip, db, ini.encode(), plot_type, page.value) return ret_val @classmethod def launch_ipqc_tool(cls, db, line, chan): """ Launch the In-Line `GXIP <geosoft.gxapi.GXIP>` QC tool on a database. 
:param db: Database name :param line: Current Line (can be blank) :param chan: Channel to open with (can be blank) :type db: str :type line: str :type chan: str .. versionadded:: 8.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The database should be a currently open database. """ gxapi_cy.WrapIPGUI._launch_ipqc_tool(GXContext._get_tls_geo(), db.encode(), line.encode(), chan.encode()) @classmethod def launch_offset_ipqc_tool(cls, db, line, chan): """ Launch the Offset `GXIP <geosoft.gxapi.GXIP>` QC tool on a database. :param db: Database name :param line: Current Line (can be blank) :param chan: Channel to open with (can be blank) :type db: str :type line: str :type chan: str .. versionadded:: 9.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The database should be a currently open database. """ gxapi_cy.WrapIPGUI._launch_offset_ipqc_tool(GXContext._get_tls_geo(), db.encode(), line.encode(), chan.encode()) @classmethod def ipqc_tool_exists(cls): """ See if there is an IPQC Tool (Offset or Inline) already open. :returns: 0 if not open, 1 if open :rtype: int .. versionadded:: 8.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** See if there is an IPQC Tool already open. """ ret_val = gxapi_cy.WrapIPGUI._ipqc_tool_exists(GXContext._get_tls_geo()) return ret_val @classmethod def launch_remove_contributing_electrodes_ext_tool(cls, db, map): """ Launch the Remove Contributing Electrodes dialog. :param db: Database name :param map: Current Map :type db: str :type map: str .. 
versionadded:: 9.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** This tool removes the `GXEXT <geosoft.gxapi.GXEXT>` object that allows the database-map linking between an IP data base and IP plan map for plotting contributing electrodes for a given database row of data. """ gxapi_cy.WrapIPGUI._launch_remove_contributing_electrodes_ext_tool(GXContext._get_tls_geo(), db.encode(), map.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXDSEL.rst .. _GXDSEL: GXDSEL class ================================== .. autoclass:: geosoft.gxapi.GXDSEL :members: .. _DSEL_PICTURE_QUALITY: DSEL_PICTURE_QUALITY constants ----------------------------------------------------------------------- Line Label Formats .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_DEFAULT .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_LOSSLESS :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_LOSSLESS .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_SEMILOSSY :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_SEMILOSSY .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_LOSSY :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_LOSSY .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_NATIVE :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_NATIVE .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_ECW :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_ECW .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_JPG :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_JPG .. 
autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_PNG :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_PNG .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_BMP :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_BMP .. autodata:: geosoft.gxapi.DSEL_PICTURE_QUALITY_TIF :annotation: .. autoattribute:: geosoft.gxapi.DSEL_PICTURE_QUALITY_TIF <file_sep>/docs/GXDXFI.rst .. _GXDXFI: GXDXFI class ================================== .. autoclass:: geosoft.gxapi.GXDXFI :members: <file_sep>/geosoft/gxapi/GXGEOSTRING.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXGEOSTRING(gxapi_cy.WrapGEOSTRING): """ GXGEOSTRING class. The `GXGEOSTRING <geosoft.gxapi.GXGEOSTRING>` class is used to read information stored in Geostring files (``*.geosoft_string``). Geosoft geostrings are 3D vector files that store digitized interpretations drawn on section maps. Both polygon and polyline features can be stored in the same file. This API currently only provides read access, but read/write support could be added in the future. """ def __init__(self, handle=0): super(GXGEOSTRING, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXGEOSTRING <geosoft.gxapi.GXGEOSTRING>` :returns: A null `GXGEOSTRING <geosoft.gxapi.GXGEOSTRING>` :rtype: GXGEOSTRING """ return GXGEOSTRING() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def open(cls, geostring_file, mode): """ Open a Geostring file :param geostring_file: Geostring file name :param mode: :ref:`GEOSTRING_OPEN` :type geostring_file: str :type mode: int :returns: `GXGEOSTRING <geosoft.gxapi.GXGEOSTRING>` Object :rtype: GXGEOSTRING .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapGEOSTRING._open(GXContext._get_tls_geo(), geostring_file.encode(), mode) return GXGEOSTRING(ret_val) def get_ipj(self, ipj): """ Get the coordinate system of the Geostring. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` in which to place the Geostring coordinate system :type ipj: GXIPJ .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_ipj(ipj) def get_features(self, lst): """ Get the features :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill :type lst: GXLST .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** List items are returned with feature GUID in name and feature name in value. """ self._get_features(lst) def get_sections(self, lst): """ Get the sections :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill :type lst: GXLST .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** List items are returned with section GUID in name and section name in value. """ self._get_sections(lst) def get_all_shapes(self, lst): """ Get the all shapes :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill :type lst: GXLST .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_all_shapes(lst) def get_shapes_for_feature(self, guid, lst): """ Get all shapes linked to a specific feature :param guid: Feature GUID :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill :type guid: str :type lst: GXLST .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_shapes_for_feature(guid.encode(), lst) def get_shapes_for_section(self, guid, lst): """ Get all shapes linked to a specific section :param guid: Section GUID :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill :type guid: str :type lst: GXLST .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_shapes_for_section(guid.encode(), lst) def get_shapes_for_feature_and_section(self, feature_guid, section_guid, lst): """ Get all shapes linked to a specific feature and section :param feature_guid: Feature GUID :param section_guid: Section GUID :param lst: `GXLST <geosoft.gxapi.GXLST>` to fill :type feature_guid: str :type section_guid: str :type lst: GXLST .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_shapes_for_feature_and_section(feature_guid.encode(), section_guid.encode(), lst) def get_feature_properties(self, guid, name, description, polygon, pat_number, pat_size, pat_thick, pat_density, pat_color, pat_bg_color, line_style, line_thickness, line_pitch, line_color): """ Get a feature's properties :param guid: Feature GUID :param name: Name :param description: Description :param polygon: Indicates if feature is described by polygons (shapes are polylines if not set) :param pat_number: The fill pattern number (see `GXMVIEW.pat_number <geosoft.gxapi.GXMVIEW.pat_number>`) :param pat_size: The fill pattern size (see `GXMVIEW.pat_size <geosoft.gxapi.GXMVIEW.pat_size>`) :param pat_thick: The fill pattern thickness (see `GXMVIEW.pat_thick <geosoft.gxapi.GXMVIEW.pat_thick>`) :param pat_density: The fill pattern density (see `GXMVIEW.pat_density <geosoft.gxapi.GXMVIEW.pat_density>`) :param pat_color: The fill color (an `GXMVIEW <geosoft.gxapi.GXMVIEW>` color) :param pat_bg_color: The fill background color (an `GXMVIEW <geosoft.gxapi.GXMVIEW>` color) :param line_style: The line style (see `GXMVIEW.line_style <geosoft.gxapi.GXMVIEW.line_style>`) :param line_thickness: The line thickness (see `GXMVIEW.line_thick <geosoft.gxapi.GXMVIEW.line_thick>`) :param line_pitch: The dash pattern pitch (see `GXMVIEW.line_style <geosoft.gxapi.GXMVIEW.line_style>`) :param line_color: The line color (an `GXMVIEW <geosoft.gxapi.GXMVIEW>` color) :type guid: str :type name: str_ref :type description: str_ref :type polygon: bool_ref :type pat_number: int_ref :type pat_size: float_ref :type pat_thick: float_ref :type pat_density: float_ref :type pat_color: int_ref :type pat_bg_color: int_ref :type line_style: int_ref :type line_thickness: float_ref :type line_pitch: float_ref :type line_color: int_ref .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value, description.value, polygon.value, pat_number.value, pat_size.value, pat_thick.value, pat_density.value, pat_color.value, pat_bg_color.value, line_style.value, line_thickness.value, line_pitch.value, line_color.value = self._get_feature_properties(guid.encode(), name.value.encode(), description.value.encode(), polygon.value, pat_number.value, pat_size.value, pat_thick.value, pat_density.value, pat_color.value, pat_bg_color.value, line_style.value, line_thickness.value, line_pitch.value, line_color.value) def get_section_properties(self, guid, name, container_name, orientation, easting, northing, elevation, azimuth, swing, a, b, c, d): """ Get a section's properties :param guid: Section GUID :param name: Name :param container_name: ContainerName :param orientation: :ref:`SECTION_ORIENTATION` :param easting: Easting :param northing: Northing :param elevation: Elevation :param azimuth: Azimuth :param swing: Swing :param a: A in the scalar equation of best-fit plane describing the section :param b: B in the scalar equation of best-fit plane describing the section :param c: C in the scalar equation of best-fit plane describing the section :param d: D in the scalar equation of best-fit plane describing the section :type guid: str :type name: str_ref :type container_name: str_ref :type orientation: int_ref :type easting: float_ref :type northing: float_ref :type elevation: float_ref :type azimuth: float_ref :type swing: float_ref :type a: float_ref :type b: float_ref :type c: float_ref :type d: float_ref .. 
versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value, container_name.value, orientation.value, easting.value, northing.value, elevation.value, azimuth.value, swing.value, a.value, b.value, c.value, d.value = self._get_section_properties(guid.encode(), name.value.encode(), container_name.value.encode(), orientation.value, easting.value, northing.value, elevation.value, azimuth.value, swing.value, a.value, b.value, c.value, d.value) def get_shape_properties(self, guid, feature_guid, section_guid, vert_v_vx, vert_v_vy, vert_v_vz): """ Get a shape's properties :param guid: Shape GUID :param feature_guid: Feature GUID :param section_guid: Section GUID :param vert_v_vx: Vertices X location :param vert_v_vy: Vertices Y location :param vert_v_vz: Vertices Z location :type guid: str :type feature_guid: str_ref :type section_guid: str_ref :type vert_v_vx: GXVV :type vert_v_vy: GXVV :type vert_v_vz: GXVV .. versionadded:: 8.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ feature_guid.value, section_guid.value = self._get_shape_properties(guid.encode(), feature_guid.value.encode(), section_guid.value.encode(), vert_v_vx, vert_v_vy, vert_v_vz) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/vox_assert_test.py import geosoft.gxpy.gx as gx import geosoft.gxpy.voxset as gxvox gx = gx.GXpy() with gxvox.Voxset.new("test_new", dimension=(35, 50, 12), overwrite=True) as vox: print(vox.nx, vox.ny, vox.nz) <file_sep>/geosoft/gxapi/GXMVG.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXMVIEW import GXMVIEW ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMVG(gxapi_cy.WrapMVG): """ GXMVG class. The `GXMVG <geosoft.gxapi.GXMVG>` class provides the ability to create view graphs. """ def __init__(self, handle=0): super(GXMVG, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMVG <geosoft.gxapi.GXMVG>` :returns: A null `GXMVG <geosoft.gxapi.GXMVG>` :rtype: GXMVG """ return GXMVG() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def axis_x(self, d_y, d_lx, d_rx, d_maj_int, d_min_int, d_size): """ Draw an X axis :param d_y: Y location in plot units (mm) :param d_lx: Left X (rescaling unit) :param d_rx: Right X (rescaling unit) :param d_maj_int: Major tick interval (rescaling unit). Ticks drawn in decades in LOG or LOGLINEAR scale :param d_min_int: Minor tick interval (rescaling unit). Not used in LOG/LOGLINEAR :param d_size: Tick size in view units (mm) (negative for down ticks) :type d_y: float :type d_lx: float :type d_rx: float :type d_maj_int: float :type d_min_int: float :type d_size: float .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** When Log annotation is applied, nice tick intervals will be calculated Obsolete """ self._axis_x(d_y, d_lx, d_rx, d_maj_int, d_min_int, d_size) def axis_y(self, d_x, d_by, d_ty, d_maj_int, d_min_int, d_size): """ Draw a Y axis :param d_x: X location in plot units (mm) :param d_by: Bottom Y (rescaling unit) :param d_ty: Top Y (rescaling unit) :param d_maj_int: Major tick interval (rescaling unit). Ticks drawn in decades in LOG or LOGLINEAR scale :param d_min_int: Minor tick interval (rescaling unit). Not used in LOG/LOGLINEAR :param d_size: Tick size in plot units (mm)(negative for left ticks) :type d_x: float :type d_by: float :type d_ty: float :type d_maj_int: float :type d_min_int: float :type d_size: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** When Log annotation is applied, nice tick intervals will be calculated Obsolete """ self._axis_y(d_x, d_by, d_ty, d_maj_int, d_min_int, d_size) @classmethod def create(cls, map, name, xmin_m, ymin_m, xmax_m, ymax_m, xmin_u, ymin_u, xmax_u, ymax_u): """ Create a `GXMVG <geosoft.gxapi.GXMVG>` object :param map: H_MAP handle :param name: View Name :param xmin_m: Minimum X in map unit (mm) :param ymin_m: Minimum Y in map unit (mm) :param xmax_m: Maximum X in map unit (mm) :param ymax_m: Maximum Y in map unit (mm) :param xmin_u: Minimum X in view unit (m for example) :param ymin_u: Minimum Y in view unit :param xmax_u: Maximum X in view unit :param ymax_u: Maximum Y in view unit :type map: GXMAP :type name: str :type xmin_m: float :type ymin_m: float :type xmax_m: float :type ymax_m: float :type xmin_u: float :type ymin_u: float :type xmax_u: float :type ymax_u: float :returns: `GXMVG <geosoft.gxapi.GXMVG>` handle (NULL if error) :rtype: 
GXMVG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Obsolete """ ret_val = gxapi_cy.WrapMVG._create(GXContext._get_tls_geo(), map, name.encode(), xmin_m, ymin_m, xmax_m, ymax_m, xmin_u, ymin_u, xmax_u, ymax_u) return GXMVG(ret_val) def get_mview(self): """ Get the `GXMVIEW <geosoft.gxapi.GXMVIEW>` Handle of the Object. :returns: `GXMVIEW <geosoft.gxapi.GXMVIEW>` Handle :rtype: GXMVIEW .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Obsolete """ ret_val = self._get_mview() return GXMVIEW(ret_val) def grid(self, d1st_x, d1st_y, d_x, d_y, d_dx, d_dy, l_type): """ Draw a grid in the current `GXMVG <geosoft.gxapi.GXMVG>` :param d1st_x: X position of 1st vertical grid line to draw (in rescaling unit) :param d1st_y: Y position of 1st horizontal grid line to draw (in rescaling unit) :param d_x: X grid increment of rescaled map unit (see above Rescaling functions) :param d_y: Y grid increment of rescaled map unit (see above Rescaling functions) :param d_dx: X dot increment/cross X size of rescaled map unit :param d_dy: Y dot increment/cross Y size of rescaled map unit :param l_type: :ref:`MVG_GRID` :type d1st_x: float :type d1st_y: float :type d_x: float :type d_y: float :type d_dx: float :type d_dy: float :type l_type: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The grid will be drawn in the current window. In the LOG and LOGLINEAR rescaling modes, grids will be drawn in decades and the X/Y grid increments will be ignored. In addition, grid lines at 0 (zero) and LOGMIN will be drawn. 
Obsolete """ self._grid(d1st_x, d1st_y, d_x, d_y, d_dx, d_dy, l_type) def label_x(self, y, lx, rx, maj_int, just, bound, orient): """ Label annotations on the X axis :param y: Y location in plot units (mm) :param lx: Left X (rescaling unit) :param rx: Right X (rescaling unit) :param maj_int: Major tick interval (ignored if in LOG or LOGLINEAR rescaling) :param just: Label justification :ref:`MVG_LABEL_JUST` :param bound: Edge label bounding :ref:`MVG_LABEL_BOUND` :param orient: Label orientation :ref:`MVG_LABEL_ORIENT` :type y: float :type lx: float :type rx: float :type maj_int: float :type just: int :type bound: int :type orient: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Label bounding will justify edge labels to be inside the bar limits. When Log annotation is applied, labels will be drawn in decades. Obsolete .. seealso:: sAxisX_MVG """ self._label_x(y, lx, rx, maj_int, just, bound, orient) def label_y(self, x, by, ty, maj_int, just, bound, orient): """ Label annotations on the Y axis :param x: X location in plot units (mm) :param by: Bottom Y (rescaling unit) :param ty: Top Y (rescaling unit) :param maj_int: Label interval (ignored if in LOG or LOGLINEAR rescaling) :param just: Label justification :ref:`MVG_LABEL_JUST` :param bound: Edge label bounding :ref:`MVG_LABEL_BOUND` :param orient: Label orientation :ref:`MVG_LABEL_ORIENT` :type x: float :type by: float :type ty: float :type maj_int: float :type just: int :type bound: int :type orient: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Label bounding will justify edge labels to be inside the bar limits. When Log annotation is applied, labels will be drawn in decades. Obsolete .. 
seealso:: sAxisY_MVG """ self._label_y(x, by, ty, maj_int, just, bound, orient) def poly_line_va(self, draw, wrap, vv_x, va, vv_array): """ Creates PolyLines/polygons from `GXVV <geosoft.gxapi.GXVV>` and `GXVA <geosoft.gxapi.GXVA>`. :param draw: :ref:`MVG_DRAW` :param wrap: :ref:`MVG_WRAP` :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param va: Y VAs :param vv_array: `GXVV <geosoft.gxapi.GXVV>` containing list of `GXVA <geosoft.gxapi.GXVA>` ranges, such as 1,2 40 ... Entire `GXVA <geosoft.gxapi.GXVA>` is drawn if this `GXVV <geosoft.gxapi.GXVV>` is empty. :type draw: int :type wrap: int :type vv_x: GXVV :type va: GXVA :type vv_array: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the `GXVV <geosoft.gxapi.GXVV>` contains dummies, the polylines will break at the dummies; the polygons will skip the dummies. If wrapping is applied, POLYGON parameter is ignored and only POLYLINES are drawn. Obsolete """ self._poly_line_va(draw, wrap, vv_x, va, vv_array) def poly_line_vv(self, draw, wrap, vv_x, vv_y): """ Creates PolyLines/polygons from `GXVV <geosoft.gxapi.GXVV>` and `GXVV <geosoft.gxapi.GXVV>`. :param draw: :ref:`MVG_DRAW` :param wrap: :ref:`MVG_WRAP` :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` :type draw: int :type wrap: int :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the `GXVV <geosoft.gxapi.GXVV>` contains dummies, the polylines will break at the dummies; the polygons will skip the dummies. If wrapping is applied, POLYGON parameter is ignored and only POLYLINES are drawn. 
Obsolete """ self._poly_line_vv(draw, wrap, vv_x, vv_y) def rescale_x_range(self, scale, min, max, log_min): """ Re-scale horizontal axis :param scale: :ref:`MVG_SCALE` :param min: Scale information: new minimum X :param max: Scale information: new maximum X :param log_min: Scale information: minimum X to apply log10, it is defined only for LOGLINEAR scale :type scale: int :type min: float :type max: float :type log_min: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** When RescaleX_MVG is used, only the scaling information related to X axis will be considered Obsolete """ self._rescale_x_range(scale, min, max, log_min) def rescale_y_range(self, scale, min, max, log_min): """ Re-scale vertical axis :param scale: :ref:`MVG_SCALE` :param min: Scale information: new minimum Y :param max: Scale information: new maximum Y :param log_min: Scale information: minimum Y to apply log10, it is defined only for LOGLINEAR scale :type scale: int :type min: float :type max: float :type log_min: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** When RescaleY_MVG is used, only the scaling information related to Y axis will be considered Obsolete """ self._rescale_y_range(scale, min, max, log_min) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXMETA.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMETA(gxapi_cy.WrapMETA): """ GXMETA class. A `GXMETA <geosoft.gxapi.GXMETA>` object contains hierarchical organized metadata of any type, including other objects. `GXMETA <geosoft.gxapi.GXMETA>` information is organized in an XML-like structure based on a data schema that describes the data hierarchy. `GXMETA <geosoft.gxapi.GXMETA>` objects are used by many entities that need to store metadata specific to the entities or to the application. Metadata can be saved in databases and maps, as well as in channels, lines, views and groups. Oasis montaj objects can be queried for their associated metadata, and if it exists, the metadata can be retrieved and utilized by other Oasis montaj processes. """ def __init__(self, handle=0): super(GXMETA, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMETA <geosoft.gxapi.GXMETA>` :returns: A null `GXMETA <geosoft.gxapi.GXMETA>` :rtype: GXMETA """ return GXMETA() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Attribute def create_attrib(self, name, ph_class, ph_type): """ Create an attribute :param name: Attribute Name :param ph_class: Parent class or :ref:`META_CORE_CLASS` :param ph_type: Type of Attribute or :ref:`META_CORE_TYPE` :type name: str :type ph_class: int :type ph_type: int :returns: x - Attribute Token :rtype: int .. 
versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._create_attrib(name.encode(), ph_class, ph_type) return ret_val def delete_attrib(self, ph_attribute): """ Delete Attrib from `GXMETA <geosoft.gxapi.GXMETA>`. :param ph_attribute: Attrib to delete :type ph_attribute: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_attrib(ph_attribute) # Browser def set_attribute_editable(self, ph_attribute, editable): """ Allow/disallow an attribute to be editable in the browser :param ph_attribute: Attribute or :ref:`META_CORE_ATTRIB` :param editable: Editable Flag :type ph_attribute: int :type editable: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attribute_editable(ph_attribute, editable) def set_attribute_visible(self, ph_attribute, visible): """ Allow/disallow an attribute to be visible in the browser :param ph_attribute: Attribute or :ref:`META_CORE_ATTRIB` :param visible: Editable Flag :type ph_attribute: int :type visible: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attribute_visible(ph_attribute, visible) # Class def create_class(self, name, ph_class): """ Create a class :param name: Class Name :param ph_class: Parent class or `META_CORE_CLASS_Base <geosoft.gxapi.META_CORE_CLASS_Base>` :type name: str :type ph_class: int :returns: x - Class Token :rtype: int .. 
versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._create_class(name.encode(), ph_class) return ret_val def delete_class(self, ph_class): """ Delete Class from `GXMETA <geosoft.gxapi.GXMETA>`. :param ph_class: Class to delete :type ph_class: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_class(ph_class) # Core def copy(self, source_meta): """ Copy a `GXMETA <geosoft.gxapi.GXMETA>` to another :param source_meta: Source `GXMETA <geosoft.gxapi.GXMETA>` object. :type source_meta: GXMETA .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(source_meta) @classmethod def create(cls): """ Create :returns: `GXMETA <geosoft.gxapi.GXMETA>` Object :rtype: GXMETA .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMETA._create(GXContext._get_tls_geo()) return GXMETA(ret_val) @classmethod def create_s(cls, bf): """ Create a `GXMETA <geosoft.gxapi.GXMETA>` Object from a `GXBF <geosoft.gxapi.GXBF>` :param bf: `GXBF <geosoft.gxapi.GXBF>` to serialize from :type bf: GXBF :returns: `GXMETA <geosoft.gxapi.GXMETA>` Object :rtype: GXMETA .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMETA._create_s(GXContext._get_tls_geo(), bf) return GXMETA(ret_val) def serial(self, bf): """ Serialize an `GXMETA <geosoft.gxapi.GXMETA>` to a `GXBF <geosoft.gxapi.GXBF>` :param bf: `GXBF <geosoft.gxapi.GXBF>` to serialize to :type bf: GXBF .. 
versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._serial(bf) # Get Data def find_data(self, ph_object, ph_attrib): """ Does this meta/attribute have a value ? :param ph_object: Object :param ph_attrib: Attribute :type ph_object: int :type ph_attrib: int :returns: x - Data Value `H_META_INVALID_TOKEN <geosoft.gxapi.H_META_INVALID_TOKEN>` - No :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._find_data(ph_object, ph_attrib) return ret_val def get_attrib_bool(self, ph_object, ph_attrib, value): """ Get a boolean value to an attribute :param ph_object: Object :param ph_attrib: Attribute :param value: Value to set :type ph_object: int :type ph_attrib: int :type value: int_ref .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ value.value = self._get_attrib_bool(ph_object, ph_attrib, value.value) def get_attrib_enum(self, ph_object, ph_attrib, value): """ Get an enum value to an attribute (as an integer) :param ph_object: Object :param ph_attrib: Attribute :param value: Value to set :type ph_object: int :type ph_attrib: int :type value: int_ref .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ value.value = self._get_attrib_enum(ph_object, ph_attrib, value.value) def get_attrib_int(self, ph_object, ph_attrib, value): """ Get an integer value to an attribute :param ph_object: Object :param ph_attrib: Attribute :param value: Value to set :type ph_object: int :type ph_attrib: int :type value: int_ref .. 
versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ value.value = self._get_attrib_int(ph_object, ph_attrib, value.value) def get_attrib_double(self, ph_object, ph_attrib, value): """ Get an integer value to an attribute :param ph_object: Object :param ph_attrib: Attribute :param value: Value to set :type ph_object: int :type ph_attrib: int :type value: float_ref .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ value.value = self._get_attrib_double(ph_object, ph_attrib, value.value) def get_attrib_string(self, ph_object, ph_attrib, value): """ Get a string value to an attribute :param ph_object: Object :param ph_attrib: Attribute :param value: String value to get :type ph_object: int :type ph_attrib: int :type value: str_ref .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ value.value = self._get_attrib_string(ph_object, ph_attrib, value.value.encode()) def has_value(self, ph_object, ph_attrib): """ Does this meta/attribute have a value set? :param ph_object: Object :param ph_attrib: Attribute :type ph_object: int :type ph_attrib: int :rtype: bool .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._has_value(ph_object, ph_attrib) return ret_val # Import/Export def export_table_csv(self, ph_class, file): """ Export all items in a class as a CSV :param ph_class: Class of items to export :param file: Name of CSV file to produce :type ph_class: int :type file: str .. 
versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_table_csv(ph_class, file.encode()) def import_table_csv(self, ph_class, file): """ Import a CSV into a class as items. :param ph_class: Class to import into :param file: Name of CSV file to load :type ph_class: int :type file: str .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Field names in the CSV file that match attribute names in the class will be imported into table entries in the class. Usually this will be used with a class created using the hCreateTable_SCHEMA method so that the contents of class can be viewed as a table. """ self._import_table_csv(ph_class, file.encode()) def write_text(self, wa): """ Write the entire meta as a text file :param wa: `GXWA <geosoft.gxapi.GXWA>` to write to :type wa: GXWA .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._write_text(wa) # Item def delete_all_items(self, ph_class): """ Delete all items in this class. :param ph_class: Class of items to delete :type ph_class: int .. versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_all_items(ph_class) def delete_item(self, ph_item): """ Delete item from `GXMETA <geosoft.gxapi.GXMETA>`. :param ph_item: Item to delete :type ph_item: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_item(ph_item) def h_creat_item(self, name, ph_class): """ Creates item in Class. 
:param name: Unique item Name :param ph_class: Class (can be root) :type name: str :type ph_class: int :returns: x - Next Item `H_META_INVALID_TOKEN <geosoft.gxapi.H_META_INVALID_TOKEN>` - Error :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._h_creat_item(name.encode(), ph_class) return ret_val def h_get_next_item(self, ph_class, ph_token): """ Count the number of items in a class :param ph_class: Class :param ph_token: Starting Item (must `H_META_INVALID_TOKEN <geosoft.gxapi.H_META_INVALID_TOKEN>` for first item) :type ph_class: int :type ph_token: int :returns: x - Next Item `H_META_INVALID_TOKEN <geosoft.gxapi.H_META_INVALID_TOKEN>` - No more items :rtype: int .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._h_get_next_item(ph_class, ph_token) return ret_val # Object def get_attrib_obj(self, ph_object, ph_attrib, obj): """ Get an object from an attribute :param ph_object: Object :param ph_attrib: Attribute :param obj: Object to get info into :type ph_object: int :type ph_attrib: int :type obj: int .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_attrib_obj(ph_object, ph_attrib, obj) def set_attrib_obj(self, ph_object, ph_attrib, obj): """ Set an object to an attribute :param ph_object: Object :param ph_attrib: Attribute :param obj: Object to set :type ph_object: int :type ph_attrib: int :type obj: int .. 
versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attrib_obj(ph_object, ph_attrib, obj) # Set Data def set_attrib_bool(self, ph_object, ph_attrib, value): """ Set a boolean value to an attribute :param ph_object: Object :param ph_attrib: Attribute :param value: Value to set :type ph_object: int :type ph_attrib: int :type value: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attrib_bool(ph_object, ph_attrib, value) def set_attrib_enum(self, ph_object, ph_attrib, value): """ Set an enum value to an attribute (as an integer) :param ph_object: Object :param ph_attrib: Attribute :param value: Value to set :type ph_object: int :type ph_attrib: int :type value: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attrib_enum(ph_object, ph_attrib, value) def set_attrib_int(self, ph_object, ph_attrib, value): """ Set an integer value to an attribute :param ph_object: Object :param ph_attrib: Attribute :param value: Value to set :type ph_object: int :type ph_attrib: int :type value: int .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attrib_int(ph_object, ph_attrib, value) def set_attrib_double(self, ph_object, ph_attrib, value): """ Set an integer value to an attribute :param ph_object: Object :param ph_attrib: Attribute :param value: Value to set :type ph_object: int :type ph_attrib: int :type value: float .. 
versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attrib_double(ph_object, ph_attrib, value) def set_attrib_string(self, ph_object, ph_attrib, value): """ Set a string value to an attribute :param ph_object: Object :param ph_attrib: Attribute :param value: String value to set :type ph_object: int :type ph_attrib: int :type value: str .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_attrib_string(ph_object, ph_attrib, value.encode()) def set_empty_attrib(self, ph_object, ph_attrib): """ Set an empty attribute data holder :param ph_object: MetaObject to set :param ph_attrib: Attribute MetaObject to set :type ph_object: int :type ph_attrib: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_empty_attrib(ph_object, ph_attrib) # Transfer def h_copy_across_attribute(self, source_meta, ph_attribute): """ Copy an Attribute from one `GXMETA <geosoft.gxapi.GXMETA>` to another :param source_meta: Source `GXMETA <geosoft.gxapi.GXMETA>` object. :param ph_attribute: Attribute to copy :type source_meta: GXMETA :type ph_attribute: int :returns: x - Handle of Attribute META_INVALID_TOKEN - No visible data :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._h_copy_across_attribute(source_meta, ph_attribute) return ret_val def h_copy_across_class(self, source_meta, ph_class): """ Copy a Class from one `GXMETA <geosoft.gxapi.GXMETA>` to another :param source_meta: Source `GXMETA <geosoft.gxapi.GXMETA>` object. 
:param ph_class: Class to copy :type source_meta: GXMETA :type ph_class: int :returns: x - Handle of Class META_INVALID_TOKEN - No visible data anywhere :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This will copy all parent classes as well. """ ret_val = self._h_copy_across_class(source_meta, ph_class) return ret_val def h_copy_across_data(self, source_meta, ph_data): """ Copy a Data value from one `GXMETA <geosoft.gxapi.GXMETA>` to another :param source_meta: Source `GXMETA <geosoft.gxapi.GXMETA>` object. :param ph_data: Data value to copy :type source_meta: GXMETA :type ph_data: int :returns: x - Handle of Data value META_INVALID_TOKEN - No visible data :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._h_copy_across_data(source_meta, ph_data) return ret_val def h_copy_across_item(self, source_meta, ph_item): """ Copy an Item from one `GXMETA <geosoft.gxapi.GXMETA>` to another :param source_meta: Source `GXMETA <geosoft.gxapi.GXMETA>` object. :param ph_item: Item to copy :type source_meta: GXMETA :type ph_item: int :returns: x - Handle of Item META_INVALID_TOKEN - No visible data :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._h_copy_across_item(source_meta, ph_item) return ret_val def h_copy_across_type(self, source_meta, ph_type): """ Copy a Type from one `GXMETA <geosoft.gxapi.GXMETA>` to another :param source_meta: Source `GXMETA <geosoft.gxapi.GXMETA>` object. :param ph_type: Type to copy :type source_meta: GXMETA :type ph_type: int :returns: x - Handle of type META_INVALID_TOKEN - No visible data anywhere :rtype: int .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Classes and parent types will also be copied. """ ret_val = self._h_copy_across_type(source_meta, ph_type) return ret_val def move_datas_across(self, source_meta, ph_i_obj, ph_o_obj): """ Moves data items from one `GXMETA <geosoft.gxapi.GXMETA>` to another :param source_meta: Source `GXMETA <geosoft.gxapi.GXMETA>` Object :param ph_i_obj: Object to copy data from :param ph_o_obj: Object to copy data to :type source_meta: GXMETA :type ph_i_obj: int :type ph_o_obj: int .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._move_datas_across(source_meta, ph_i_obj, ph_o_obj) # Type def create_type(self, name, ph_class, ph_type): """ Create an attribute :param name: Attribute Name :param ph_class: Parent Class or :ref:`META_CORE_CLASS` :param ph_type: Parent Type or :ref:`META_CORE_TYPE` :type name: str :type ph_class: int :type ph_type: int :returns: x - Type Token :rtype: int .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._create_type(name.encode(), ph_class, ph_type) return ret_val def delete_data(self, ph_data): """ Delete Data from `GXMETA <geosoft.gxapi.GXMETA>`. :param ph_data: Data to delete :type ph_data: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_data(ph_data) def delete_type(self, ph_type): """ Delete Type from `GXMETA <geosoft.gxapi.GXMETA>`. :param ph_type: Type to delete :type ph_type: int .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._delete_type(ph_type) # UMN def get_obj_name(self, ph_object, name): """ Get the name of this item. :param ph_object: Object :param name: Name of object :type ph_object: int :type name: str_ref .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._get_obj_name(ph_object, name.value.encode()) def resolve_umn(self, umn): """ Resolve a Unique Meta Name (UMN) and find the token :param umn: Unique Meta Name (UMN) :type umn: str :returns: x - Token `H_META_INVALID_TOKEN <geosoft.gxapi.H_META_INVALID_TOKEN>` - Not found :rtype: int .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._resolve_umn(umn.encode()) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXDOCU.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDOCU(gxapi_cy.WrapDOCU): """ GXDOCU class. 
Class to work with documents """ def __init__(self, handle=0): super(GXDOCU, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDOCU <geosoft.gxapi.GXDOCU>` :returns: A null `GXDOCU <geosoft.gxapi.GXDOCU>` :rtype: GXDOCU """ return GXDOCU() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def copy(self, doc_us): """ Copy `GXDOCU <geosoft.gxapi.GXDOCU>` :param doc_us: Source `GXDOCU <geosoft.gxapi.GXDOCU>` :type doc_us: GXDOCU .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(doc_us) @classmethod def create(cls): """ Create a document onject :returns: `GXDOCU <geosoft.gxapi.GXDOCU>` Object :rtype: GXDOCU .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDOCU._create(GXContext._get_tls_geo()) return GXDOCU(ret_val) @classmethod def create_s(cls, bf): """ Create from a serialized source :param bf: `GXBF <geosoft.gxapi.GXBF>` from which to read `GXDOCU <geosoft.gxapi.GXDOCU>` :type bf: GXBF :returns: `GXDOCU <geosoft.gxapi.GXDOCU>` Object :rtype: GXDOCU .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDOCU._create_s(GXContext._get_tls_geo(), bf) return GXDOCU(ret_val) def get_file(self, file): """ Get the document and place in a file. :param file: File to which to write document :type file: str .. 
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_file(file.encode()) def get_file_meta(self, file): """ Get the document and place in a file with metadata. :param file: File to which to write document :type file: str .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If this document is only a URL link, the URL link will be resolved and the document downloaded from the appropriate server using the protocol specified. The document has metadata, and the native document does not support metadata, the metadata will be placed in an associated file "filename.extension.GeosoftMeta" """ self._get_file_meta(file.encode()) def get_meta(self, meta): """ Get the document's meta :param meta: `GXMETA <geosoft.gxapi.GXMETA>` object to fill in with the document's meta :type meta: GXMETA .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_meta(meta) def doc_name(self, name): """ The document name. :param name: Buffer to fill with document name :type name: str_ref .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._doc_name(name.value.encode()) def file_name(self, name): """ The original document file name. :param name: Buffer to fill with document file name :type name: str_ref .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ name.value = self._file_name(name.value.encode()) def have_meta(self): """ Checks if a document has metadata. :rtype: bool .. 
versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._have_meta() return ret_val def is_reference(self): """ Is the document only a reference (a URL) ? :returns: 1 - Yes, 0 - No :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_reference() return ret_val def open(self, mode): """ Open a document in the document viewer :param mode: :ref:`DOCU_OPEN` :type mode: int .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** On Windows, the default application for the file extension is used to open the file. """ self._open(mode) def serial(self, bf): """ Serialize `GXDOCU <geosoft.gxapi.GXDOCU>` :param bf: `GXBF <geosoft.gxapi.GXBF>` in which to write object :type bf: GXBF .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._serial(bf) def set_file(self, type, name, file): """ Set the document from a file source. :param type: Document type :param name: Document name, if "" file name will be used :param file: Document file, must exist :type type: str :type name: str :type file: str .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Document types are normally identified by their extension. If you leave the document type blank, the extension of the document file will be used as the document type. To resolve conflicting types, you can define your own unique type by entering your own type "extension" string. 
The following types are pre-defined (as are any normal Geosoft file types): "htm" HTML "html" HTML "txt" ASCII text file "doc" Word for Windows document "pdf" Adobe PDF "map" Geosoft map file "mmap" Mapinfo map file (real extension "map") "grd" Geosoft grid file "gdb" Geosoft database URL Document Links The document name can be a URL link to the document using one of the supported protocols. The following protocols are supported: http://www.mywebserver.com/MyFile.doc - `GXHTTP <geosoft.gxapi.GXHTTP>` dap://my.dap.server.com/dcs?DatasetName?MyFile.doc - DAP (DAP Document Access) ftp://my.ftp.server.com/Dir1/MyFile.doc - FTP protocol The full file name will be stored but no data will be stored with the `GXDOCU <geosoft.gxapi.GXDOCU>` class and the document can be retrieved using the sGetFile_DOCU method. """ self._set_file(type.encode(), name.encode(), file.encode()) def set_file_meta(self, type, name, file): """ Set the document from a file source with metadata. :param type: Document type extension :param name: Document name, if NULL use file name :param file: Document file or URL :type type: str :type name: str :type file: str .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `set_file <geosoft.gxapi.GXDOCU.set_file>`. This function is the same as sSetFile_DOCU, plus insures that a `GXMETA <geosoft.gxapi.GXMETA>` exists that includes the "Data" class. If the file has associated metadata, either supported natively in the file, or through an associated file "filename.extension.GeosoftMeta", that metadata will be loaded into the `GXDOCU <geosoft.gxapi.GXDOCU>` meta, and a Data class will be constructed if one does not exist. Also, the Document type Extension is very important in that it specifies the document types that natively have metadata. 
The ones currently supported are: "map" Geosoft map file "gdb" Geosoft database "grd" Geosoft grid file """ self._set_file_meta(type.encode(), name.encode(), file.encode()) def set_meta(self, meta): """ Set the document's meta :param meta: `GXMETA <geosoft.gxapi.GXMETA>` to add to the document's meta :type meta: GXMETA .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_meta(meta) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXTB.rst .. _GXTB: GXTB class ================================== .. autoclass:: geosoft.gxapi.GXTB :members: .. _TB_SEARCH: TB_SEARCH constants ----------------------------------------------------------------------- `GXTB <geosoft.gxapi.GXTB>` Searching mode .. autodata:: geosoft.gxapi.TB_SEARCH_BINARY :annotation: .. autoattribute:: geosoft.gxapi.TB_SEARCH_BINARY .. autodata:: geosoft.gxapi.TB_SEARCH_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.TB_SEARCH_LINEAR .. _TB_SORT: TB_SORT constants ----------------------------------------------------------------------- `GXTB <geosoft.gxapi.GXTB>` Sorting mode .. autodata:: geosoft.gxapi.TB_SORT_UNIQUE :annotation: .. autoattribute:: geosoft.gxapi.TB_SORT_UNIQUE .. autodata:: geosoft.gxapi.TB_SORT_ALLOW_DUPLICATES :annotation: .. autoattribute:: geosoft.gxapi.TB_SORT_ALLOW_DUPLICATES <file_sep>/geosoft/gxapi/GXSEGYREADER.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXIPJ import GXIPJ from .GXLTB import GXLTB from .GXVV import GXVV ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSEGYREADER(gxapi_cy.WrapSEGYREADER): """ GXSEGYREADER class. Convert 3D SEG Y files to voxel or database. """ def __init__(self, handle=0): super(GXSEGYREADER, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSEGYREADER <geosoft.gxapi.GXSEGYREADER>` :returns: A null `GXSEGYREADER <geosoft.gxapi.GXSEGYREADER>` :rtype: GXSEGYREADER """ return GXSEGYREADER() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def open_file(cls, filename): """ Opens a 3D SEG Y file. :param filename: File name :type filename: str :returns: `GXSEGYREADER <geosoft.gxapi.GXSEGYREADER>` handle, terminates if creation fails. :rtype: GXSEGYREADER .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSEGYREADER._open_file(GXContext._get_tls_geo(), filename.encode()) return GXSEGYREADER(ret_val) def set_is_3d(self, is_3d): """ Specify if the input SEG-Y file is 3D or 2D. :param is_3d: True is file is 3D, false for 2D. :type is_3d: bool .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_is_3d(is_3d) def get_is_3d(self): """ Returns true if the file is 3D false if it is 2D. :rtype: bool .. 
versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_is_3d() return ret_val def get_endianess(self): """ Returns true if the file is little endian. false if it is big endian. :rtype: bool .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_endianess() return ret_val def set_endianess(self, is_little_endian): """ Set the endianess of the file. :param is_little_endian: True is file is little endian, false for big endian. :type is_little_endian: bool .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_endianess(is_little_endian) def get_xy_units(self, xy_units): """ Get the currently-specified xy-units. :param xy_units: The name of the units. :type xy_units: str_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ xy_units.value = self._get_xy_units(xy_units.value.encode()) def set_z_type(self, z_type): """ Specify if the z-dimension is time or depth. :param z_type: Either "DEPTH" or "TIME". :type z_type: str .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_z_type(z_type.encode()) def get_z_type(self, z_type): """ Indicate if the z-dimension is time or depth. :param z_type: Either "DEPTH" or "TIME". :type z_type: str_ref .. 
versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ z_type.value = self._get_z_type(z_type.value.encode()) def get_possible_z_units(self, z_units_list): """ Get a list of the possible values that can be passed to `SetZUnits()`. The values returned by this function depend on what the z-type is set to. :param z_units_list: List of possible z-units,separated by newlines :type z_units_list: str_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ z_units_list.value = self._get_possible_z_units(z_units_list.value.encode()) def get_z_units(self, z_units): """ Get the currently-specified z-units. :param z_units: The name of the z-units. :type z_units: str_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ z_units.value = self._get_z_units(z_units.value.encode()) def set_z_units(self, z_units): """ Set the z-units. :param z_units: The Z units. Must be one of the strings returned by `GetPossibleZUnits()`. :type z_units: str .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_z_units(z_units.encode()) def get_z_offset_units(self, z_units): """ Get the currently-specified units for the z offset. :param z_units: List of possible z-units,separated by newlines :type z_units: str_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ z_units.value = self._get_z_offset_units(z_units.value.encode()) def set_z_offset_units(self, z_units): """ Set the units that the z-offset is in. :param z_units: The Z units. 
Must be one of the strings returned by `GetPossibleZUnits()`. :type z_units: str .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_z_offset_units(z_units.encode()) def get_last_sample_at(self, value): """ Returns the depth of the last sample in the traces, in the units specified by `SetZUnits()` :param value: Out-parameter containing the depth of the last sample in the traces. :type value: float_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ value.value = self._get_last_sample_at(value.value) @classmethod def list_binary_header_fields(cls): """ Returns the names and offsets of the fields in the binary header. :rtype: GXLTB .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSEGYREADER._list_binary_header_fields(GXContext._get_tls_geo()) return GXLTB(ret_val) @classmethod def list_trace_header_fields(cls): """ Returns the names and offsets of the fields in the trace header. :rtype: GXLTB .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapSEGYREADER._list_trace_header_fields(GXContext._get_tls_geo()) return GXLTB(ret_val) def get_text_header(self, text): """ Get the SEG Y file's text header. :param text: The text header contents. :type text: str_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ text.value = self._get_text_header(text.value.encode()) def get_binary_header(self): """ Get the SEG Y file's binary header. 
:returns: :class:'LTB' containing three columns: the field name, offset, and value. The value column will be in text form, suitable for display to the end user. :rtype: GXLTB .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_binary_header() return GXLTB(ret_val) def get_trace_data_at(self, data_type_name, number_of_samples, current_trace, is_big_endian): """ Get the SEG Y trace file data for a particular data type, number of samples, and starting trace :param data_type_name: The name of a supported data type. :param number_of_samples: The number of samples to return :param current_trace: The trace to start at :param is_big_endian: 1 for big endian, 0 for small :type data_type_name: str :type number_of_samples: int :type current_trace: int :type is_big_endian: int :returns: :class:'VV' containing the data from the traces :rtype: GXVV .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_trace_data_at(data_type_name.encode(), number_of_samples, current_trace, is_big_endian) return GXVV(ret_val) def get_trace_header_at(self, data_type_name, number_of_samples, current_trace, is_big_endian): """ Get the SEG Y trace file header data for a particular starting trace :param data_type_name: The name of a supported data type. :param number_of_samples: The number of samples to return :param current_trace: The trace to start at :param is_big_endian: 1 for big endian, 0 for small :type data_type_name: str :type number_of_samples: int :type current_trace: int :type is_big_endian: int :returns: :class:'VV' containing the data from the trace header :rtype: GXVV .. 
versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        ret_val = self._get_trace_header_at(data_type_name.encode(), number_of_samples, current_trace, is_big_endian)
        return GXVV(ret_val)




    def estimate_number_of_traces(self, data_type_name, number_of_samples):
        """
        Get the number of traces that would be in the SEG-Y file, given a trace length and data type.

        :param data_type_name:     The name of a supported data type.
        :param number_of_samples:  The number of samples to return
        :type data_type_name:      str
        :type number_of_samples:   int

        :rtype: int

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        ret_val = self._estimate_number_of_traces(data_type_name.encode(), number_of_samples)
        return ret_val




    @classmethod
    def get_num_trace_data_types(cls):
        """
        Returns the number of supported trace data types.

        :rtype: int

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        ret_val = gxapi_cy.WrapSEGYREADER._get_num_trace_data_types(GXContext._get_tls_geo())
        return ret_val




    @classmethod
    def get_trace_data_type_name(cls, index, name):
        """
        Get the name of one of the available data types. These are the names used
        as identifiers in this API. To get a name suitable for displaying to the
        user, use `GetTraceDataTypeDisplayName()` instead.

        :param index:  Which name to return. Call this function for each value between 0 and the value returned by `GetNumTraceTypes()` to get a full list of the available types.
        :param name:   The name of a supported data type.
        :type index:   int
        :type name:    str_ref

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        name.value = gxapi_cy.WrapSEGYREADER._get_trace_data_type_name(GXContext._get_tls_geo(), index, name.value.encode())




    @classmethod
    def get_trace_data_type_display_name(cls, index, name):
        """
        Get a string, suitable for displaying to the user, describing the type
        returned by passing the same `index` value to `GetTraceDataTypeName()`

        :param index:  Which name to return.
        :param name:   The display name of a supported data type.
        :type index:   int
        :type name:    str_ref

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        name.value = gxapi_cy.WrapSEGYREADER._get_trace_data_type_display_name(GXContext._get_tls_geo(), index, name.value.encode())




    def get_trace_data_type(self, name):
        """
        Get the data type of the trace data. This will match one of the names
        returned by `GetTraceDataTypeName()`

        :param name:  The name of data type.
        :type name:   str_ref

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        name.value = self._get_trace_data_type(name.value.encode())




    def set_trace_data_type(self, name):
        """
        Set the data type of the trace data. This must match one of the names
        returned by `GetTraceDataTypeName()`

        :param name:  The name of data type.
        :type name:   str

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_trace_data_type(name.encode())




    def get_trace_length_configuration(self, which_header, location):
        """
        Specifies where the trace length comes from: can be a field in the binary file header, a field in the trace header, or a value specified by the user.
:param which_header: Out-parameter indicating which header contains the trace length: "file_header" "trace_header", or "none" :param location: Out-parameter containing the byte offset of the field within the header (if applicable) :type which_header: str_ref :type location: int_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ which_header.value, location.value = self._get_trace_length_configuration(which_header.value.encode(), location.value) def set_trace_length_configuration(self, which_header, location_or_value): """ Specifies where the trace length comes from: can be a field in the binary file header, a field in the trace header, or a value specified by the user. :param which_header: Which header contains the trace length: "file_header", "trace_header" or "none" :param location_or_value: If `which_header` is "file_header" or "trace_header", then this parameter is the offset of the field containing the trace length. If `which_header` is "none", then this parameter is the actual trace length. :type which_header: str :type location_or_value: int .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_trace_length_configuration(which_header.encode(), location_or_value) def get_trace_length(self): """ Returns the number of data samples per trace. :rtype: int .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_trace_length() return ret_val def get_sample_interval_configuration(self, which_header, location): """ Specifies where the sample interval comes from: can be a field in the binary file header, a field in the trace header, or a value specified by the user. 
:param which_header: Out-parameter indicating which header contains the trace length: "file_header" "trace_header", or "none" :param location: Out-parameter containing the byte offset of the field within the header (if applicable) :type which_header: str_ref :type location: int_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ which_header.value, location.value = self._get_sample_interval_configuration(which_header.value.encode(), location.value) def set_sample_interval_configuration(self, which_header, location, location_or_value): """ Specifies where the sample interval comes from: can be a field in the binary file header, a field in the trace header, or a value specified by the user. :param which_header: Which header contains the sample interval: "file_header", "trace_header" or "none" :param location: If `which_header` is "file_header" or "trace_header", then this parameter is the offset of the field containing the sample interval. If `which_header` is "none", thenthis parameter is ignored. :param location_or_value: If `which_header` is "file_header" or "trace_header", then this parameter is ignored. If `which_header` is "none", thenthis parameter is the sample interval. :type which_header: str :type location: int :type location_or_value: float .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_sample_interval_configuration(which_header.encode(), location, location_or_value) def get_sample_interval(self, sample_interval): """ Returns the sample interval of the trace data. :param sample_interval: output parameter for sample interval :type sample_interval: float_ref .. 
versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ sample_interval.value = self._get_sample_interval(sample_interval.value) def get_z_offset_configuration(self, which_header, location): """ Specifies where the z-offset (time delay) comes from: can be a field in the binary file header, a field in the trace header, or a value specified by the user. :param which_header: Out-parameter indicating which header contains the z offset: "file_header" "trace_header", or "none" :param location: Out-parameter containing the byte offset of the field within the header (if applicable) :type which_header: str_ref :type location: int_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ which_header.value, location.value = self._get_z_offset_configuration(which_header.value.encode(), location.value) def set_z_offset_configuration(self, which_header, location, value): """ Specifies where the z-offset (time delay) comes from: can be a field in the binary file header, a field in the trace header, or a value specified by the user. :param which_header: Which header contains the z offset: "file_header", "trace_header" or "none" :param location: If `which_header` is "file_header" or "trace_header", then this parameter is the offset of the field. If `which_header` is "none", then this parameter is ignored. :param value: If `which_header` is "file_header" or "trace_header", then this parameter is ignored. If `which_header` is "none", then this parameter is the z offset. :type which_header: str :type location: int :type value: float .. 
versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_z_offset_configuration(which_header.encode(), location, value) def get_z_offset(self, z_offset): """ Returns the z-offset (time delay) of the trace data. Positive values correspond to a deeper top-of-trace; negative values to a higher top-of-trace. :param z_offset: The z offset/time delay :type z_offset: float_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ z_offset.value = self._get_z_offset(z_offset.value) def scan_file(self): """ Scans the SEG Y file, and attempts to guess the layout. .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._scan_file() def get_field_configuration(self): """ Returns information on the data in the trace headers. :returns: :class:'LTB' containing trace header information :rtype: GXLTB .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_field_configuration() return GXLTB(ret_val) def set_field_configuration(self, configuration): """ Sets the interpretation of the fields in the SEG Y file, and specifies which fields should be exported to GDB. :param configuration: `GXLTB <geosoft.gxapi.GXLTB>` following the same format as returned by `GetFieldConfiguration()`. :type configuration: GXLTB .. 
versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_field_configuration(configuration) def get_trace_count(self, count): """ Get the number of traces in the SEG Y file :param count: Trace count :type count: int_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ count.value = self._get_trace_count(count.value) def check_sane_inline_crossline(self, is_sane, possibly_swapped, only_one_line): """ Checks if the currently-configured inline and crossline fields seem sensible. :param is_sane: True is inline/crossline values seem sensible. :param possibly_swapped: True if it looks like the inline and crossline fields are swapped. :param only_one_line: True if it looks like the file only contains one line. This may mean the file is 2D. :type is_sane: bool_ref :type possibly_swapped: bool_ref :type only_one_line: bool_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ is_sane.value, possibly_swapped.value, only_one_line.value = self._check_sane_inline_crossline(is_sane.value, possibly_swapped.value, only_one_line.value) def get_voxel_dimensions(self, x, y, z): """ Get the size of the voxel that would be exported with the current configuration. :param x: Voxel size along X :param y: Voxel size along Y :param z: Voxel size along Z :type x: int_ref :type y: int_ref :type z: int_ref .. 
versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ x.value, y.value, z.value = self._get_voxel_dimensions(x.value, y.value, z.value) def get_voxel_cell_size(self, x, y, z): """ Get the cell size of the voxel that would be exported with the current configuration. :param x: Voxel cell size along X :param y: Voxel cell size along Y :param z: Voxel cell size along Z :type x: float_ref :type y: float_ref :type z: float_ref .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ x.value, y.value, z.value = self._get_voxel_cell_size(x.value, y.value, z.value) def set_auto_voxel_cell_size_xy(self): """ Set the XY cell size of the voxel that would be exported to the dimensions calculated from the tie points.. .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_auto_voxel_cell_size_xy() def set_user_voxel_cell_size_xy(self, x, y): """ Set the XY cell size of the voxel that would be exported with the current configuration. :param x: Voxel cell size along X :param y: Voxel cell size along Y :type x: float :type y: float .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_user_voxel_cell_size_xy(x, y) def get_georeferencing(self): """ Returns the georeferencing of the voxel that would be exported with the current configuration. :rtype: GXIPJ .. 
versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        ret_val = self._get_georeferencing()
        return GXIPJ(ret_val)




    def set_georeferencing(self, ipj):
        """
        Sets the georeferencing of the voxel that would be exported with the current configuration.

        :param ipj:  Coordinate system to assign to the exported voxel.
        :type ipj:   GXIPJ

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_georeferencing(ipj)




    def get_tie_point(self, i, x, y, in_line, cross_line):
        """
        Return the currently-active tie points. If `set_tie_point` has not already
        been called, then the returned points will be the automatically-selected ones.

        :param i:           Tie point index; must be 0, 1 or 2.
        :param x:           x-coordinate
        :param y:           y-coordinate
        :param in_line:     inline-coordinate
        :param cross_line:  crossline-coordinate
        :type i:            int
        :type x:            float_ref
        :type y:            float_ref
        :type in_line:      int_ref
        :type cross_line:   int_ref

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        x.value, y.value, in_line.value, cross_line.value = self._get_tie_point(i, x.value, y.value, in_line.value, cross_line.value)




    def set_tie_point(self, i, x, y, in_line, cross_line):
        """
        Set one of the three tie points used to georeference the exported voxel.

        :param i:           Tie point index; must be 0, 1 or 2.
        :param x:           x-coordinate
        :param y:           y-coordinate
        :param in_line:     inline-coordinate
        :param cross_line:  crossline-coordinate
        :type i:            int
        :type x:            float
        :type y:            float
        :type in_line:      int
        :type cross_line:   int

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_tie_point(i, x, y, in_line, cross_line)




    def recalculate_georeferencing(self):
        """
        Recalculate georeferencing; call after configuration has changed.

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._recalculate_georeferencing()




    def reset_tie_points(self):
        """
        Discard user-supplied tie points and auto-choose new ones.

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._reset_tie_points()




    def get_inline_and_crossline_azimuths(self, inline_azimuth, crossline_azimuth):
        """
        Get the inline and crossline azimuths, in degrees

        :param inline_azimuth:     Inline azimuth
        :param crossline_azimuth:  Crossline azimuth
        :type inline_azimuth:      float_ref
        :type crossline_azimuth:   float_ref

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        inline_azimuth.value, crossline_azimuth.value = self._get_inline_and_crossline_azimuths(inline_azimuth.value, crossline_azimuth.value)




    def export_voxel_and_database(self, voxel_filename, gdb_filename):
        """
        Exports contents of SEG Y file to voxel and/or database.

        :param voxel_filename:  Output voxel file name
        :param gdb_filename:    Output database file name
        :type voxel_filename:   str
        :type gdb_filename:     str

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._export_voxel_and_database(voxel_filename.encode(), gdb_filename.encode())




    def set_gdb_output_filename(self, filename):
        """
        Set the output database file name for the SEG Y export.
:param filename:  Output database file name
        :type filename:   str

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_gdb_output_filename(filename.encode())




    def set_voxel_output_filename(self, filename):
        """
        Set the output voxel file name for the SEG Y export.

        :param filename:  Output voxel file name
        :type filename:   str

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_voxel_output_filename(filename.encode())




    def set_section_output_filename(self, filename):
        """
        Set the output crooked-section file name for the SEG Y export.

        :param filename:  Output crooked section file name
        :type filename:   str

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_section_output_filename(filename.encode())




    def set_slice_output_prefix(self, prefix):
        """
        Set the file-name prefix used when exporting inline or crossline slices to section grids.

        :param prefix:  Output section file name prefix
        :type prefix:   str

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_slice_output_prefix(prefix.encode())




    def set_inline_slice_indices(self, indices):
        """
        Which inline slices to export to a section grid.

        :param indices:  Which inline values to export.
        :type indices:   GXVV

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_inline_slice_indices(indices)




    def set_crossline_slice_indices(self, indices):
        """
        Which crossline slices to export to a section grid.

        :param indices:  Which crossline values to export.
        :type indices:   GXVV

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_crossline_slice_indices(indices)




    def set_z_slice_indices(self, indices):
        """
        Which z slices to export to a section grid.

        :param indices:  Which z values to export.
        :type indices:   GXVV

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_z_slice_indices(indices)




    def get_slice_filenames(self, filename_list):
        """
        Returns a list of the filenames of the XY slices that will be exported.

        :param filename_list:  List of filenames,separated by newlines
        :type filename_list:   str_ref

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        filename_list.value = self._get_slice_filenames(filename_list.value.encode())




    def set_z_decimation(self, factor):
        """
        Sets Z decimation factor.

        :param factor:  Decimation factor
        :type factor:   int

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_z_decimation(factor)




    def set_user_z_range(self, z_min, z_max):
        """
        Sets Z-range to clamp to.

        :param z_min:  Z minimum
        :param z_max:  Z maximum
        :type z_min:   float
        :type z_max:   float

        .. versionadded:: 9.7

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        self._set_user_z_range(z_min, z_max)




    def clear_user_z_range(self):
        """
        Clears Z-range to clamp to, disabling z-clamping.

        ..
versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._clear_user_z_range() def set_user_inline_range(self, inline_min, inline_max, inline_stride, factor): """ Sets inline-range to clamp to. :param inline_min: inline minimum :param inline_max: inline maximum :param inline_stride: inline stride :param factor: Decimation factor :type inline_min: int :type inline_max: int :type inline_stride: int :type factor: int .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_user_inline_range(inline_min, inline_max, inline_stride, factor) def set_user_crossline_range(self, crossline_min, crossline_max, crossline_stride, factor): """ Sets crossline-range to clamp to. :param crossline_min: crossline minimum :param crossline_max: crossline maximum :param crossline_stride: crossline stride :param factor: Decimation factor :type crossline_min: int :type crossline_max: int :type crossline_stride: int :type factor: int .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_user_crossline_range(crossline_min, crossline_max, crossline_stride, factor) def clear_user_range(self): """ Clears inline and crossline ranges to clamp to. .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._clear_user_range() def set_trace_dummy_value(self, dummy_value): """ Sets the trace dummy value. :param dummy_value: trace dummy value :type dummy_value: float .. 
versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_trace_dummy_value(dummy_value) def clear_trace_dummy_value(self): """ Disables the trace dummy value. .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._clear_trace_dummy_value() def get_clip_xy_extents(self, is_clip_valid, x_min, x_max, y_min, y_max): """ Gets the X,Y extents to clip the voxel. :param is_clip_valid: is clip area valid :param x_min: minimum X value :param x_max: maximum X value :param y_min: minimum Y value :param y_max: maximum Y value :type is_clip_valid: int_ref :type x_min: float_ref :type x_max: float_ref :type y_min: float_ref :type y_max: float_ref .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ is_clip_valid.value, x_min.value, x_max.value, y_min.value, y_max.value = self._get_clip_xy_extents(is_clip_valid.value, x_min.value, x_max.value, y_min.value, y_max.value) def add_trace_filter(self, offset, value1, value2, constraint): """ Add a filter based on trace header fields. :param offset: byte-offset of the field to filter by. :param value1: If `constraint` is EQUAL and NOT_EQUAL, this is the value to compare the field to. For IN_RANGE and NOT_IN_RANGE, this is the minimum. :param value2: If `constraint` is EQUAL and NOT_EQUAL, this parameter is ignored For IN_RANGE and NOT_IN_RANGE, this is the maximum. :param constraint: How the values are compared. Can be "IN_RANGE", "NOT_IN_RANGE", "EQUAL" or "NOT_EQUAL". :type offset: int :type value1: int :type value2: int :type constraint: str .. 
versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._add_trace_filter(offset, value1, value2, constraint.encode()) def clear_trace_filters(self): """ Remove all active trace filters. .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._clear_trace_filters() def count_traces_that_pass_filters(self): """ Count the number of traces that pass the currently-configured trace filters. :rtype: int .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._count_traces_that_pass_filters() return ret_val def override_navigation_2d(self, shotpoint_coords, x_coordinates, y_coordinates): """ Specify the X/Y coordinates of the traces, instead of using values from the trace headers. :param shotpoint_coords: The shotpoint coordinates, as read from the trace headers. :param x_coordinates: The x-coordinates :param y_coordinates: The y-coordinates :type shotpoint_coords: GXVV :type x_coordinates: GXVV :type y_coordinates: GXVV .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._override_navigation_2d(shotpoint_coords, x_coordinates, y_coordinates) def export_files(self): """ Exports contents of SEG Y file to voxel and/or database. .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._export_files() def get_trace_header_as_json(self, trace_number, text): """ Return the contents of a trace header as JSON. :param trace_number: Which trace to read :param text: Trace header as JSON. 
:type trace_number: int :type text: str_ref .. versionadded:: 9.10 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ text.value = self._get_trace_header_as_json(trace_number, text.value.encode()) def get_trace_data(self, trace_number): """ Get the SEG Y trace file header data for a particular starting trace :param trace_number: Which trace to read :type trace_number: int :returns: :class:`VV` containing the data from the trace header :rtype: GXVV .. versionadded:: 9.10 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_trace_data(trace_number) return GXVV(ret_val) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXMATH.rst .. _GXMATH: GXMATH class ================================== .. autoclass:: geosoft.gxapi.GXMATH :members: <file_sep>/geosoft/gxapi/GXLPT.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXLPT(gxapi_cy.WrapLPT): """ GXLPT class. This class allows access to the current default line patterns. It does not allow the definition of individual patterns. It is used primarily with `GXMAP <geosoft.gxapi.GXMAP>` class functions. 
""" def __init__(self, handle=0): super(GXLPT, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXLPT <geosoft.gxapi.GXLPT>` :returns: A null `GXLPT <geosoft.gxapi.GXLPT>` :rtype: GXLPT """ return GXLPT() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls): """ Creates a line pattern object with current default patterns. :returns: `GXLPT <geosoft.gxapi.GXLPT>` Object :rtype: GXLPT .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapLPT._create(GXContext._get_tls_geo()) return GXLPT(ret_val) def get_lst(self, lst): """ Copies all pattern names into a `GXLST <geosoft.gxapi.GXLST>` object. :param lst: `GXLST <geosoft.gxapi.GXLST>` Handle :type lst: GXLST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_lst(lst) def get_standard_lst(self, lst): """ Copies the six standard line types into a `GXLST <geosoft.gxapi.GXLST>` object. :param lst: `GXLST <geosoft.gxapi.GXLST>` Handle :type lst: GXLST .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The six standard line types are "solid", "long dash", "dotted", "short dash", "long, short dash" and "dash dot". """ self._get_standard_lst(lst) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXBIGRID.rst .. 
_GXBIGRID: GXBIGRID class ================================== .. autoclass:: geosoft.gxapi.GXBIGRID :members: <file_sep>/geosoft/gxapi/GXMVU.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMVU(gxapi_cy.WrapMVU): """ GXMVU class. A catchall library for methods using the `GXMAP <geosoft.gxapi.GXMAP>` and `GXMVIEW <geosoft.gxapi.GXMVIEW>` classes. These include drawing flight paths, legends, postings, and special objects such as histograms and bar charts. """ def __init__(self, handle=0): super(GXMVU, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMVU <geosoft.gxapi.GXMVU>` :returns: A null `GXMVU <geosoft.gxapi.GXMVU>` :rtype: GXMVU """ return GXMVU() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def arrow(cls, mview, hx, hy, tx, ty, ratio, angle, type): """ Draw an arrow. :param mview: View :param hx: Head X location :param hy: Head Y location :param tx: Tail X location :param ty: Tail Y location :param ratio: See :ref:`MVU_ARROW` definitions for explanation :param angle: Angle of barbs with respect to the tail in degrees. :param type: :ref:`MVU_ARROW` :type mview: GXMVIEW :type hx: float :type hy: float :type tx: float :type ty: float :type ratio: float :type angle: float :type type: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._arrow(GXContext._get_tls_geo(), mview, hx, hy, tx, ty, ratio, angle, type) @classmethod def arrow_vector_vv(cls, mview, vv_x, vv_y, vv_dx, vv_dy, scale, pos, size, style, point, thickness): """ Draw arrow vectors based on input VVs. :param mview: View :param vv_x: X locations :param vv_y: Y locations :param vv_dx: X Vector value (can be negative) :param vv_dy: Y Vector value (can be negative) :param scale: Scaling (units/mm) :param pos: :ref:`MVU_VPOS` :param size: :ref:`MVU_VSIZE` :param style: :ref:`MVU_VSTYLE` :param point: :ref:`MVU_VPOINT` :param thickness: Line thickness (can be Dummy) :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type vv_dx: GXVV :type vv_dy: GXVV :type scale: float :type pos: int :type size: int :type style: int :type point: int :type thickness: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The locations are given in two VVs, and the directions in the two others. A wide range of sizes are available. If the scaling is set to `rDUMMY <geosoft.gxapi.rDUMMY>`, then arrows are automatically scaled so the largest is 1cm in length. If the line thickness is set to `rDUMMY <geosoft.gxapi.rDUMMY>`, the line thickness scales with the arrow size, and is 1/20 of the vector length. """ gxapi_cy.WrapMVU._arrow_vector_vv(GXContext._get_tls_geo(), mview, vv_x, vv_y, vv_dx, vv_dy, scale, pos, size, style, point, thickness) @classmethod def bar_chart(cls, mview, group_name, data, line, x_chan, list, x_title, x_txt_size, y_title, y_txt_size, bar_title, bar_txt_size, bar_width, dist_fid, label, tick, right_axis, top_axis, bottom_axis, surround, left, bottom, right, top, xm, ym, widthm, heightm): """ Plot bar chart on a map. 
:param mview: View :param group_name: Group name :param data: Database handle :param line: Line handle :param x_chan: Horizontal (X) axis' channel name :param list: List of channel names (comma separated) :param x_title: X axis title :param x_txt_size: Text size for X axis :param y_title: Y axis title :param y_txt_size: Text size for Y axis :param bar_title: Overall chart title :param bar_txt_size: Text size for overall title :param bar_width: Bar width in mm :param dist_fid: Distance based (1) or fiducial based (0) :param label: :ref:`BARCHART_LABEL` :param tick: Draw ticks along X axis (1) or not (0) :param right_axis: Draw right vertical axis (1) or not :param top_axis: Draw top horizontal axis (1) :param bottom_axis: Draw bottom horizontal axis (1) or not :param surround: Draw surrounding box (1) or not (0) The following 4 parameters are required if drawing the surrounding box :param left: Width in mm between left Y axis of bar chart with left surrounding line :param bottom: Width in mm between bottom X axis of bar chart with bottom surrounding line :param right: Width in mm between right Y axis of bar chart with right surrounding line :param top: Width in mm between top X axis of bar chart with top surrounding line :param xm: X in mm (bottom left corner of bar chart) :param ym: Y in mm (bottom left corner of bar chart) :param widthm: Width of the bar chart in mm :param heightm: Height of the bar chart in mm :type mview: GXMVIEW :type group_name: str :type data: GXDB :type line: int :type x_chan: str :type list: str :type x_title: str :type x_txt_size: float :type y_title: str :type y_txt_size: float :type bar_title: str :type bar_txt_size: float :type bar_width: float :type dist_fid: int :type label: int :type tick: int :type right_axis: int :type top_axis: int :type bottom_axis: int :type surround: int :type left: float :type bottom: float :type right: float :type top: float :type xm: float :type ym: float :type widthm: float :type heightm: float ..
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._bar_chart(GXContext._get_tls_geo(), mview, group_name.encode(), data, line, x_chan.encode(), list.encode(), x_title.encode(), x_txt_size, y_title.encode(), y_txt_size, bar_title.encode(), bar_txt_size, bar_width, dist_fid, label, tick, right_axis, top_axis, bottom_axis, surround, left, bottom, right, top, xm, ym, widthm, heightm) @classmethod def cdi_pixel_plot(cls, mview, group, data_va, elev_va, xvv, itr): """ Create a color pixel-style plot of CDI data. :param mview: View :param group: Name of the group to create :param data_va: Data [lNR x lNC] :param elev_va: Elevations (Y) [lNR x lNC] :param xvv: Position (X) [lNC] :param itr: Data color transform :type mview: GXMVIEW :type group: str :type data_va: GXVA :type elev_va: GXVA :type xvv: GXVV :type itr: GXITR .. versionadded:: 7.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Draws a single colored rectangle for each data point in Conductivity-Depth data (for example). It is similar to the result you get if you plot a grid with Pixel=1, but in this data the row and column widths are not necessarily constant, and the data can move up and down with topography. The pixels are sized so that the boundaries are half-way between adjacent data, both vertically and horizontally. """ gxapi_cy.WrapMVU._cdi_pixel_plot(GXContext._get_tls_geo(), mview, group.encode(), data_va, elev_va, xvv, itr) @classmethod def cdi_pixel_plot_3d(cls, mview, group, data_va, elev_va, xvv, yvv, itr): """ Create a color pixel-style plot of CDI data in a 3D view. 
:param mview: View :param group: Name of the group to create :param data_va: Data [lNR x lNC] :param elev_va: Elevations (Z) [lNR x lNC] :param xvv: Position (X) [lNC] :param yvv: Position (Y) [lNC] :param itr: Data color transform :type mview: GXMVIEW :type group: str :type data_va: GXVA :type elev_va: GXVA :type xvv: GXVV :type yvv: GXVV :type itr: GXITR .. versionadded:: 7.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Similar to `cdi_pixel_plot <geosoft.gxapi.GXMVU.cdi_pixel_plot>`, but plotted onto a series of plotting planes which hang from the XY path in 3D. Each vertical plane azimuth is defined by two adjacent points on the path. The color "pixel" for each data point is plotted in two halves, with each half on adjacent plotting planes, with the bend at the data point. """ gxapi_cy.WrapMVU._cdi_pixel_plot_3d(GXContext._get_tls_geo(), mview, group.encode(), data_va, elev_va, xvv, yvv, itr) @classmethod def color_bar(cls, mview, itr, decimal, ann, height, width, x, y): """ Create a Color Bar in view :param mview: View :param itr: Itr :param decimal: Decimals :param ann: Annotation offset from box in mm. :param height: Box height :param width: Box width :param x: X location (bottom left corner of color boxes) :param y: Y location :type mview: GXMVIEW :type itr: GXITR :type decimal: int :type ann: float :type height: float :type width: float :type x: float :type y: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._color_bar(GXContext._get_tls_geo(), mview, itr, decimal, ann, height, width, x, y) @classmethod def color_bar2(cls, mview, itr, itr2, decimal, ann, height, width, x, y): """ Create a Color Bar from two `GXITR <geosoft.gxapi.GXITR>` :param mview: View :param itr2: Secondary `GXITR <geosoft.gxapi.GXITR>` :param decimal: Decimals :param ann: Annotation size :param height: Box height :param width: Box width :param x: X location (bottom left corner of color boxes) :param y: Y location :type mview: GXMVIEW :type itr: GXITR :type itr2: GXITR :type decimal: int :type ann: float :type height: float :type width: float :type x: float :type y: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The secondary `GXITR <geosoft.gxapi.GXITR>` is used to blend horizontally with the primary `GXITR <geosoft.gxapi.GXITR>` in each box. """ gxapi_cy.WrapMVU._color_bar2(GXContext._get_tls_geo(), mview, itr, itr2, decimal, ann, height, width, x, y) @classmethod def color_bar2_style(cls, mview, itr, itr2, decimal, ann, height, width, x, y, style): """ Create a Color Bar from two ITRs with style options :param mview: View :param itr2: Secondary `GXITR <geosoft.gxapi.GXITR>` :param decimal: Decimals :param ann: Annotation size :param height: Box height :param width: Box width :param x: X location (bottom left corner of color boxes) :param y: Y location :param style: :ref:`COLORBAR_STYLE` :type mview: GXMVIEW :type itr: GXITR :type itr2: GXITR :type decimal: int :type ann: float :type height: float :type width: float :type x: float :type y: float :type style: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The secondary `GXITR <geosoft.gxapi.GXITR>` is used to blend horizontally with the primary `GXITR <geosoft.gxapi.GXITR>` in each box. """ gxapi_cy.WrapMVU._color_bar2_style(GXContext._get_tls_geo(), mview, itr, itr2, decimal, ann, height, width, x, y, style) @classmethod def color_bar_hor(cls, mview, itr, decimal, ann, width, height, x, y, label_orient): """ Create a horizontal color bar in view :param mview: View :param itr: Itr :param decimal: Decimals :param ann: Annotation offset from box in mm (negative for labels below). :param width: Box width in mm :param height: Box height in mm :param x: X location (bottom left corner of color boxes) in mm :param y: Y location in mm :param label_orient: :ref:`COLORBAR_LABEL` :type mview: GXMVIEW :type itr: GXITR :type decimal: int :type ann: float :type width: float :type height: float :type x: float :type y: float :type label_orient: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The sign of the annotation offset determines whether labels are plotted above or below the colorbar. Labels above are text-justified to the bottom of the text, and labels below are text-justified to the top of the text. .. 
seealso:: `color_bar <geosoft.gxapi.GXMVU.color_bar>` """ gxapi_cy.WrapMVU._color_bar_hor(GXContext._get_tls_geo(), mview, itr, decimal, ann, width, height, x, y, label_orient) @classmethod def color_bar_hor2(cls, mview, itr, itr2, decimal, ann, height, width, x, y, label_orient): """ Create a Horizontal Color Bar from two ITRs :param mview: View :param itr2: Secondary `GXITR <geosoft.gxapi.GXITR>` :param decimal: Decimals :param ann: Annotation size :param height: Box height :param width: Box width :param x: X location (bottom left corner of color boxes) :param y: Y location :param label_orient: :ref:`COLORBAR_LABEL` :type mview: GXMVIEW :type itr: GXITR :type itr2: GXITR :type decimal: int :type ann: float :type height: float :type width: float :type x: float :type y: float :type label_orient: int .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The secondary `GXITR <geosoft.gxapi.GXITR>` is used to blend horizontally with the primary `GXITR <geosoft.gxapi.GXITR>` in each box. """ gxapi_cy.WrapMVU._color_bar_hor2(GXContext._get_tls_geo(), mview, itr, itr2, decimal, ann, height, width, x, y, label_orient) @classmethod def color_bar_hor2_style(cls, mview, itr, itr2, decimal, ann, height, width, x, y, style, label_orient): """ Create a Horizontal Color Bar from two ITRs with style options :param mview: View :param itr2: Secondary `GXITR <geosoft.gxapi.GXITR>` :param decimal: Decimals :param ann: Annotation size :param height: Box height :param width: Box width :param x: X location (bottom left corner of color boxes) :param y: Y location :param style: :ref:`COLORBAR_STYLE` :param label_orient: :ref:`COLORBAR_LABEL` :type mview: GXMVIEW :type itr: GXITR :type itr2: GXITR :type decimal: int :type ann: float :type height: float :type width: float :type x: float :type y: float :type style: int :type label_orient: int .. 
versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The secondary `GXITR <geosoft.gxapi.GXITR>` is used to blend horizontally with the primary `GXITR <geosoft.gxapi.GXITR>` in each box. """ gxapi_cy.WrapMVU._color_bar_hor2_style(GXContext._get_tls_geo(), mview, itr, itr2, decimal, ann, height, width, x, y, style, label_orient) @classmethod def color_bar_hor_style(cls, mview, itr, decimal, ann, height, width, x, y, style, label_orient): """ Create a Horizontal Color Bar in view with style options :param mview: View :param itr: Itr :param decimal: Decimals :param ann: Annotation size :param height: Box height :param width: Box width :param x: X location (bottom left corner of color boxes) :param y: Y location :param style: :ref:`COLORBAR_STYLE` :param label_orient: :ref:`COLORBAR_LABEL` :type mview: GXMVIEW :type itr: GXITR :type decimal: int :type ann: float :type height: float :type width: float :type x: float :type y: float :type style: int :type label_orient: int .. versionadded:: 5.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._color_bar_hor_style(GXContext._get_tls_geo(), mview, itr, decimal, ann, height, width, x, y, style, label_orient) @classmethod def color_bar_style(cls, mview, itr, decimal, ann, height, width, x, y, style): """ Create a Color Bar in view with style options :param mview: View :param itr: Itr :param decimal: Decimals :param ann: Annotation size :param height: Box height :param width: Box width :param x: X location (bottom left corner of color boxes) :param y: Y location :param style: :ref:`COLORBAR_STYLE` :type mview: GXMVIEW :type itr: GXITR :type decimal: int :type ann: float :type height: float :type width: float :type x: float :type y: float :type style: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._color_bar_style(GXContext._get_tls_geo(), mview, itr, decimal, ann, height, width, x, y, style) @classmethod def color_bar_reg(cls, mview, itr, itr2, reg): """ Create a Color Bar in view :param mview: View :param itr: Itr :param itr2: Optional 2nd Itr (can be null) :param reg: Parameters :type mview: GXMVIEW :type itr: GXITR :type itr2: GXITR :type reg: GXREG .. versionadded:: 8.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To allow for expansion, all parameters are passed inside the `GXREG <geosoft.gxapi.GXREG>` object. BAR_ORIENTATION one of MVU_ORIENTATION_XXX (DEFAULT = `MVU_ORIENTATION_VERTICAL <geosoft.gxapi.MVU_ORIENTATION_VERTICAL>`) DECIMALS decimals in plotted values (see sFormatStr_GS for rules) (DEFAULT = 1) ANNOFF annotation offset from bar (+/- determines side of the bar left/right and below/above) BOX_SIZE box height (mm) (width for horizontal color bar) (DEFAULT = 4) BAR_WIDTH width (mm) (short dimension) of the color bar (DEFAULT = 8) MINIMUM_GAP Minimum space between annotations, otherwise drop annotations (DEFAULT = 0 mm) The actual height is over-estimated, so even with zero gap there will normally always be some space between labels. FIXED_INTERVAL Preset interval for annotations scale (DEFAULT = DUMMY, use color zones) FIXED_MINOR_INTERVAL Preset minor interval for annotations scale (DEFAULT = DUMMY, if defined must be 1/10, 1/5, 1/4 or 1/2 of FIXED_INTERVAL) X X location (REQUIRED) Y Y location (REQUIRED) POST_MAXMIN Post limit values at ends of the bar (0 or 1)? 
(DEFAULT = 0) DIVISION_STYLE One of MVU_DIVISION_STYLE_XXX (DEFAULT = `MVU_DIVISION_STYLE_LINES <geosoft.gxapi.MVU_DIVISION_STYLE_LINES>`) """ gxapi_cy.WrapMVU._color_bar_reg(GXContext._get_tls_geo(), mview, itr, itr2, reg) @classmethod def color_bar_reg_ex(cls, mview, st_hist, itr, itr2, reg, vv): """ Create a Color Bar in view :param mview: View :param st_hist: `GXST <geosoft.gxapi.GXST>` with histogram info of original :param itr: Itr :param itr2: Optional 2nd Itr (can be null) :param reg: Parameters :param vv: Ranges :type mview: GXMVIEW :type st_hist: GXST :type itr: GXITR :type itr2: GXITR :type reg: GXREG :type vv: GXVV .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `color_bar_reg <geosoft.gxapi.GXMVU.color_bar_reg>`. This is the same except for the additional Ranges VV parameter. The VV is not part of the REG object, it contains the interval values for the exact interval option from an ASCII file. .. seealso:: `color_bar_reg <geosoft.gxapi.GXMVU.color_bar_reg>` """ gxapi_cy.WrapMVU._color_bar_reg_ex(GXContext._get_tls_geo(), mview, st_hist, itr, itr2, reg, vv) @classmethod def contour(cls, mview, con, grid): """ Creates a contour map. :param mview: View :param con: Control file name :param grid: Grid file name :type mview: GXMVIEW :type con: str :type grid: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._contour(GXContext._get_tls_geo(), mview, con.encode(), grid.encode()) @classmethod def contour_ply(cls, mview, ply, con, grid): """ Creates a contour map with clipped areas. :param mview: View :param ply: Clipping `GXPLY <geosoft.gxapi.GXPLY>` :param con: Control file name :param grid: Grid file name :type mview: GXMVIEW :type ply: GXPLY :type con: str :type grid: str .. 
versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The clipping `GXPLY <geosoft.gxapi.GXPLY>` can include a surrounding inclusive polygon and zero, one or more interior exclusive polygons. Construct a `GXPLY <geosoft.gxapi.GXPLY>` object using the `GXPLY.add_polygon_ex <geosoft.gxapi.GXPLY.add_polygon_ex>` function, to add both inclusive (as the first `GXPLY <geosoft.gxapi.GXPLY>`) and exclusive interior regions. """ gxapi_cy.WrapMVU._contour_ply(GXContext._get_tls_geo(), mview, ply, con.encode(), grid.encode()) @classmethod def c_symb_legend(cls, mview, x1, y1, font_size, symb_scale, file, title, sub_title): """ Plot a legend for the classified color symbols. :param mview: `GXMVIEW <geosoft.gxapi.GXMVIEW>` object :param x1: Plot origin X :param y1: Plot origin Y :param font_size: Label Font size (mm) :param symb_scale: Symbol scale factor :param file: `GXAGG <geosoft.gxapi.GXAGG>`, `GXITR <geosoft.gxapi.GXITR>` or ZON file name :param title: Plot title :param sub_title: Plot subtitle :type mview: GXMVIEW :type x1: float :type y1: float :type font_size: float :type symb_scale: float :type file: str :type title: str :type sub_title: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the symbol size, color, font etc are specified in the `GXITR <geosoft.gxapi.GXITR>`'s `GXREG <geosoft.gxapi.GXREG>`, then the Symbol scale factor is used allow the user to adjust the symbol sizes. They will be plotted at a size equal to the size in the `GXREG <geosoft.gxapi.GXREG>` times the scale factor. If no symbol size info can be found in the `GXREG <geosoft.gxapi.GXREG>`, then the symbol size is set equal to the Label Font Size. 
If no symbol font or number info is included in the `GXREG <geosoft.gxapi.GXREG>`, it is the programmer's responsibility to select the correct font and symbol before CSymbLegend is called. The same is true of the edge color. """ gxapi_cy.WrapMVU._c_symb_legend(GXContext._get_tls_geo(), mview, x1, y1, font_size, symb_scale, file.encode(), title.encode(), sub_title.encode()) @classmethod def decay_curve(cls, mview, vv_x, vv_y, v_ay, v_ax, log, log_min, angle, x_bar, y_bar, x_off_set, y_off_set, width, height, x_min, y_min, x_scale, y_scale, line_pitch, line_style, line_color): """ Plot decay curves at survey locations :param mview: View :param vv_x: X coordinate `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y coordinate `GXVV <geosoft.gxapi.GXVV>` :param v_ay: `GXVA <geosoft.gxapi.GXVA>` channel to plot :param v_ax: `GXVA <geosoft.gxapi.GXVA>` channel as horizontal axis (normally time channel) :param log: Log option: 0 linear (default), 1 logarithm, 2 log/linear :param log_min: Min value to apply log (must be > 0.0) :param angle: Angle in degrees measured CCW from East of the map :param x_bar: Draw horizontal bar: 0 none, 1 bottom, 2 top, 3 both :param y_bar: Draw vertical bar: 0 none, 1 bottom, 2 top, 3 both :param x_off_set: X offset in mm: Horizontal distance between survey location and origin of the box inside which decay curvey is drawn :param y_off_set: Y offset in mm :param width: Box width in mm:Decay curve at each survey location is drawn within this box :param height: Box height in mm :param x_min: Minimum value for X (horizontal axis) :param y_min: Minimum value for Y (vertical axis) :param x_scale: X scale :param y_scale: Y scale :param line_pitch: Line pitch, default is 5.0mm :param line_style: Line style :param line_color: Line color :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type v_ay: GXVA :type v_ax: GXVA :type log: int :type log_min: float :type angle: float :type x_bar: int :type y_bar: int :type x_off_set: float :type y_off_set: float 
:type width: float :type height: float :type x_min: float :type y_min: float :type x_scale: float :type y_scale: float :type line_pitch: float :type line_style: int :type line_color: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Box width and height are used to draw horizontal and vertical bars. Curves outside the box are not clipped. """ gxapi_cy.WrapMVU._decay_curve(GXContext._get_tls_geo(), mview, vv_x, vv_y, v_ay, v_ax, log, log_min, angle, x_bar, y_bar, x_off_set, y_off_set, width, height, x_min, y_min, x_scale, y_scale, line_pitch, line_style, line_color.encode()) @classmethod def direction_plot(cls, mview, vv_x, vv_y, size, loc, align): """ Plot an arrow to indicate the direction of a flight line :param mview: View :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` :param size: Arrow size in mm :param loc: Location to draw in mm - can be X or Y depending on next parameter :param align: :ref:`ARROW_ALIGNMENT` :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type size: float :type loc: float :type align: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** An arrow will be drawn in the direction from the first valid to the last points in the X and Y VVs. """ gxapi_cy.WrapMVU._direction_plot(GXContext._get_tls_geo(), mview, vv_x, vv_y, size, loc, align) @classmethod def em_forward(cls, mview, xo, yo, size_x, size_y, coil_sep, coil_frequency, coil_configuration, r, h, i, q, rvv, hvv, ivv, qvv, lin_log, var): """ Plot an EM forward model against inverted data. 
:param mview: View :param xo: Plot X origin :param yo: Plot Y origin :param size_x: Plot X size :param size_y: Plot Y size :param coil_sep: Coil Separation (m) :param coil_frequency: Coil Frequency (Hz) :param coil_configuration: :ref:`EMLAY_GEOMETRY` :param r: Inverted or current resistivity :param h: Inverted or current height :param i: In-phase datum :param q: Quadrature datum :param rvv: Forward model resistivities :param hvv: Forward model heights :param ivv: Forward model In-phase (ppm) :param qvv: Forward model Quadrature (ppm) :param lin_log: Plot resistivity as linear (0) or log (1) :param var: Plot as function of resistivity (0) or height (1) :type mview: GXMVIEW :type xo: float :type yo: float :type size_x: float :type size_y: float :type coil_sep: float :type coil_frequency: float :type coil_configuration: int :type r: float :type h: float :type i: float :type q: float :type rvv: GXVV :type hvv: GXVV :type ivv: GXVV :type qvv: GXVV :type lin_log: int :type var: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This function is designed to display an inverted result beside the forward model curves. This is useful for trouble-shooting or understanding why a certain inversion result was obtained. The earth model is a simple halfspace. The forward model is plotted either as a function of resistivity at a single height, or as a function of height at a single resistivity. In either case, the relevant VVs must be completely filled (even if one is all the same value). """ gxapi_cy.WrapMVU._em_forward(GXContext._get_tls_geo(), mview, xo, yo, size_x, size_y, coil_sep, coil_frequency, coil_configuration, r, h, i, q, rvv, hvv, ivv, qvv, lin_log, var) @classmethod def export_datamine_string(cls, mview, lst, file): """ Export selected map groups in a map view to a Datamine coordinate string file. 
        :param mview: View
        :param lst:   `GXLST <geosoft.gxapi.GXLST>` with group names in the name part of the `GXLST <geosoft.gxapi.GXLST>`.
        :param file:  Datamine string file (``*.dm``) to export to
        :type mview:  GXMVIEW
        :type lst:    GXLST
        :type file:   str

        .. versionadded:: 6.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The lines, rectangles and polygons in the specified groups
        will be exported to a Datamine coordinate string (``*.dm``) file.
        The function attempts to duplicate the colors, etc. used.
        Complex polygon objects will be exported as independent single polygons.

        .. seealso:: `GXLST <geosoft.gxapi.GXLST>` class
        """
        # Delegate to the compiled GX API; the file name must be byte-encoded
        # before crossing into the C layer.
        gxapi_cy.WrapMVU._export_datamine_string(GXContext._get_tls_geo(), mview, lst, file.encode())



    @classmethod
    def export_dxf_3d(cls, mview, lst, wa):
        """
        Export selected map groups in a map view to an AutoCAD 3D DXF file.

        :param mview: View
        :param lst:   `GXLST <geosoft.gxapi.GXLST>` with group names in the name part of the `GXLST <geosoft.gxapi.GXLST>`.
        :param wa:    DXF file to export
        :type mview:  GXMVIEW
        :type lst:    GXLST
        :type wa:     GXWA

        .. versionadded:: 6.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Supported objects exported include lines, polygons, text.

        .. seealso:: `GXLST <geosoft.gxapi.GXLST>` class
        """
        # Unlike the Datamine export above, the target here is an already-open
        # `GXWA` object rather than a file name, so no encoding is needed.
        gxapi_cy.WrapMVU._export_dxf_3d(GXContext._get_tls_geo(), mview, lst, wa)



    @classmethod
    def export_surpac_str(cls, mview, lst, str_wa, styles_wa):
        """
        Export selected map groups in a map view to a Surpac `GXSTR <geosoft.gxapi.GXSTR>` file.

        :param mview:     View
        :param lst:       `GXLST <geosoft.gxapi.GXLST>` with group names in the name part of the `GXLST <geosoft.gxapi.GXLST>`.
        :param str_wa:    `GXSTR <geosoft.gxapi.GXSTR>` file to export to
        :param styles_wa: Styles file to export to
        :type mview:      GXMVIEW
        :type lst:        GXLST
        :type str_wa:     GXWA
        :type styles_wa:  GXWA

        ..
versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The lines, rectangles and polygons in the specified groups will be exported to a Surpac `GXSTR <geosoft.gxapi.GXSTR>` file. An accompanying styles file will be created which will attempt to duplicate the colors, etc. used. Complex polygon objects will be exported as independent single polygons. .. seealso:: `GXLST <geosoft.gxapi.GXLST>` class """ gxapi_cy.WrapMVU._export_surpac_str(GXContext._get_tls_geo(), mview, lst, str_wa, styles_wa) @classmethod def export_map_groups_to_gdb(cls, mview, lst, db): """ Export map group(s) to database line(s). :param mview: View :param lst: `GXLST <geosoft.gxapi.GXLST>` with group names in the name part of the `GXLST <geosoft.gxapi.GXLST>`. :param db: Database :type mview: GXMVIEW :type lst: GXLST :type db: GXDB .. versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMVU._export_map_groups_to_gdb(GXContext._get_tls_geo(), mview, lst, db) @classmethod def flight_plot(cls, mview, vv_x, vv_y, line, locate, vangle, up, loff, voff): """ Draw a flight line :param mview: View :param vv_x: X :param vv_y: Y :param line: Line label :param locate: :ref:`MVU_FLIGHT_LOCATE` :param vangle: Lines steeper than this angle are considered vertical and the up label direction is used. :param up: Up label direction: 1 up is right, -1 up is left :param loff: Along line label offset in mm. :param voff: Perpendicular label offset mm. :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type line: str :type locate: int :type vangle: float :type up: int :type loff: float :type voff: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Current line color, thickness and style are used to draw the line. Current font, font color and font style are used to annotate the line labels. If current clipping is ON in the VIEW, lines will be clipped to the window before plotting. In this case, labels should be located ABOVE or BELOW the line traces to prevent labels being clipped. The offsets dOffA and dOffB control the vertical and horizontal label offsets with respect to the ends of the line trace and depending on the label location. The vertical line reference angle dVerAng is used to determine if lines are considered vertical or horizontal. Vertical lines use the sUp parameter to determine the label up direction. Normally, use an angle of 60 degrees unless there are lines that run in this direction. .. seealso:: `path_plot <geosoft.gxapi.GXMVU.path_plot>` """ gxapi_cy.WrapMVU._flight_plot(GXContext._get_tls_geo(), mview, vv_x, vv_y, line.encode(), locate, vangle, up, loff, voff) @classmethod def gen_areas(cls, mview, lines, col_vv, pat_vv, pitch): """ Generate areas from an line group. :param mview: View :param lines: Group with lines :param col_vv: Colors (color int) :param pat_vv: Patterns (int), must be same length at colors :param pitch: Pattern size :type mview: GXMVIEW :type lines: str :type col_vv: GXVV :type pat_vv: GXVV :type pitch: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The specified line group will be used to create a new group that is composed of all the resolved polygonal areas in the line group. Each polygonal area is assigned a color/pattern as specified in the color and pattern `GXVV <geosoft.gxapi.GXVV>`'s. Color/patterns are assigned in rotating sequence. .. 
seealso:: `re_gen_areas <geosoft.gxapi.GXMVU.re_gen_areas>` """ gxapi_cy.WrapMVU._gen_areas(GXContext._get_tls_geo(), mview, lines.encode(), col_vv, pat_vv, pitch) @classmethod def get_range_gocad_surface(cls, file, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the XYZ range of a GOCAD surface. :param file: GOCAD file name :param min_x: Min X value :param min_y: Min Y value :param min_z: Min Z value :param max_x: Max X value :param max_y: Max Y value :param max_z: Max Z value :type file: str :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Required to set up a map view before doing the actual surface import. """ min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = gxapi_cy.WrapMVU._get_range_gocad_surface(GXContext._get_tls_geo(), file.encode(), min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) @classmethod def histogram(cls, mview, st_data, st_hist, title, unit, xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, fill_color, st_box): """ Plot the histogram on a map. 
:param mview: View :param st_data: `GXST <geosoft.gxapi.GXST>` with summary stats of original data :param st_hist: `GXST <geosoft.gxapi.GXST>` with histogram info of original or log10 data :param title: Title :param unit: Unit :param xm: X in mm (bottom left corner of histogram box) :param ym: Y in mm (bottom left corner of histogram box) :param widthm: Box width in mm :param heightm: Box height in mm :param xd: Minimum X in data unit (bottom left corner of histogram boxes) :param yd: Minimum Y in data unit :param widthd: Box width in data unit :param heightd: Box height in data unit :param sum_width: Width (mm) of the additional box for summary stats :param log: Log horizontal axis: 0 - Normal, 1 - Log :param summ: Summary stats: 0 - do not draw, 1 - draw :param fill_color: Fill color :param st_box: `GXST <geosoft.gxapi.GXST>` with histogram for box-whisker plot (-1 for no plot) :type mview: GXMVIEW :type st_data: GXST :type st_hist: GXST :type title: str :type unit: str :type xm: float :type ym: float :type widthm: float :type heightm: float :type xd: float :type yd: float :type widthd: float :type heightd: float :type sum_width: float :type log: int :type summ: int :type fill_color: int :type st_box: GXST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function just calls `histogram2 <geosoft.gxapi.GXMVU.histogram2>` with decimals set to -7 (7 significant figures). .. 
seealso:: `histogram2 <geosoft.gxapi.GXMVU.histogram2>`, `histogram3 <geosoft.gxapi.GXMVU.histogram3>` """ gxapi_cy.WrapMVU._histogram(GXContext._get_tls_geo(), mview, st_data, st_hist, title.encode(), unit.encode(), xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, fill_color, st_box) @classmethod def histogram2(cls, mview, st_data, st_hist, x_title, y_title, xy_txt_size, title, plot_txt_size, unit, xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, fill_color, st_box, x_marker): """ Plot the histogram on a map. :param mview: View :param st_data: `GXST <geosoft.gxapi.GXST>` with summary stats of original data :param st_hist: `GXST <geosoft.gxapi.GXST>` with histogram info of original or log10 data :param x_title: X axis title :param y_title: Y axis title :param xy_txt_size: Text size in mm for X/Y axis' titles. Accept dummy :param title: Overall title. Plotted below X axis if X axis title is not given :param plot_txt_size: Text size in mm for plot overall title. 
Accept dummy :param unit: Unit :param xm: X in mm (bottom left corner of histogram box) :param ym: Y in mm (bottom left corner of histogram box) :param widthm: Box width in mm :param heightm: Box height in mm :param xd: Minimum X in data unit (bottom left corner of histogram boxes) :param yd: Minimum Y in data unit :param widthd: Box width in data unit :param heightd: Box height in data unit :param sum_width: Width (mm) of the additional box for summary stats :param log: Log horizontal axis: 0 - Normal, 1 - Log :param summ: Summary stats: 0 - do not draw, 1 - draw :param fill_color: Fill color :param st_box: `GXST <geosoft.gxapi.GXST>` with histogram for box-wisker plot (-1 for no plot) :param x_marker: X value (threshold value) to draw a vertical line (see notes) :type mview: GXMVIEW :type st_data: GXST :type st_hist: GXST :type x_title: str :type y_title: str :type xy_txt_size: float :type title: str :type plot_txt_size: float :type unit: str :type xm: float :type ym: float :type widthm: float :type heightm: float :type xd: float :type yd: float :type widthd: float :type heightd: float :type sum_width: float :type log: int :type summ: int :type fill_color: int :type st_box: GXST :type x_marker: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A vertical line through from bottom to top horizontal axis is drawn Also a label 'Threshold value' is plotted against this line. However, None of them will be plotted if threshold value is dummy or outside the X data range. 
""" gxapi_cy.WrapMVU._histogram2(GXContext._get_tls_geo(), mview, st_data, st_hist, x_title.encode(), y_title.encode(), xy_txt_size, title.encode(), plot_txt_size, unit.encode(), xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, fill_color, st_box, x_marker) @classmethod def histogram3(cls, mview, st_data, st_hist, title, unit, xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, fill_color, data_decimal, stat_decimal, st_box): """ Plot the histogram on a map, specify decimals. :param mview: View :param st_data: `GXST <geosoft.gxapi.GXST>` with summary stats of original data :param st_hist: `GXST <geosoft.gxapi.GXST>` with histogram info of original or log10 data :param title: Title :param unit: Unit :param xm: X in mm (bottom left corner of histogram box) :param ym: Y in mm (bottom left corner of histogram box) :param widthm: Box width in mm :param heightm: Box height in mm :param xd: Minimum X in data unit (bottom left corner of histogram boxes) :param yd: Minimum Y in data unit :param widthd: Box width in data unit :param heightd: Box height in data unit :param sum_width: Width (mm) of the additional box for summary stats :param log: Log horizontal axis: 0 - Normal, 1 - Log :param summ: Summary stats: 0 - do not draw, 1 - draw :param fill_color: Fill color :param data_decimal: Decimals for data, negative for sig. fig. :param stat_decimal: Decimals for stats, negative for sig. fig. :param st_box: `GXST <geosoft.gxapi.GXST>` with histogram for box-whisker plot (-1 for no plot) :type mview: GXMVIEW :type st_data: GXST :type st_hist: GXST :type title: str :type unit: str :type xm: float :type ym: float :type widthm: float :type heightm: float :type xd: float :type yd: float :type widthd: float :type heightd: float :type sum_width: float :type log: int :type summ: int :type fill_color: int :type data_decimal: int :type stat_decimal: int :type st_box: GXST .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._histogram3(GXContext._get_tls_geo(), mview, st_data, st_hist, title.encode(), unit.encode(), xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, fill_color, data_decimal, stat_decimal, st_box) @classmethod def histogram4(cls, mview, st_data, st_hist, title, unit, xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, prob, fill_color, data_decimal, stat_decimal, st_box): """ As `histogram3 <geosoft.gxapi.GXMVU.histogram3>`, but allow probability scaling of percents. :param mview: View :param st_data: `GXST <geosoft.gxapi.GXST>` with summary stats of original data :param st_hist: `GXST <geosoft.gxapi.GXST>` with histogram info of original or log10 data :param title: Title :param unit: Unit :param xm: X in mm (bottom left corner of histogram box) :param ym: Y in mm (bottom left corner of histogram box) :param widthm: Box width in mm :param heightm: Box height in mm :param xd: Minimum X in data unit (bottom left corner of histogram boxes) :param yd: Minimum Y in data unit :param widthd: Box width in data unit :param heightd: Box height in data unit :param sum_width: Width (mm) of the additional box for summary stats :param log: Log horizontal axis: 0 - Normal, 1 - Log :param summ: Summary stats: 0 - do not draw, 1 - draw :param prob: Probability scaling: 0 - linear scale, 1 - scale as normal distribution :param fill_color: Fill color :param data_decimal: Decimals for data, negative for sig. fig. :param stat_decimal: Decimals for stats, negative for sig. fig. 
:param st_box: `GXST <geosoft.gxapi.GXST>` with histogram for box-whisker plot (-1 for no plot) :type mview: GXMVIEW :type st_data: GXST :type st_hist: GXST :type title: str :type unit: str :type xm: float :type ym: float :type widthm: float :type heightm: float :type xd: float :type yd: float :type widthd: float :type heightd: float :type sum_width: float :type log: int :type summ: int :type prob: int :type fill_color: int :type data_decimal: int :type stat_decimal: int :type st_box: GXST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._histogram4(GXContext._get_tls_geo(), mview, st_data, st_hist, title.encode(), unit.encode(), xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, prob, fill_color, data_decimal, stat_decimal, st_box) @classmethod def histogram5(cls, mview, st_data, st_hist, title, unit, lmd, xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, prob, fill_color, data_decimal, stat_decimal, st_box, itr): """ As `histogram4 <geosoft.gxapi.GXMVU.histogram4>`, but allow `GXITR <geosoft.gxapi.GXITR>` to color bars. 
:param mview: View :param st_data: `GXST <geosoft.gxapi.GXST>` with summary stats of original data :param st_hist: `GXST <geosoft.gxapi.GXST>` with histogram info of original or log10 data :param title: Title :param unit: Unit :param lmd: [i] Lambda Value :param xm: X in mm (bottom left corner of histogram box) :param ym: Y in mm (bottom left corner of histogram box) :param widthm: Box width in mm :param heightm: Box height in mm :param xd: Minimum X in data unit (bottom left corner of histogram boxes) :param yd: Minimum Y in data unit :param widthd: Box width in data unit :param heightd: Box height in data unit :param sum_width: Width (mm) of the additional box for summary stats :param log: Log horizontal axis: 0 - Normal, 1 - Log, 2 - Lambda :param summ: Summary stats: 0 - do not draw, 1 - draw :param prob: Probability scaling: 0 - linear scale, 1 - scale as normal distribution :param fill_color: Fill color :param data_decimal: Decimals for data, negative for sig. fig. :param stat_decimal: Decimals for stats, negative for sig. fig. :param st_box: `GXST <geosoft.gxapi.GXST>` with histogram for box-whisker plot (-1 for no plot) :param itr: `GXITR <geosoft.gxapi.GXITR>` to color bars. :type mview: GXMVIEW :type st_data: GXST :type st_hist: GXST :type title: str :type unit: str :type lmd: float :type xm: float :type ym: float :type widthm: float :type heightm: float :type xd: float :type yd: float :type widthd: float :type heightd: float :type sum_width: float :type log: int :type summ: int :type prob: int :type fill_color: int :type data_decimal: int :type stat_decimal: int :type st_box: GXST :type itr: GXITR .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXITR <geosoft.gxapi.GXITR>` can be empty (but must still be a valid `GXITR <geosoft.gxapi.GXITR>` object). 
        """
        # Delegate to the compiled GX API; text arguments (title, unit) are
        # byte-encoded before crossing into the C layer.
        gxapi_cy.WrapMVU._histogram5(GXContext._get_tls_geo(), mview, st_data, st_hist, title.encode(), unit.encode(), lmd, xm, ym, widthm, heightm, xd, yd, widthd, heightd, sum_width, log, summ, prob, fill_color, data_decimal, stat_decimal, st_box, itr)



    @classmethod
    def exportable_dxf_3d_groups_lst(cls, mview, lst):
        """
        Return a `GXLST <geosoft.gxapi.GXLST>` of groups you can export using sExportDXF3D_MVU.

        :param mview: View
        :param lst:   `GXLST <geosoft.gxapi.GXLST>` with group names in the name part of the `GXLST <geosoft.gxapi.GXLST>`.
        :type mview:  GXMVIEW
        :type lst:    GXLST

        :returns: The number of groups in the `GXLST <geosoft.gxapi.GXLST>`.
        :rtype:   int

        .. versionadded:: 7.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Returns a list of visible groups that the DXF 3D export
        can export. Removes things like `GXVOXD <geosoft.gxapi.GXVOXD>`,
        `GXAGG <geosoft.gxapi.GXAGG>`, and target groups starting with "Dh",
        which are typically plotted in 3D views on a reference plan oriented
        toward the user, and thus not exportable.
        """
        # `lst` is populated in place by the C layer; the int return value is
        # the resulting number of exportable groups.
        ret_val = gxapi_cy.WrapMVU._exportable_dxf_3d_groups_lst(GXContext._get_tls_geo(), mview, lst)
        return ret_val



    @classmethod
    def mapset_test(cls, min_x, max_x, min_y, max_y, size, port, exact, scale, conv, marg_xmin, marg_xmax, marg_ymin, marg_ymax, inside):
        """
        Test function to ensure parameters to `mapset <geosoft.gxapi.GXMVU.mapset>` is sane

        :param min_x: Minimum X of data area (data units)
        :param max_x: Maximum X of data area (data units)
        :param min_y: Minimum Y of data area (data units)
        :param max_y: Maximum Y of data area (data units)
        :param size:  Media size as a string 'x_cm,y_cm', or a standard paper size (e.g. 'A4', 'E')
        :param port:  0 - landscape; 1 - portrait
        :param exact: 1 - map size fixed to media; 0 - map size adjusted to data and margins.
:param scale: Map scale (rDummy for default) :param conv: Conversion factor (to units/meter) (rDummy for default) :param marg_xmin: Left margin (cm) :param marg_xmax: Right margin (cm) :param marg_ymin: Bottom margin (cm) :param marg_ymax: Top margin (cm) :param inside: Inside data margin (cm) :type min_x: float :type max_x: float :type min_y: float :type max_y: float :type size: str :type port: int :type exact: int :type scale: float_ref :type conv: float :type marg_xmin: float :type marg_xmax: float :type marg_ymin: float :type marg_ymax: float :type inside: float :returns: ``True`` if the parameters are good. :rtype: bool .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use `GXSYS.show_error <geosoft.gxapi.GXSYS.show_error>` to display errors that may have been encountered. This function can also be used to calculate the default scale without creating a map. """ ret_val, scale.value = gxapi_cy.WrapMVU._mapset_test(GXContext._get_tls_geo(), min_x, max_x, min_y, max_y, size.encode(), port, exact, scale.value, conv, marg_xmin, marg_xmax, marg_ymin, marg_ymax, inside) return ret_val @classmethod def mapset2_test(cls, min_x, max_x, min_y, max_y, size, port, exact, scale, vert_exag, conv, marg_xmin, marg_xmax, marg_ymin, marg_ymax, inside): """ Test function to ensure parameters to `mapset <geosoft.gxapi.GXMVU.mapset>` is sane :param min_x: Minimum X of data area (data units) :param max_x: Maximum X of data area (data units) :param min_y: Minimum Y of data area (data units) :param max_y: Maximum Y of data area (data units) :param size: Media size as a string 'x_cm,y_cm', or a standard paper size (e.g. 'A4', 'E') :param port: 0 - landscape; 1 - portrait :param exact: 1 - map size fixed to media; 0 - map size adjusted to data and margins. 
:param scale: Map scale (rDummy for default) :param vert_exag: Vertical exaggeration (Normally 1.0) :param conv: Conversion factor (to units/meter) (rDummy for default) :param marg_xmin: Left margin (cm) :param marg_xmax: Right margin (cm) :param marg_ymin: Bottom margin (cm) :param marg_ymax: Top margin (cm) :param inside: Inside data margin (cm) :type min_x: float :type max_x: float :type min_y: float :type max_y: float :type size: str :type port: int :type exact: int :type scale: float_ref :type vert_exag: float :type conv: float :type marg_xmin: float :type marg_xmax: float :type marg_ymin: float :type marg_ymax: float :type inside: float :returns: ``True`` if the parameters are good. :rtype: bool .. versionadded:: 8.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Same as `mapset_test <geosoft.gxapi.GXMVU.mapset_test>`, with vertical exaggeration. """ ret_val, scale.value = gxapi_cy.WrapMVU._mapset2_test(GXContext._get_tls_geo(), min_x, max_x, min_y, max_y, size.encode(), port, exact, scale.value, vert_exag, conv, marg_xmin, marg_xmax, marg_ymin, marg_ymax, inside) return ret_val @classmethod def import_gocad_surface(cls, mview, file, col): """ Import and plot a GOCAD surface model. :param mview: View :param file: GOCAD file name :param col: Color to plot (`C_TRANSPARENT <geosoft.gxapi.C_TRANSPARENT>` to use file-defined color). :type mview: GXMVIEW :type file: str :type col: int .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The vertex normals are not included in the GOCAD import, but are calculated using the normal of each defined triangle, and taking the average when vertex is shared among more than one triangle. 
        """
        # Delegate to the compiled GX API; the file name is byte-encoded
        # before crossing into the C layer.
        gxapi_cy.WrapMVU._import_gocad_surface(GXContext._get_tls_geo(), mview, file.encode(), col)



    @classmethod
    def load_plot(cls, map, name):
        """
        Load a Geosoft PLT file into a `GXMAP <geosoft.gxapi.GXMAP>`.

        :param map:  Map handle
        :param name: Plot file name
        :type map:   GXMAP
        :type name:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        # Reads the named PLT file into the supplied map handle.
        gxapi_cy.WrapMVU._load_plot(GXContext._get_tls_geo(), map, name.encode())



    @classmethod
    def map_from_plt(cls, map, base, data, plt, mpx, mpy):
        """
        Creates a new map from a PLT file.

        :param map:  `GXMAP <geosoft.gxapi.GXMAP>` Handle
        :param base: Name to use for the base map view
        :param data: Name to use for the data view
        :param plt:  Plot file name
        :param mpx:  Map paper size in X direction (cm)
        :param mpy:  Map paper size in Y direction (cm)
        :type map:   GXMAP
        :type base:  str
        :type data:  str
        :type plt:   str
        :type mpx:   float
        :type mpy:   float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This only creates a map, it does not read the PLT into
        the map. The base view and data view will be the same size.

        .. seealso:: `load_plot <geosoft.gxapi.GXMVU.load_plot>`
        """
        # Sets up the base/data views sized to the paper; use load_plot()
        # afterwards to actually draw the PLT content into the map.
        gxapi_cy.WrapMVU._map_from_plt(GXContext._get_tls_geo(), map, base.encode(), data.encode(), plt.encode(), mpx, mpy)



    @classmethod
    def map_mdf(cls, map, mdf, data):
        """
        Creates an MDF from a Map.

        :param map:  `GXMAP <geosoft.gxapi.GXMAP>` Handle
        :param mdf:  MDF file name
        :param data: Data view name
        :type map:   GXMAP
        :type mdf:   str
        :type data:  str

        ..
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._map_mdf(GXContext._get_tls_geo(), map, mdf.encode(), data.encode()) @classmethod def mapset(cls, map, base, data, min_x, max_x, min_y, max_y, size, port, exact, scale, conv, marg_xmin, marg_xmax, marg_ymin, marg_ymax, inside): """ Creates a new map directly from parameters. :param map: `GXMAP <geosoft.gxapi.GXMAP>` Handle :param base: Name to use for the base map view :param data: Name to use for the data view :param min_x: Minimum X of data area (data units) :param max_x: Maximum X of data area (data units) :param min_y: Minimum Y of data area (data units) :param max_y: Maximum Y of data area (data units) :param size: Media size as a string 'x_cm,y_cm', or a standard paper size (e.g. 'A4', 'E') :param port: 0 - landscape; 1 - portrait :param exact: 1 - map size fixed to media; 0 - map size adjusted to data and margins. :param scale: Map scale (rDummy for default) :param conv: Conversion factor (to units/meter) (rDummy for default) :param marg_xmin: Left margin (cm) :param marg_xmax: Right margin (cm) :param marg_ymin: Bottom margin (cm) :param marg_ymax: Top margin (cm) :param inside: Inside data margin (cm) :type map: GXMAP :type base: str :type data: str :type min_x: float :type max_x: float :type min_y: float :type max_y: float :type size: str :type port: int :type exact: int :type scale: float :type conv: float :type marg_xmin: float :type marg_xmax: float :type marg_ymin: float :type marg_ymax: float :type inside: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._mapset(GXContext._get_tls_geo(), map, base.encode(), data.encode(), min_x, max_x, min_y, max_y, size.encode(), port, exact, scale, conv, marg_xmin, marg_xmax, marg_ymin, marg_ymax, inside) @classmethod def mapset2(cls, map, base, data, min_x, max_x, min_y, max_y, size, port, exact, scale, vert_exag, conv, marg_xmin, marg_xmax, marg_ymin, marg_ymax, inside): """ Same as `mapset <geosoft.gxapi.GXMVU.mapset>`, with vertical exaggeration. :param map: `GXMAP <geosoft.gxapi.GXMAP>` Handle :param base: Name to use for the base map view :param data: Name to use for the data view :param min_x: Minimum X of data area (data units) :param max_x: Maximum X of data area (data units) :param min_y: Minimum Y of data area (data units) :param max_y: Maximum Y of data area (data units) :param size: Media size as a string 'x_cm,y_cm', or a standard paper size (e.g. 'A4', 'E') :param port: 0 - landscape; 1 - portrait :param exact: 1 - map size fixed to media; 0 - map size adjusted to data and margins. :param scale: Map scale (rDummy for default) :param vert_exag: Vertical Exaggeration (1.0 for none) :param conv: Conversion factor (to units/meter) (rDummy for default) :param marg_xmin: Left margin (cm) :param marg_xmax: Right margin (cm) :param marg_ymin: Bottom margin (cm) :param marg_ymax: Top margin (cm) :param inside: Inside data margin (cm) :type map: GXMAP :type base: str :type data: str :type min_x: float :type max_x: float :type min_y: float :type max_y: float :type size: str :type port: int :type exact: int :type scale: float :type vert_exag: float :type conv: float :type marg_xmin: float :type marg_xmax: float :type marg_ymin: float :type marg_ymax: float :type inside: float .. 
versionadded:: 8.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._mapset2(GXContext._get_tls_geo(), map, base.encode(), data.encode(), min_x, max_x, min_y, max_y, size.encode(), port, exact, scale, vert_exag, conv, marg_xmin, marg_xmax, marg_ymin, marg_ymax, inside) @classmethod def mdf(cls, map, mdf, base, data): """ Creates a new map from an MDF file. :param map: `GXMAP <geosoft.gxapi.GXMAP>` Handle :param mdf: MDF file name :param base: Name to use for the base map view :param data: Name to use for the data view :type map: GXMAP :type mdf: str :type base: str :type data: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._mdf(GXContext._get_tls_geo(), map, mdf.encode(), base.encode(), data.encode()) @classmethod def path_plot(cls, mview, vv_x, vv_y, line, locate, vangle, up, loff, voff, gap): """ Draw a flight line :param mview: View :param vv_x: X :param vv_y: Y :param line: Line label :param locate: :ref:`MVU_FLIGHT_LOCATE` :param vangle: Lines steeper than this angle are considered vertical and the up label direction is used. :param up: Up label direction: 1 up is right -1 up is left :param loff: Along line label offset in mm. :param voff: Perpendicular label offset mm. :param gap: Maximum gap before breaking line, 0.0 for no breaks. :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type line: str :type locate: int :type vangle: float :type up: int :type loff: float :type voff: float :type gap: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `flight_plot <geosoft.gxapi.GXMVU.flight_plot>`. This is the same except for the additional line gap parameter. .. 
seealso:: `flight_plot <geosoft.gxapi.GXMVU.flight_plot>` """ gxapi_cy.WrapMVU._path_plot(GXContext._get_tls_geo(), mview, vv_x, vv_y, line.encode(), locate, vangle, up, loff, voff, gap) @classmethod def path_plot_ex(cls, mview, vv_x, vv_y, line, locate, compass, vangle, up, loff, voff, gap): """ Draw a flight line :param mview: View :param vv_x: X :param vv_y: Y :param line: Line label :param locate: :ref:`MVU_FLIGHT_LOCATE` :param compass: :ref:`MVU_FLIGHT_COMPASS` :param vangle: Lines steeper than this angle are considered vertical and the up label direction is used. :param up: Up label direction: 1 up is right -1 up is left :param loff: Along line label offset in mm. :param voff: Perpendicular label offset mm. :param gap: Maximum gap before breaking line, 0.0 for no breaks. :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type line: str :type locate: int :type compass: int :type vangle: float :type up: int :type loff: float :type voff: float :type gap: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the same except for the additional line compass parameter. .. seealso:: `path_plot <geosoft.gxapi.GXMVU.path_plot>` """ gxapi_cy.WrapMVU._path_plot_ex(GXContext._get_tls_geo(), mview, vv_x, vv_y, line.encode(), locate, compass, vangle, up, loff, voff, gap) @classmethod def path_plot_ex2(cls, mview, vv_x, vv_y, line, locate, compass, vangle, up, loff, voff, gap, dummies): """ Draw a flight line :param mview: View :param vv_x: X :param vv_y: Y :param line: Line label :param locate: :ref:`MVU_FLIGHT_LOCATE` :param compass: :ref:`MVU_FLIGHT_COMPASS` :param vangle: Lines steeper than this angle are considered vertical and the up label direction is used. :param up: Up label direction: 1 up is right -1 up is left :param loff: Along line label offset in mm. :param voff: Perpendicular label offset mm. 
:param gap: Maximum gap before breaking line, 0.0 for no breaks. :param dummies: :ref:`MVU_FLIGHT_DUMMIES` :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type line: str :type locate: int :type compass: int :type vangle: float :type up: int :type loff: float :type voff: float :type gap: float :type dummies: int .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the same except for the additional line dummies parameter. .. seealso:: `path_plot_ex <geosoft.gxapi.GXMVU.path_plot_ex>` """ gxapi_cy.WrapMVU._path_plot_ex2(GXContext._get_tls_geo(), mview, vv_x, vv_y, line.encode(), locate, compass, vangle, up, loff, voff, gap, dummies) @classmethod def plot_voxel_slice(cls, mview, vox, itr, x, y, dOrigin, dMin, dmax, res): """ Extract a vertical slice from a voxel along a path and plot it to a 2D view. :param mview: View :param vox: Voxel model :param itr: colour model :param x: x-values along the line :param y: y-values along the line :param dOrigin: Distance at the first XY location (normally 0.0) :param dMin: Starting distance to plot :param dmax: Ending distance to plot :param res: Sampling resolution :type mview: GXMVIEW :type vox: GXVOX :type itr: GXITR :type x: GXVV :type y: GXVV :type dOrigin: float :type dMin: float :type dmax: float :type res: float .. versionadded:: 2022.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The path is splined linearly between points and sampled at the input interval. The resulting vertical samples of cubes are plotted as "pixel strips" in the view. """ gxapi_cy.WrapMVU._plot_voxel_slice(GXContext._get_tls_geo(), mview, vox, itr, x, y, dOrigin, dMin, dmax, res) @classmethod def plot_voxel_surface(cls, mview, vox, value, col, line_thick): """ Extract an iso-surface from a voxel and plot it to a 2D or 3D view. 
:param mview: View :param vox: Voxel model :param value: Iso-surface value :param col: Drawing color :param line_thick: Line thickness for line drawing, and 2D views. :type mview: GXMVIEW :type vox: GXVOX :type value: float :type col: int :type line_thick: float .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Marching Cubes method of Lorensen and Cline, Computer Graphics, V21, Number 4, July 1987, is used to calculate a given iso-surface in a voxel model. The resulting surface is plotted to a 2D or 3D view. If the view is 2-D, then only the intersection of the surface with the 2D surface is plotted, using lines. """ gxapi_cy.WrapMVU._plot_voxel_surface(GXContext._get_tls_geo(), mview, vox, value, col, line_thick) @classmethod def plot_voxel_surface2(cls, mview, vox, value, col, line_thick, transparency, surface_name): """ Extract an iso-surface from a voxel and plot it to a 2D or 3D view. :param mview: View :param vox: Voxel model :param value: Iso-surface value :param col: Drawing color :param line_thick: Line thickness for line drawing, and 2D views. :param transparency: Transparency (0 - transparent, 1 - opaque). :param surface_name: Iso-surface name :type mview: GXMVIEW :type vox: GXVOX :type value: float :type col: int :type line_thick: float :type transparency: float :type surface_name: str .. versionadded:: 7.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The Marching Cubes method of Lorensen and Cline, Computer Graphics, V21, Number 4, July 1987, is used to calculate a given iso-surface in a voxel model. The resulting surface is plotted to a 2D or 3D view. If the view is 2-D, then only the intersection of the surface with the 2D surface is plotted, using lines. 
""" gxapi_cy.WrapMVU._plot_voxel_surface2(GXContext._get_tls_geo(), mview, vox, value, col, line_thick, transparency, surface_name.encode()) @classmethod def generate_surface_from_voxel(cls, mview, vox, method, option, min_value, max_value, col, line_thick, transparency, surface_name): """ TODO... :param mview: View :param vox: Voxel model :param method: :ref:`MVU_VOX_SURFACE_METHOD` :param option: :ref:`MVU_VOX_SURFACE_OPTION` :param min_value: Iso-surface value :param max_value: For closed surfaces: close between the selected value and this value (set equal to the Iso-surface to close within nearest values below, DUMMY to close within nearest value above) :param col: Drawing color :param line_thick: Line thickness for line drawing, and 2D views. :param transparency: Transparency (0 - transparent, 1 - opaque). :param surface_name: Geosurface file :type mview: GXMVIEW :type vox: GXVOX :type method: int :type option: int :type min_value: float :type max_value: float :type col: int :type line_thick: float :type transparency: float :type surface_name: str .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** TODO... Move to `GXVOX <geosoft.gxapi.GXVOX>` method for surface generation only and use GeosurfaceD to display. """ gxapi_cy.WrapMVU._generate_surface_from_voxel(GXContext._get_tls_geo(), mview, vox, method, option, min_value, max_value, col, line_thick, transparency, surface_name.encode()) @classmethod def post(cls, mview, vv_x, vv_y, vv_z, dummy, size, format, decimals, ref, angle): """ Post values on a map. :param mview: View :param vv_x: X locations :param vv_y: Y locations :param vv_z: Values to post :param dummy: Do not plot dummy values? 
:param size: Numb Size :param format: Format :param decimals: Decimals :param ref: Reference point number :param angle: Text angle :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type dummy: bool :type size: int :type format: int :type decimals: int :type ref: int :type angle: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._post(GXContext._get_tls_geo(), mview, vv_x, vv_y, vv_z, dummy, size, format, decimals, ref, angle) @classmethod def post_ex(cls, mview, vv_x, vv_y, vv_z, vv_s, dummy, base, min_detect, size, format, decimals, offset_l, offset_p, alternate, mod, ref, angle, fixed, ref_ang, up): """ Post values on a map with more paramters. :param mview: View :param vv_x: X locations :param vv_y: Y locations :param vv_z: Values to post :param vv_s: Station :param dummy: Do not plot dummy values? :param base: Base to remove, default is 0.0 :param min_detect: Detection limit, can be `GS_R8DM <geosoft.gxapi.GS_R8DM>` :param size: Numb Size :param format: Format :param decimals: Decimals :param offset_l: Offset along line (right and above are positive) :param offset_p: Offset perpendicular to line :param alternate: TRUE - Positive above, Negative below FALSE - All above. :param mod: Modulas on station vv :param ref: Reference point number :param angle: Text angle (degree, CCW from down-line) :param fixed: Fixed angle ? :param ref_ang: Vertical reference angle :param up: 1 up is right, -1 up is left :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vv_s: GXVV :type dummy: bool :type base: float :type min_detect: float :type size: int :type format: int :type decimals: int :type offset_l: float :type offset_p: float :type alternate: int :type mod: float :type ref: int :type angle: float :type fixed: int :type ref_ang: float :type up: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._post_ex(GXContext._get_tls_geo(), mview, vv_x, vv_y, vv_z, vv_s, dummy, base, min_detect, size, format, decimals, offset_l, offset_p, alternate, mod, ref, angle, fixed, ref_ang, up) @classmethod def probability(cls, mview, st_data, st_hist, title, unit, transform, lmd, xm, ym, widthm, heightm, symb_size, sigma, sum_width, summ, data_decimal, stat_decimal, itr): """ Plot a probability plot on a map. :param mview: View :param st_data: `GXST <geosoft.gxapi.GXST>` with summary stats of original data :param st_hist: `GXST <geosoft.gxapi.GXST>` with histogram info of original or log10 data :param title: Title :param unit: Unit :param transform: Transform type (0: Raw, 1: Log, 2: Lambda) :param lmd: Lambda Value for lambda transform :param xm: X in mm (bottom left corner of histogram box) :param ym: Y in mm (bottom left corner of histogram box) :param widthm: Box width in mm :param heightm: Box height in mm :param symb_size: Symbol size in mm :param sigma: Sigma (X range is -sigma to sigma) :param sum_width: Width (mm) of the additional box for summary stats :param summ: Summary stats: 0 - do not draw, 1 - draw :param data_decimal: Decimals for data, negative for sig. fig. :param stat_decimal: Decimals for stats, negative for sig. fig. :param itr: `GXITR <geosoft.gxapi.GXITR>` to color symbols. :type mview: GXMVIEW :type st_data: GXST :type st_hist: GXST :type title: str :type unit: str :type transform: int :type lmd: float :type xm: float :type ym: float :type widthm: float :type heightm: float :type symb_size: float :type sigma: float :type sum_width: float :type summ: int :type data_decimal: int :type stat_decimal: int :type itr: GXITR .. 
versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXITR <geosoft.gxapi.GXITR>` can be empty (but must still be a valid `GXITR <geosoft.gxapi.GXITR>` object). """ gxapi_cy.WrapMVU._probability(GXContext._get_tls_geo(), mview, st_data, st_hist, title.encode(), unit.encode(), transform, lmd, xm, ym, widthm, heightm, symb_size, sigma, sum_width, summ, data_decimal, stat_decimal, itr) @classmethod def profile_plot(cls, mview, vv_x, vv_y, vv_z, vangle, up, gap, base, scale, join): """ Draw a profile along line trace :param mview: View :param vv_x: X :param vv_y: Y :param vv_z: Z :param vangle: Lines steeper than this angle are considered vertical and the up label direction is used. :param up: Up label direction: 1 up is right -1 up is left :param gap: Maximum gap in data to span (view units) :param base: Z profile base, `rDUMMY <geosoft.gxapi.rDUMMY>` to use data minimum :param scale: Z scale in view units/Z unit :param join: 1 to join profile to line ends. :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vangle: float :type up: int :type gap: float :type base: float :type scale: float :type join: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Profiles will be drawn in the current line style. """ gxapi_cy.WrapMVU._profile_plot(GXContext._get_tls_geo(), mview, vv_x, vv_y, vv_z, vangle, up, gap, base, scale, join) @classmethod def profile_plot_ex(cls, mview, vv_x, vv_y, vv_z, vangle, up, gap, base, scale, join, log, log_base, smooth, pos_f_color, neg_f_color): """ Draw a profile along line trace with more parameters :param mview: View :param vv_x: X :param vv_y: Y :param vv_z: Z :param vangle: Lines steeper than this angle are considered vertical and the up label direction is used. 
:param up: Up label direction: 1 up is right -1 up is left :param gap: Maximum gap in data to span (view units) :param base: Z profile base, `rDUMMY <geosoft.gxapi.rDUMMY>` to use data minimum :param scale: Z scale in view units/Z unit :param join: 1 to join profile to line ends. :param log: Log option: 0 linear (default), 1 logarithm, 2 log/linear :param log_base: Log base :param smooth: Smooth curve option: 0 no (default), 1 yes :param pos_f_color: Positive fill color :param neg_f_color: Negative fill color :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type vangle: float :type up: int :type gap: float :type base: float :type scale: float :type join: int :type log: int :type log_base: float :type smooth: int :type pos_f_color: str :type neg_f_color: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Profiles will be drawn in the current line style. """ gxapi_cy.WrapMVU._profile_plot_ex(GXContext._get_tls_geo(), mview, vv_x, vv_y, vv_z, vangle, up, gap, base, scale, join, log, log_base, smooth, pos_f_color.encode(), neg_f_color.encode()) @classmethod def prop_symb_legend(cls, mview, x1, y1, font_size, symb_scale, base, n_symb, start, increment, title, sub_title): """ Draw a legend for proportional symbols. 
:param mview: `GXMVIEW <geosoft.gxapi.GXMVIEW>` object :param x1: Plot origin X :param y1: Plot origin Y :param font_size: Label Font size (mm) :param symb_scale: Symbol scale factor (data value/mm) :param base: Base value to remove before scaling :param n_symb: Number of symbols :param start: Starting symbol data value (>= Base value) :param increment: Data value increment (>0.0) :param title: Plot title :param sub_title: Plot subtitle :type mview: GXMVIEW :type x1: float :type y1: float :type font_size: float :type symb_scale: float :type base: float :type n_symb: int :type start: float :type increment: float :type title: str :type sub_title: str .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All symbol attributes, except for the size, are assumed to be defined (or defaults are used). Spacing is based on the maximum of the largest plotted symbol and the font size. """ gxapi_cy.WrapMVU._prop_symb_legend(GXContext._get_tls_geo(), mview, x1, y1, font_size, symb_scale, base, n_symb, start, increment, title.encode(), sub_title.encode()) @classmethod def re_gen_areas(cls, mview, lines): """ Re-Generate from a line group and existing area group :param mview: View :param lines: Group with lines :type mview: GXMVIEW :type lines: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The area group must exist and will be modified to match the current line group. All non-polygon entities in the current area group will remain in the new area group. All existing polygon groups will be used to determine the most likely attributes for the new polygon groups. There must be existing polygon groups in the area group. .. 
seealso:: `gen_areas <geosoft.gxapi.GXMVU.gen_areas>` """ gxapi_cy.WrapMVU._re_gen_areas(GXContext._get_tls_geo(), mview, lines.encode()) @classmethod def symb_off(cls, mview, vv_x, vv_y, vv_f, x_off, y_off): """ Draws symbols with an offset and against a flag channel :param mview: View :param vv_x: X, must be type of REAL :param vv_y: Y, must be type of REAL :param vv_f: Flag `GXVV <geosoft.gxapi.GXVV>`, must be type of INT :param x_off: X Offset :param y_off: Y Offset :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type vv_f: GXVV :type x_off: float :type y_off: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Symbols are not plotted for positions where the flag `GXVV <geosoft.gxapi.GXVV>` value is 0 or `iDUMMY <geosoft.gxapi.iDUMMY>`. """ gxapi_cy.WrapMVU._symb_off(GXContext._get_tls_geo(), mview, vv_x, vv_y, vv_f, x_off, y_off) @classmethod def text_box(cls, mview, xmin, ymin, xmax, ymax, text, space, type): """ Draw a wrapped text box :param mview: View :param xmin: Min X :param ymin: Min Y :param xmax: Max X :param ymax: Max Y :param text: Text :param space: Line spacing (1.2 good) :param type: :ref:`MVU_TEXTBOX` :type mview: GXMVIEW :type xmin: float :type ymin: float :type xmax: float :type ymax: float :type text: str :type space: float :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._text_box(GXContext._get_tls_geo(), mview, xmin, ymin, xmax, ymax, text.encode(), space, type) @classmethod def tick(cls, mview, vv_x, vv_y, vv_s, size, mod, mt_size, mt_mod): """ Draw line ticks on a map. 
:param mview: View :param vv_x: X locations :param vv_y: Y locations :param vv_s: Station :param size: Tick size :param mod: Tick modulus on station vv :param mt_size: Major tick size :param mt_mod: Major tick modulus on station vv :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type vv_s: GXVV :type size: float :type mod: float :type mt_size: float :type mt_mod: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._tick(GXContext._get_tls_geo(), mview, vv_x, vv_y, vv_s, size, mod, mt_size, mt_mod) @classmethod def tick_ex(cls, mview, vv_x, vv_y, vv_s, size, mod, mt_size, mt_mod, gap): """ Same as `tick <geosoft.gxapi.GXMVU.tick>`, with gap allowance. :param mview: View :param vv_x: X locations :param vv_y: Y locations :param vv_s: Station :param size: Tick size :param mod: Tick modulus on station vv :param mt_size: Major tick size :param mt_mod: Major tick modulus on station vv :param gap: Maximum gap to span; set to 0 or `rDUMMY <geosoft.gxapi.rDUMMY>` to ignore all gaps. :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type vv_s: GXVV :type size: float :type mod: float :type mt_size: float :type mt_mod: float :type gap: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMVU._tick_ex(GXContext._get_tls_geo(), mview, vv_x, vv_y, vv_s, size, mod, mt_size, mt_mod, gap) @classmethod def trnd_path(cls, mview, vv_x, vv_y, min_sect, min_dist): """ Plot min and max trend lines. :param mview: View :param vv_x: X :param vv_y: Y :param min_sect: Minimum number of sections :param min_dist: Minimum length of sections :type mview: GXMVIEW :type vv_x: GXVV :type vv_y: GXVV :type min_sect: int :type min_dist: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Trend lines positions consist of X and Y VVs interspersed with dummies, which separate the individual trend sections. Set the minimum number of sections to > 0 to plot only the longer trend lines. (The number of sections in one trend section is equal to the number of points between dummies minus one.) Set the minimum distance to > 0 to plot only the longer trend lines. """ gxapi_cy.WrapMVU._trnd_path(GXContext._get_tls_geo(), mview, vv_x, vv_y, min_sect, min_dist) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/Geosoft Project/grid_statistics_remember_user_input.py import os import geosoft.gxapi as gxapi import geosoft.gxpy.grid as gxgrid import geosoft.gxpy.project as gxproj import geosoft.gxpy.utility as gxu # function to calculate grid statistics def grid_stats(grid_file): # create a gxapi.GXST instance to accumulate statistics stats = gxapi.GXST.create() # open the grid grid = gxgrid.Grid.open(grid_file) # add data from each row to the stats instance for row in range(grid.ny): stats.data_vv(grid.read_row(row).gxvv) return stats # entry point when run from a Geosoft Desktop application def rungx(): # parameter 'GRID_FILE' is the last-specified grid file name for this script. 
grid_parameter = 'GRID_FILE' group = os.path.basename(__file__).split('.')[0] parms = gxu.get_parameters(group, {grid_parameter: ''}) # get the name of a grid from the user grid_file = gxproj.get_user_input(title='Grid statistics', prompt='Grid file', default=parms.get(grid_parameter), kind='file', filemask='*.grd') # save the grid file name as the default the next time this script is run parms[grid_parameter] = grid_file gxu.save_parameters(group, parms) stats = grid_stats(grid_file) gxproj.user_message(grid_file, 'min: {}\nmax: {}\nmean: {}\nstd_dev: {}'.format(stats.get_info(gxapi.ST_MIN), stats.get_info(gxapi.ST_MAX), stats.get_info(gxapi.ST_MEAN), stats.get_info(gxapi.ST_STDDEV))) <file_sep>/geosoft/gxpy/tests/test_group.py import unittest import os import numpy as np import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.system as gsys import geosoft.gxpy.map as gxmap import geosoft.gxpy.geometry as gxgm import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.agg as gxagg import geosoft.gxpy.system as gxsys import geosoft.gxpy.view as gxv import geosoft.gxpy.group as gxg import geosoft.gxpy.vv as gxvv import geosoft.gxpy.viewer as gxviewer from base import GXPYTest def rect_line(g, size=100): g.rectangle(gxgm.Point2((0, 0, size, size), coordinate_system="cm"), pen=g.new_pen(line_thick=1)) p1 = gxgm.Point((0.1, 0.1)) * size p2 = gxgm.Point((0.9, 0.9)) * size poff = gxgm.Point((0.15, 0.05)) * size g.rectangle((p1, p2), pen=g.new_pen(fill_color=gxg.C_LT_GREEN)) p12 = gxgm.Point2((p1 + poff, p2 - poff)) g.line((p12.p0.x, p12.p0.y, p12.p1.x, p12.p1.y), pen=g.new_pen(line_style=2, line_pitch=2.0)) def pline(): return gxgm.PPoint([[10, 5], [20, 20], [30, 15], [50, 50], [60, 70], [75, 35], [90, 65], [20, 50], [35, 18.5]]) def draw_stuff(g, size=1.0): plinelist = [[110, 5], [120, 20], [130, 15], [150, 50], [160, 70], [175, 35], [190, 65], [220, 50], [235, 18.5]] pp = gxgm.PPoint.from_list(plinelist) * size g.pen = g.new_pen(line_style=2, line_pitch=2.0) 
g.polyline(pp) g.pen = g.new_pen(line_style=4, line_pitch=2.0, line_smooth=gxg.SMOOTH_AKIMA) g.polyline(pp) ppp = np.array(plinelist) pp = gxgm.PPoint(ppp[3:, :]) * size g.pen = g.new_pen(line_style=5, line_pitch=5.0, line_smooth=gxg.SMOOTH_CUBIC, line_color=gxg.C_RED, line_thick=0.25, fill_color=gxg.C_LT_BLUE) g.polygon(pp) g.pen = g.new_pen(fill_color=gxg.C_LT_GREEN) p1 = gxgm.Point((100, 0, 0)) * size p2 = gxgm.Point((100, 0, 0)) * size pp = (pp - p1) / 2 + p2 g.polygon(pp) pp += gxgm.Point((0, 25, 0)) * size g.pen = g.new_pen(fill_color=gxg.C_LT_RED) g.polygon(pp) class Test(GXPYTest): def test_version(self): self.start() self.assertEqual(gxmap.__version__, geosoft.__version__) def test_create(self): self.start() def test_lock(self): self.start() with gxmap.Map.new(data_area=(0, 0, 50, 40), coordinate_system='cm') as map: with gxv.View.open(map, 'data') as v: self.assertFalse(bool(v.lock)) with gxg.Draw(v, 'rectangle') as g: self.assertEqual(str(g), 'rectangle/data') self.assertTrue(g.drawing_plane is None) self.assertEqual(g.unit_of_measure, '') self.assertTrue(bool(v.lock)) self.assertEqual(v.lock, 'rectangle') self.assertRaises(gxg.GroupException, gxg.Group, v) self.assertFalse(bool(v.lock)) def test_metadata(self): self.start() with gxmap.Map.new(data_area=(0, 0, 50, 40), coordinate_system='cm') as map: with gxv.View.open(map, 'data') as v: with gxg.Draw(v, 'rectangle') as g: self.assertTrue(g.guid) meta = g.gx_metadata meta.node_token('maki/data/more') meta.set_attribute('/maki/data/more/scale', 45) meta.set_attribute('/maki/data/more/unit_of_measure', 'cm') g.gx_metadata = meta g.unit_of_measure = 'billy-bob' with gxg.Draw(v, 'rectangle') as g: meta = g.gx_metadata self.assertTrue(meta.has_node('/maki/data')) self.assertTrue(meta.has_node('/maki/data/more')) self.assertEqual(meta.get_attribute('/maki/data/more/scale'), 45) self.assertEqual(meta.get_attribute('/maki/data/more/unit_of_measure'), 'cm') self.assertEqual(g.unit_of_measure, 'billy-bob') def 
test_cs(self): self.start() with gxmap.Map.new(data_area=(0, 0, 50, 40), coordinate_system='cm') as map: with gxv.View.open(map, 'data') as v: with gxg.Draw(v, 'rectangle') as g: self.assertEqual(g.drawing_coordinate_system.unit_of_measure, 'cm') g.drawing_coordinate_system = "NAD83 / UTM zone 15N" self.assertEqual(str(g.drawing_coordinate_system), "NAD83 / UTM zone 15N") g.drawing_coordinate_system = None self.assertEqual(g.drawing_coordinate_system.unit_of_measure, 'cm') def test_extent(self): self.start() map_file = None try: with gxmap.Map.new(data_area=(3, 2, 50, 40), coordinate_system='cm', overwrite=True) as map: map_file = map.file_name with gxv.View.open(map, 'data') as v: self.assertEqual(v.extent_map_cm(), (2.0, 6.0, 41.6, 38.4)) with gxg.Draw(v, 'rectangle') as g: g.rectangle((3, 2, 28, 20), pen=g.new_pen(line_thick=0.25, line_color='R', line_style=gxg.LINE_STYLE_LONG, line_pitch=5)) self.assertEqual(g.extent, (3., 2., 28., 20.)) self.assertEqual(g.extent_map_cm(), (3.0, 7.0, 23.0, 21.4)) finally: gxmap.delete_files(map_file) @unittest.skip('skipping to let fixture pass') def test_force_assert(self): self.start() with gxmap.Map.figure((0, 0, 1000, 1000)) as gmap: with gxv.View.open(gmap, "data") as v: gxapi.GXMVU.arrow(v.gxview, 500, 500, 450, 450, 0.5, 30, 1) # asserts with gxg.Draw(v, "arrow") as g: gxapi.GXMVU.arrow(g.view.gxview, 500, 500, 450, 450, 0.5, 30, 1) def test_point(self): self.start() p1 = gxgm.Point((10, 20)) p2 = gxgm.Point((20, 20)) p3 = gxgm.Point((30, 20)) rect = gxgm.Point2((p1 - (15, 15), p3 + (15, 15))) with gxmap.Map.new(data_area=rect.extent_xy) as gmap: map_file = gmap.file_name with gxv.View.new(gmap, "data") as v: with gxg.Draw(v, 'test_point') as g: g.pen = gxg.Pen(line_thick=1) g.rectangle(rect) g.pen = gxg.Pen(line_thick=2, line_color='R') g.line((p1, p1)) # invisible - zero-length, but we should see it g.pen = gxg.Pen(line_thick=2, line_color='G') g.line((p2, p2 + (0.04, 0))) # invisible - bug g.pen = 
gxg.Pen(line_thick=2, line_color='B') g.line((p3, p3 + (0.05, 0))) # visible - correct! self.crc_map(map_file, pix_width=800) def test_points(self): self.start() plinelist = [[110, 5], [120, 20], [130, 15], [150, 50], [160, 70], [175, 35], [190, 65], [220, 50], [235, 18.5]] pp = gxgm.PPoint.from_list(plinelist) with gxmap.Map.new() as gmap: map_file = gmap.file_name with gxv.View.new(gmap, "points", area=(100, 0, 260, 100)) as v: with gxg.Draw(v, 'test_group') as g: g.rectangle(pp.extent, pen=gxg.Pen(line_thick=1)) g.pen = gxg.Pen(line_thick=2, line_color='B') for p in pp: g.point(p) pp += (15, 15) g.pen = gxg.Pen(line_thick=1.5, line_color='G') g.polypoint(pp) pp -= (0, 5) g.pen = gxg.Pen(line_thick=1, line_color='R') g.polypoint((gxvv.GXvv(pp.x), gxvv.GXvv(pp.y))) self.crc_map(map_file, pix_width=800) def test_rectangle(self): self.start() with gxmap.Map.new(data_area=(0, 0, 50, 40), coordinate_system='cm', overwrite=True) as map: map_file = map.file_name with gxv.View.open(map, 'data') as v: with gxg.Draw(v, 'rectangle') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=0.5, line_color='B')) g.rectangle((2, 2, 48, 38), pen=g.new_pen(line_thick=0.25, line_color='R', line_style=gxg.LINE_STYLE_LONG, line_pitch=5)) self.crc_map(map_file) def test_smooth_line(self): self.start() pp = pline() p1, p2 = pp.extent area = (p1.x, p1.y, p2.x, p2.y) with gxmap.Map.new() as map: map_file = map.file_name with gxv.View.new(map, 'smooth') as v: v.locate(coordinate_system='mm', area=area, map_location=(1,1), scale=0.4) with gxg.Draw(v) as g: g.rectangle(v.extent_clip) g.polyline(pp, pen=g.new_pen(line_smooth=gxg.SMOOTH_AKIMA, line_color='r', line_thick=1)) g.polyline(pp, pen=g.new_pen(line_smooth=gxg.SMOOTH_CUBIC, line_color='b', line_thick=2)) g.polyline(pp) map.delete_view('data') map.delete_view('base') self.crc_map(map_file) def test_view_groups_1(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test") with gxmap.Map.new(testmap, overwrite=True) as 
gmap: map_file = gmap.file_name with gxv.View.new(gmap, "rectangle_test", area=(0, 0, 250, 125)) as v: with gxg.Draw(v, 'test_group') as g: rect_line(g) g.graticule(25, 20, style=gxg.GRATICULE_LINE) g.pen = g.new_pen(line_thick=0.1) g.rectangle(((0, 0), (250, 125)), pen=g.new_pen(line_thick=0.1, line_color='R')) with gxv.View.new(gmap, "poly") as v: with gxg.Draw(v) as g: draw_stuff(g) try: self.crc_map(map_file) finally: gxmap.delete_files(map_file) def test_view_groups_2(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test") with gxmap.Map.new(testmap, overwrite=True) as gmap: map_file = gmap.file_name with gxv.View.new(gmap, "rectangle_test", area=(0, 0, 250, 125)) as v: with gxg.Draw(v, 'line') as g: rect_line(g) with gxg.Draw(v, 'graticule') as g: g.graticule(25, 20, style=gxg.GRATICULE_LINE) g.pen = g.new_pen(line_thick=0.1) with gxg.Draw(v, 'test_rectangles') as g: g.rectangle(((0, 0), (250, 125)), pen=g.new_pen(line_thick=0.1, line_color='R')) g.rectangle(((10, 5), (240, 120)), pen=g.new_pen(line_thick=2, line_color='B')) v.delete_group('graticule') with gxv.View.new(gmap, "poly") as v: with gxg.Draw(v, 'test_group') as g: draw_stuff(g) try: self.crc_map(map_file) finally: gxmap.delete_files(map_file) def test_reopen_map_view(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test") with gxmap.Map.new(testmap, overwrite=True) as gmap: map_file = gmap.file_name with gxv.View.new(gmap, "test_view") as v: with gxg.Draw(v) as g: rect_line(g) with gxv.View.open(gmap, "test_view") as v: pass gxmap.delete_files(map_file) def test_3D(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test.map") with gxmap.Map.new(testmap, overwrite=True) as gmap: with gxv.View.open(gmap, "base") as view_base: with gxg.Draw(view_base, 'Surround') as g: g.rectangle(((0, 0), (280, 260))) test3dv = os.path.join(self.gx.temp_folder(), "test.geosoft_3dv") with gxv.View_3d.new(test3dv, overwrite=True) as view_3d: 
self.assertTrue(view_3d.extent == None) with gxg.Draw(view_3d, '2d_group') as g: rect_line(g) draw_stuff(g) with gxg.Draw_3d(view_3d, '3d_group_cylinders') as g: self.assertEqual(g.render_backfaces, False) g.cylinder_3d(((100, 10, 10), (120, 10, 10)), 8, pen='r', close=gxg.CYLINDER_CLOSE_ALL) self.assertEqual(view_3d.extent_xyz, (92.0, 2.0, 2.0, 128.0, 18.0, 18.0)) g.cylinder_3d(((100, 10, 70), (120, 10, 70)), 8, pen='c', close=gxg.CYLINDER_OPEN) self.assertEqual(view_3d.extent_xyz, (92.0, 2.0, 2.0, 128.0, 18.0, 78.0)) g.cylinder_3d(((100, 10, 50), (120, 10, 50)), 8, pen='b', close=gxg.CYLINDER_CLOSE_END) g.cylinder_3d(((100, 10, 30), (120, 10, 30)), 8, pen='g', close=gxg.CYLINDER_CLOSE_START) self.assertEqual(view_3d.extent_xyz, (92.0, 2.0, 2.0, 128.0, 18.0, 78.0)) self.assertEqual(g.render_backfaces, True) with gxg.Draw_3d(view_3d, '3d_group') as g: g.cylinder_3d(((20, 10, 60), (80, 50, 80)), 5, pen='b') g.cone_3d(((20, 10, 80), (80, 50, 60)), 8, pen='g') g.cone_3d(((20, 50, 65), (20, 50, 40)), 30, pen='r') g.sphere((20, 50, 80), 10, pen='c') self.assertEqual(g.render_backfaces, False) g.cylinder_3d(((80, 10, 0), (80, 10, 80)), 5, pen='y', close=gxg.CYLINDER_OPEN) self.assertEqual(g.render_backfaces, True) g.box_3d(((20, 10, 30), (80, 50, 50)), pen=g.new_pen(line_color='R255G100B50')) g.box_3d(((80, 50, 50), (90,60, 65)), wireframe=True, pen=g.new_pen(line_color='R25G255B50', line_thick=2)) with gxmap.Map.open(testmap) as gmap: gmap.create_linked_3d_view(view_3d, area_on_map=(10, 10, 270, 250)) # test re-open a 3D view, with explicit close view_3d = gxv.View_3d.open(test3dv) group_list = view_3d.group_list self.assertEqual(len(group_list), 3) view_3d.close() self.crc_map(test3dv, alt_crc_name=gxsys.func_name() + '_3dv') self.crc_map(testmap, alt_crc_name=gxsys.func_name() + '_map') def test_basic_grid_1(self): self.start() # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) 
grid_file = os.path.join(folder, 'test_agg_utm.grd') map_file = os.path.join(self.gx.temp_folder(), "test_agg_utm") with gxgrd.Grid(grid_file) as grd: cs = grd.coordinate_system area = grd.extent_2d() with gxmap.Map.new(map_file, data_area=area, media="A4", margins=(0, 10, 0, 0), coordinate_system=cs, overwrite=True) as gmap: map_file = gmap.file_name with gxv.View.open(gmap, "base") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=1, line_color='K')) with gxv.View.open(gmap, "data") as v: with gxg.Draw(v, 'line') as g: g.rectangle(area, pen=g.new_pen(line_thick=0.1, line_color='R')) with gxagg.Aggregate_image.new(grid_file) as agg: with gxg.Aggregate_group.new(v, agg) as gagg: self.assertEqual(gagg.name, str(agg)) self.assertEqual(len(v.group_list_agg), 1) self.crc_map(map_file) def test_basic_grid_3D(self): self.start() # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'test_agg_utm.grd') with gxgrd.Grid(grid_file) as grd: cs = grd.coordinate_system area = grd.extent_2d() with gxv.View_3d.new() as v: v3d_file = v.file_name with gxg.Draw(v, 'line') as g: self.assertEqual(g.drawing_plane, 'Plane') self.assertEqual(str(g), 'line/Plane/uuid_test_basic_grid_3D_1') g.rectangle(area, pen=g.new_pen(line_thick=0.1, line_color='R')) with gxagg.Aggregate_image.new(grid_file) as agg: with gxg.Aggregate_group.new(v, agg) as gagg: self.assertEqual(str(gagg), agg.name + '/Plane/uuid_test_basic_grid_3D_1') self.assertEqual(len(v.group_list_agg), 1) self.crc_map(v3d_file) def test_basic_grid_2(self): self.start() # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'test_agg_utm.grd') map_file = os.path.join(self.gx.temp_folder(), "test_agg_utm") with gxgrd.Grid(grid_file) as grd: cs = 
grd.coordinate_system area = grd.extent_2d() with gxmap.Map.new(map_file, data_area=area, media="A3", margins=(0, 0, 0, 0), scale=(area[2] - area[0]) / 0.2, coordinate_system=cs, overwrite=True) as gmap: map_file = gmap.file_name with gxv.View.open(gmap, "base") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=2, line_color='K')) with gxv.View.open(gmap, "data") as v: with gxg.Draw(v, 'line') as g: g.rectangle(area, pen=g.new_pen(line_thick=0.1, line_color='G')) with gxagg.Aggregate_image.new(grid_file) as agg: gxg.Aggregate_group.new(v, agg) self.crc_map(map_file) def test_zone_grid(self): self.start() def test_zone(zone, suffix, shade=False): map_file = os.path.join(self.gx.temp_folder(), "test_agg_" + suffix) with gxmap.Map.new(map_file, overwrite=True, data_area=(ex[0], ex[1], ex[2], ex[3]), scale=(ex[2] - ex[0]) / 0.2) as gmap: map_file = gmap.file_name with gxv.View.open(gmap, "data") as v: with gxagg.Aggregate_image.new(grid_file, zone=zone, shade=shade) as agg: gxg.Aggregate_group.new(v, agg) gmap.delete_view('base') self.crc_map(map_file, alt_crc_name='{}_{}'.format(gxsys.func_name(1), suffix)) # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) with gxgrd.Grid(os.path.join(folder, 'test_agg_utm.grd')) as grd: ex = grd.extent_2d() grid_file = 'test_zone' gxgrd.delete_files(grid_file) with gxgrd.Grid.copy(grd, grid_file) as test: grid_file = test.file_name try: test_zone(gxagg.ZONE_LINEAR, "linear_shade", shade=True) test_zone(gxagg.ZONE_EQUALAREA, "eq_area") test_zone(gxagg.ZONE_DEFAULT, "default") test_zone(gxagg.ZONE_LAST, "last") test_zone(gxagg.ZONE_LINEAR, "linear") test_zone(gxagg.ZONE_NORMAL, "normal") test_zone(gxagg.ZONE_SHADE, "shade") test_zone(gxagg.ZONE_LOGLINEAR, "log_linear") finally: gxgrd.delete_files(grid_file) def test_text_definition(self): self.start() t = gxg.Text_def() self.assertEqual(t.slant, 0) 
self.assertEqual(t.height, 0.25) self.assertEqual(t.weight, gxg.FONT_WEIGHT_MEDIUM) self.assertEqual(t.font, 'DEFAULT') t.font = "Arial" self.assertEqual(t.font, 'Arial') self.assertEqual(t.mapplot_string, '0.25,,,0,"Arial(TT)"') t.font = 'sr.gfn' self.assertEqual(t.mapplot_string, '0.25,,,0,"sr"') t.font = '' self.assertEqual(t.mapplot_string, '0.25,,,0,"DEFAULT"') t.italics = True self.assertTrue(t.italics) self.assertEqual(t.slant, 15) t.italics = 0 self.assertFalse(t.italics) self.assertEqual(t.slant, 0) t.weight = gxg.FONT_WEIGHT_ULTRALIGHT self.assertAlmostEqual(t.line_thick, 0.005208333333333333) t.weight = gxg.FONT_WEIGHT_BOLD self.assertAlmostEqual(t.line_thick, 0.020833333333333331) thick = t.line_thick t.weight = gxg.FONT_WEIGHT_XXBOLD self.assertAlmostEqual(t.line_thick, 0.0625) t.line_thick = thick self.assertEqual(t.weight, gxg.FONT_WEIGHT_BOLD) t.height = 10. self.assertEqual(t.weight, gxg.FONT_WEIGHT_BOLD) self.assertAlmostEqual(t.line_thick, 0.8333333333333333) t.line_thick = t.line_thick self.assertEqual(t.weight, gxg.FONT_WEIGHT_BOLD) def test_colours(self): self.start() c = gxg.Color((150, 200, 500)) self.assertEqual(c.rgb, (150, 200, 255)) c = gxg.Color((150, 200, 500), model=gxg.CMODEL_CMY) self.assertEqual(c.cmy, (150, 200, 255)) c = gxg.Color('r255g128b56') self.assertEqual(c.rgb, (255, 128, 56)) self.assertEqual(c.cmy, (0, 127, 199)) c.rgb = (64, 32, 16) self.assertEqual(c.rgb, (64, 32, 16)) c.cmy = (100, 200, 300) self.assertEqual(c.cmy, (100, 200, 255)) c = gxg.Color((0, 127, 64), gxg.CMODEL_HSV) self.assertEqual(c.rgb, (191, 96, 96)) c = gxg.Color((0, 127, 64), gxg.CMODEL_RGB) self.assertEqual(c.rgb, (0, 127, 64)) c = gxg.Color(gxg.C_GREEN) self.assertEqual(c.rgb, (0, 255, 0)) c2 = gxg.Color(c) self.assertEqual(c2.rgb, (0, 255, 0)) c = gxg.Color(gxg.C_TRANSPARENT) self.assertEqual(c.rgb, None) self.assertEqual(c.cmy, None) self.assertTrue(c == gxg.Color(gxg.C_TRANSPARENT)) def test_pen(self): self.start() p = gxg.Pen() 
self.assertEqual(p.line_color.int_value, gxg.C_BLACK) self.assertEqual(p.fill_color.int_value, gxg.C_TRANSPARENT) self.assertEqual(p.line_style, gxg.LINE_STYLE_SOLID) p.line_color = (255, 127, 64) self.assertEqual(p.mapplot_string, 'r255g127b64t10') p2 = gxg.Pen(line_color = (255, 127, 64)) self.assertTrue(p == p2) p2.line_color = 'K' self.assertFalse(p == p2) p = gxg.Pen.from_mapplot_string('r20b100k16R64K16') ms = p.mapplot_string self.assertEqual(ms, 'r4g0b84R48G0B0t1') p = gxg.Pen.from_mapplot_string(ms) self.assertEqual(p.mapplot_string, ms) p = gxg.Pen.from_mapplot_string('c64K64') self.assertEqual(p.line_color.rgb, (191, 255, 255)) self.assertEqual(p.fill_color.rgb, (191, 191, 191)) p = gxg.Pen(line_color='K') self.assertEqual(p.line_color.int_value, gxg.C_BLACK) self.assertTrue(p.line_color == gxg.Color(gxg.C_BLACK)) p = gxg.Pen(line_color=gxg.C_WHITE) self.assertEqual(p.line_color.int_value, gxg.C_WHITE) self.assertTrue(p.line_color == gxg.Color(gxg.C_WHITE)) p = gxg.Pen.from_mapplot_string('r20b100k16R64K16') p = gxg.Pen(default=p, line_thick=0.5, fill_color='K') ms = p.mapplot_string self.assertEqual(ms, 'r4g0b84R0G0B0t500') p = gxg.Pen.from_mapplot_string(ms) self.assertEqual(p.mapplot_string, ms) self.assertRaises(gxg.GroupException, gxg.Pen, bad=1) def test_scaled(self): self.start() p = gxg.Pen(factor=10) self.assertEqual(p.line_thick, 0.1) self.assertEqual(p.line_pitch, 5.0) self.assertEqual(p.pat_thick, 0.1) self.assertEqual(p.pat_size, 10.0) p = gxg.Pen(default=p, factor=5) self.assertEqual(p.line_thick, 0.5) self.assertEqual(p.line_pitch, 25.0) self.assertEqual(p.pat_thick, 0.5) self.assertEqual(p.pat_size, 50.0) t = gxg.Text_def(factor=0.2) self.assertEqual(t.height, 0.05) def test_text(self): self.start() with gxmap.Map.new(data_area=(400000, 5000000, 500000, 5150000), coordinate_system='WGS 84 / UTM zone 15N [geoid]') as map: map_file = map.file_name with gxv.View.open(map, 'base') as v: with gxg.Draw(v) as g: g.rectangle(g.extent) 
g.text('Text on base view') g.text('Bigger, blue, higher', (v.units_per_map_cm, v.units_per_map_cm), text_def=gxg.Text_def(height=20, color='B', font='Times New Roman')) g.text('Bigger, blue, angled, italics', (10, 25), angle=60, text_def=gxg.Text_def(height=20, color='B', font='Calibri', italics=True)) g.text_def = gxg.Text_def(height=20, color='B', font='Calibri', italics=True) tex = g.text_extent('Bigger, blue, angled, italics') self.assertAlmostEqual(209.9629, tex.dimension_xy[0], 3) self.assertAlmostEqual(334.6408, tex.dimension_xy[1], 3) tex = g.text_extent('Bigger, blue, angled, italics', gxg.Text_def(height=10, font='Calibri', italics=True)) self.assertAlmostEqual(104.98147, tex.dimension_xy[0], 3) self.assertAlmostEqual(167.32042, tex.dimension_xy[1], 3) self.crc_map(map_file) def test_text_1(self): self.start() with gxmap.Map.new(data_area=(400000, 5000000, 500000, 5050000), coordinate_system='WGS 84 / UTM zone 15N [geoid]') as map: map_file = map.file_name with gxv.View.open(map, '*data') as v: with gxg.Draw(v) as g: g.rectangle(g.extent) ex = g.extent width = ex[2] - ex[0] height = ex[3] - ex[1] cxy = (ex[0] + width / 2, ex[1] + height / 2) td = gxg.Text_def(height=width / 20, color='K128', font='sr.gfn', weight=gxg.FONT_WEIGHT_XBOLD) self.assertTrue(td == gxg.Text_def(height=width / 20, color='K128', font='sr.gfn', weight=gxg.FONT_WEIGHT_XBOLD)) self.assertEqual(td.mapplot_string, '5227.3,,,0,"sr"') g.rectangle(ex) g.line((ex[0], cxy[1], ex[2], cxy[1])) g.line((cxy[0], ex[1], cxy[0], ex[3])) g.text('Centered', cxy, text_def=td, reference=gxg.REF_CENTER) g.text('Bottom', (cxy[0], ex[1]), text_def=td, reference=gxg.REF_BOTTOM_CENTER) g.text('Top', (cxy[0], ex[3]), text_def=td, reference=gxg.REF_TOP_CENTER) g.text('Left', (ex[0], cxy[1]), text_def=td, angle=90, reference=gxg.REF_TOP_CENTER) g.text('Right', (ex[2], cxy[1]), text_def=td, angle=-90, reference=gxg.REF_TOP_CENTER) self.crc_map(map_file) def test_text_multiline(self): self.start() with 
gxmap.Map.new(data_area=(400000, 5000000, 500000, 5050000), coordinate_system='WGS 84 / UTM zone 15N [geoid]') as map: map_file = map.file_name with gxv.View.open(map, '*data') as v: with gxg.Draw(v) as g: g.rectangle(g.extent) ex = v.extent_clip width = ex[2] - ex[0] height = ex[3] - ex[1] cxy = (ex[0] + width / 2, ex[1] + height / 2) td = gxg.Text_def(height=width / 20, color='K128', font='sr.gfn', weight=gxg.FONT_WEIGHT_XBOLD) g.rectangle(ex) g.line((ex[0], cxy[1], ex[2], cxy[1])) g.line((cxy[0], ex[1], cxy[0], ex[3])) g.text('Centered\nline2\nand another', cxy, text_def=td, reference=gxg.REF_CENTER) self.crc_map(map_file) def test_locate_group(self): self.start() with gxmap.Map.new(data_area=(400000, 5000000, 500000, 5050000), coordinate_system='WGS 84 / UTM zone 15N [geoid]') as map: map_file = map.file_name with gxv.View.open(map, '*data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) rect = gxgm.Point2((v.extent_clip[0], v.extent_clip[1], (v.extent_clip[2] + v.extent_clip[0]) * 0.5, (v.extent_clip[3] + v.extent_clip[1]) * 0.5)) with gxg.Draw(v, 'a') as g: g.rectangle(rect) with gxg.Draw(v, 'b') as g: self.assertEqual(g.number, 2) g.rectangle(rect, pen="b") g.locate((450000, 5025000), reference=gxg.REF_TOP_CENTER) self.crc_map(map_file) def test_color_bar(self): self.start() # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'test_agg_utm.grd') map_file = os.path.join(self.gx.temp_folder(), "test_agg_utm") with gxgrd.Grid.open(grid_file, mode=gxgrd.FILE_READWRITE) as grd: cs = grd.coordinate_system area = grd.extent_2d() grd.unit_of_measure = 'maki' with gxmap.Map.new(map_file, fixed_size=False, data_area=area, media="A4", margins=(7, 7, 2.5, 2.5), coordinate_system=cs, overwrite=True) as gmap: map_file = gmap.file_name with gxv.View.open(gmap, "base") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, 
pen=g.new_pen(line_thick=1, line_color='K')) with gxv.View.open(gmap, "data") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=0.1, line_color='G')) g.rectangle(v.extent_all, pen=g.new_pen(line_thick=0.1, line_color='B')) with gxagg.Aggregate_image.new(grid_file) as agg: self.assertEqual(agg.layer_unit_of_measure(0), 'maki') self.assertEqual(agg.layer_unit_of_measure(agg.layer_file_names[0]), 'maki') self.assertEqual(agg.layer_color_map(0).unit_of_measure, 'maki') gxg.legend_color_bar(v, 'color_legend', agg.layer_color_map()) gxg.legend_color_bar(v, 'color_legend', agg.layer_color_map(), bar_location=gxg.COLOR_BAR_LEFT) gxg.legend_color_bar(v, 'bottom', agg.layer_color_map(), bar_location=gxg.COLOR_BAR_BOTTOM, box_size=0.5, location=(1, -0.1), annotation_offset=0.1) gxg.legend_color_bar(v, 'top', agg.layer_color_map(), bar_location=gxg.COLOR_BAR_TOP, box_size=0.5, bar_width=0.1, location=0.5, interval_1 = 50, annotation_offset=0.1) self.crc_map(map_file) def test_color_bar_existing_agg(self): self.start() # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'test_agg_utm.grd') map_file = os.path.join(self.gx.temp_folder(), "test_agg_utm") with gxgrd.Grid(grid_file) as grd: cs = grd.coordinate_system area = grd.extent_2d() with gxmap.Map.new(map_file, fixed_size=False, data_area=area, media="A4", margins=(2, 10, 2, 1), coordinate_system=cs, overwrite=True) as gmap: map_file = gmap.file_name with gxv.View.open(gmap, "base") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=1, line_color='K')) with gxv.View.open(gmap, "data") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=0.1, line_color='G')) g.rectangle(v.extent_all, pen=g.new_pen(line_thick=0.1, line_color='B')) with gxagg.Aggregate_image.new(grid_file) as agg: with 
gxg.Aggregate_group.new(v, agg) as g: agg_group_name = g.name with gxv.View.open(gmap, "data") as v: with gxg.Aggregate_group.open(v, agg_group_name) as g: gxg.legend_color_bar(v, 'color_legend', g.agg.layer_color_map()) self.crc_map(map_file) def test_properties(self): self.start() with gxmap.Map.new() as map: with gxv.View.open(map, "base") as v: with gxg.Draw(v, 'edge') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=1, line_color='K')) with gxv.View.open(map, "data") as v: with gxg.Draw(v, 'edge') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=1, line_color='B')) self.assertTrue(g.visible) g.visible = False self.assertFalse(g.visible) def test_graticule(self): self.start() test_map = os.path.join(self.gx.temp_folder(), "test") with gxmap.Map.new(test_map, overwrite=True) as map: map_file = map.file_name map.delete_view('data') with gxv.View.new(map, "my_data_1", map_location=(2, 3), area=(0, 0, 1000, 1500), scale=10000) as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=5, line_color='G')) g.graticule(style=gxg.GRATICULE_LINE, pen=g.new_pen(line_thick=5)) ex1 = v.extent_group('line', unit=gxv.UNIT_MAP) with gxv.View.new(map, "my_data_2", map_location=(15, 3), area=(0, 0, 1000, 1500), scale=10000) as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=5, line_color='G')) g.graticule(style=gxg.GRATICULE_DOT, pen=g.new_pen(line_thick=5)) ex2 = v.extent_group('line', unit=gxv.UNIT_MAP) with gxv.View.new(map, "my_data_3", map_location=(28, 3), area=(0, 0, 1000, 1500), scale=10000) as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=5, line_color='G')) g.graticule(style=gxg.GRATICULE_CROSS, pen=g.new_pen(line_thick=5)) ex3 = v.extent_group('line', unit=gxv.UNIT_MAP) area = (min(ex1[0], ex2[0], ex3[0])/10.0 - 2, max(ex1[1], ex2[1], ex3[1])/10.0 - 2, max(ex1[2], ex2[2], ex3[2])/10.0 + 2, max(ex1[3], ex2[3], ex3[3])/10.0 + 2) with 
gxv.View.new(map, "my_base_view", area=area, scale=100.0) as v: with gxg.Draw(v, 'base_edge') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=0.1, line_color='R')) map.delete_view('base') self.crc_map(map_file) def test_ppoint_3d(self): self.start() plist = [[110, 5, 0], [120, 20, 10], [130, 15, 50], [150, 50, 20], [160, 70, 0], [175, 35, 30], [190, 65, 80], [220, 50, 90], [235, 18.5, 100]] pp = gxgm.PPoint(plist) with gxv.View_3d.new(gxsys.func_name(), overwrite=True) as v: file_name = v.file_name with gxg.Draw_3d(v) as g: g.pen = gxg.Pen(line_color='R') g.polypoint_3d(pp) pp += (0, 0, 20) g.polypoint_3d(pp, style=gxg.POINT_STYLE_SPHERE, pen=gxg.Pen(line_color='G', line_thick=5)) try: self.crc_map(file_name) finally: gxmap.delete_files(file_name) def test_pp_3d(self): self.start() plist = [[110, 5, 0], [120, 20, 10], [130, 15, 50], [150, 50, 20], [160, 70, 0], [175, 35, 30], [190, 65, 80], [220, 50, 90], [235, 18.5, 100]] with gxv.View_3d.new(gxsys.func_name(), overwrite=True) as v: file_name = v.file_name with gxg.Draw_3d(v) as g: pp = gxgm.PPoint(plist) g.pen = gxg.Pen(line_color='R') g.polypoint_3d(pp) pp += (0, 0, 10) g.polypoint_3d(pp, style=gxg.POINT_STYLE_SPHERE, pen=gxg.Pen(line_color='G', line_thick=4)) pp += (0, 0, 10) g.pen = gxg.Pen(line_color='R') g.polyline_3d(pp) pp += (0, 0, 10) g.pen = gxg.Pen(line_color='C', line_thick=3) g.polyline_3d(pp, style=gxg.LINE3D_STYLE_TUBE) pp += (0, 0, 10) g.polyline_3d(pp, style=gxg.LINE3D_STYLE_TUBE_JOINED, pen=gxg.Pen(line_color='K64', line_thick=4)) try: self.crc_map(file_name) finally: gxmap.delete_files(file_name) def test_color_map(self): self.start() cm = gxg.Color_map() self.assertEqual(cm.length, 39) self.assertFalse(cm.initialized) cm = gxg.Color_map(16) self.assertEqual(cm.length, 16) self.assertEqual(cm[0][1], gxg.Color(gxg.C_BLACK)) self.assertEqual(cm[cm.length-1], (None, gxg.Color(gxg.C_BLACK))) cm[4] = (cm[4][0], gxg.Color(gxg.C_GREEN)) self.assertEqual(cm[4][1].rgb, (0, 255, 0)) 
self.assertFalse(cm.initialized) self.assertTrue(isinstance(cm.gxitr, gxapi.GXITR)) cm = gxg.Color_map('grey') self.assertFalse(cm.initialized) cm.set_sequential() self.assertTrue(cm.initialized) self.assertEqual(cm.length, 32) self.assertEqual(cm[0][1].rgb, (31, 31, 31)) self.assertEqual(cm[cm.length-1][1].rgb, (255, 255, 255)) self.assertEqual(cm.color_of_value(0), cm[0][1]) self.assertEqual(cm.color_of_value(7.0), cm[7][1]) self.assertEqual(cm.color_of_value(7.000001), cm[8][1]) self.assertEqual(cm.brightness, 0.) cm.brightness = 0.5 self.assertEqual(cm.brightness, 0.5) self.assertEqual(cm[0][1].rgb, (143, 143, 143)) self.assertEqual(cm[cm.length - 1][1].rgb, (255, 255, 255)) cm.brightness = -0.25 self.assertEqual(cm.brightness, -0.25) self.assertEqual(cm[0][1].rgb, (24, 24, 24)) self.assertEqual(cm[cm.length - 1][1].rgb, (192, 192, 192)) cm.brightness = 0 self.assertEqual(cm[0][1].rgb, (31, 31, 31)) self.assertEqual(cm[cm.length - 1][1].rgb, (255, 255, 255)) cm.set_linear(4, 45) self.assertEqual(cm.length, 32) self.assertEqual(cm[0][0], 4) self.assertEqual(cm[30][0], 45) cm.set_linear(4, 45, inner_limits=False) self.assertEqual(cm.length, 32) self.assertEqual(cm[0][0], 5.28125) self.assertEqual(cm[30][0], 43.71875) cm.set_linear(5, 50, contour_interval=5) self.assertEqual(cm.length, 11) cm = gxg.Color_map('grey') cm.set_logarithmic(0.0001,1000) self.assertEqual(cm.length, 32) cm.set_logarithmic(0.0001,1000, contour_interval=10) self.assertEqual(cm.length, 7) cm = gxg.Color_map('grey') cm.set_logarithmic(0.000023,18000, contour_interval=100) self.assertEqual(cm.length, 5) cm = gxg.Color_map() cm.set_normal(25, 55000) self.assertAlmostEqual(cm[cm.length//2][0], 55000.811582690316) itr = cm.save_file() cm2 = gxg.Color_map(itr) self.assertTrue(cm == cm2) tbl = gxg.Color_map().save_file() self.assertEqual(os.path.splitext(tbl)[1], '.tbl') cm = gxg.Color_map(tbl) self.assertFalse(cm.initialized) def test_color_symbols(self): self.start() data = [((0, 0), 1), ((10, 
0), 2), ((0, 10), 3), ((10, 10), 4)] data2 = [((0, 0, 45), 1, 4), ((10, 0, 8), None, None), ((0, 10, 16), 3, 75), ((None, 10, -22), 4, 7)] cmap = gxg.Color_map() cmap.set_linear(0, 5, contour_interval=1) with gxmap.Map.new(data_area=(-1, -1, 11, 11), scale=100) as map: map_file = map.file_name with gxv.View.open(map, '*data') as v: with gxg.Draw(v) as g: g.rectangle(g.extent) gxg.Color_symbols_group.new(v, 'outer_symbols', data, cmap, unit_of_measure='maki').close() with gxg.Color_symbols_group.open(v, 'outer_symbols') as cs: cm = cs.color_map() self.assertEqual(cm.unit_of_measure, 'maki') self.assertEqual(cm.unit_of_measure, cs.unit_of_measure) cmap = gxg.Color_map() cmap.set_linear(0, 5, contour_interval=1) with gxg.Color_symbols_group.new(v, 'mark', data2, cmap, symbol=gxg.SYMBOL_BOX, symbol_def=gxg.Text_def(font='symbols.gfn', height=0.15, color=gxg.C_WHITE, weight=gxg.FONT_WEIGHT_ULTRALIGHT)) as cs: nv = cs.name with gxg.Color_symbols_group.open(v, nv) as cs: gxg.legend_color_bar(v, 'symbol_legend', cs.color_map()) self.crc_map(map_file) def test_color_symbols_from_array(self): self.start() data = [(0, 0, 1), (10, 0, 2), (0, 10, 3), (10, 10, 4)] cmap = gxg.Color_map() cmap.set_linear(0, 5, contour_interval=1) with gxmap.Map.new(data_area=(-1, -1, 11, 11), scale=100) as map: map_file = map.file_name with gxv.View.open(map, '*data') as v: with gxg.Draw(v) as g: g.rectangle(g.extent) gxg.Color_symbols_group.new(v, 'outer_symbols', np.array(data), cmap, unit_of_measure='maki').close() cmap = gxg.Color_map() cmap.set_linear(0, 5, contour_interval=1) self.crc_map(map_file) def test_color_symbols_3d(self): self.start() data = [((0, 0), 1), ((10, 0), 2), ((0, 10), 3), ((10, 10), 4)] data2 = [((0, 0, 45), 1, 4), ((10, 0, 8), None, None), ((0, 10, 16), 3, 75), ((None, 10, -22), 4, 7)] cmap = gxg.Color_map() cmap.set_linear(0, 5, contour_interval=1) with gxv.View_3d.new() as v: v3d_file = v.file_name with gxg.Draw(v) as g: g.rectangle(g.extent) 
gxg.Color_symbols_group.new(v, 'outer_symbols', data, cmap, unit_of_measure='maki').close() cmap = gxg.Color_map('hotcycle') cmap.set_linear(0, 5, contour_interval=1) with gxg.Color_symbols_group.new(v, 'mark', data2, cmap, symbol=gxg.SYMBOL_BOX, symbol_def=gxg.Text_def(font='symbols.gfn', height=0.15, color=gxg.C_WHITE, weight=gxg.FONT_WEIGHT_ULTRALIGHT)) as cs: nv = cs.name with gxg.Color_symbols_group.open(v, nv) as cs: self.assertEqual(cs.number, 2) self.crc_map(v3d_file) def test_polydata_3d(self): self.start() def render_spheres(item, cmap_radius): xyz, value = item if None in xyz or value is None: return None cmap, radius = cmap_radius cint = cmap.color_of_value(value) return gxg.SYMBOL_3D_SPHERE, xyz, cint.int_value, radius def render_cubes(point, size_color): size, cint = size_color half = size * 0.5 p2 = gxgm.Point2((point - (half, half, half), point + (half, half, half))) return gxg.SYMBOL_3D_CUBE, p2, cint, None def render_cylinders(point, size_color): size, cint = size_color half = size * 0.2 p2 = gxgm.Point2((point - (half, half, half), point + (half, half, half))) return gxg.SYMBOL_3D_CYLINDER, p2, cint, size * 0.4 def render_cones(point, size_color): size, cint = size_color half = size * 0.5 p2 = gxgm.Point2((point - (half, half, half), point + (half, half, half))) return gxg.SYMBOL_3D_CONE, p2, cint, size * 0.2 data = [((0, 0, 0), 1), ((10, 0, 5), 2), ((0, 10, -5), 3), ((0, None, -5), 99), ((0, 10, -5), None), ((10, 10, 10), 4)] cmap = gxg.Color_map() cmap.set_linear(0, 5, contour_interval=1) for c in cmap: if c[0]: self.assertTrue(isinstance(c[0], float)) self.assertTrue(isinstance(c[1], gxg.Color)) with gxv.View_3d.new(area_2d=(-1, -1, 11, 11)) as v: v3d_file = v.file_name with gxg.Draw(v, 'rect') as g: g.rectangle((0,0,10,10), pen=gxg.Pen(line_color=gxg.C_BLACK, line_thick=0.2, fill_color=gxg.C_GREEN)) with gxg.Draw_3d(v, 'pipes') as g: g.polyline_3d(((0,0,0), (10,0,0), (10,10,0), (0,10,0), (0,0,0)), style=gxg.LINE3D_STYLE_TUBE_JOINED, 
pen=gxg.Pen(line_color=gxg.C_GREY, line_thick=0.2)) with gxg.Draw_3d(v, 'outer') as g: g.polydata_3d(data, render_spheres, (cmap, 0.25)) pp = gxgm.PPoint(((5, 5, 5), (7, 5, 5), (7, 7, 7))) g.polydata_3d(pp, render_cubes, (1, gxg.Color('y').int_value)) pp += (0, 0, 2) g.polydata_3d(pp, render_cylinders, (1, gxg.Color('m').int_value)) pp += (0, 0, 2) n = 0 g.polydata_3d(pp, render_cones, (1, gxg.Color('r255g128b128').int_value)) self.crc_map(v3d_file) def test_polydata_3d_grd(self): self.start() def render_spheres(item, cmap_radius): cmap, radius = cmap_radius if not np.isnan(item[2]): cint = cmap.color_of_value(item[2]).int_value return gxg.SYMBOL_3D_SPHERE, item, cint, radius folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'dem_small.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'dem_small.grd') with gxgrd.Grid.open(grid_file) as grd: # get the data and replace z with DEM valie data = grd.xyzv().reshape(-1, 4) data[:, 2] = data[:, 3] * 3 data = data[:, 0:3] cmap = gxg.Color_map() try: std = np.nanstd(data[:, 2]) mean = np.nanmean(data[:, 2]) cmap.set_normal(std, mean) except: cmap.set_linear(0, 1) with gxv.View_3d.new(coordinate_system=grd.coordinate_system) as v: v3d_file = v.file_name with gxg.Draw_3d(v, 'dem_points') as g: g.polydata_3d(data.reshape((-1, 3)), render_spheres, (cmap, 10 * v.units_per_map_cm)) p_min = gxgm.Point((np.nanmin(data[:, 0]), np.nanmin(data[:, 1]), np.nanmin(data[:, 2]))) p_max = gxgm.Point((np.nanmax(data[:, 0]), np.nanmax(data[:, 1]), np.nanmax(data[:, 2]))) extent = gxgm.Point2((p_min, p_max)) g.box_3d(extent, wireframe=True, pen=gxg.Pen(line_color='c', line_thick= 20 * v.units_per_map_cm)) self.crc_map(v3d_file) def test_plane_relief_surface(self): self.start() folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'dem_small.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'dem_small.grd') v3d_name = '' try: # create a 3D view with 
gxv.View_3d.new("data", area_2d=gxgrd.Grid(grid_file).extent_2d(), coordinate_system=gxgrd.Grid(grid_file).coordinate_system, scale=5000, overwrite=True) as v: v3d_name = v.file_name v.set_plane_relief_surface(grid_file, base=200, scale=2, max=250, min=150, refine=2) # add the grid image to the view, with shading, 20 nT contour interval to match default contour lines gxg.Aggregate_group.new(v, gxagg.Aggregate_image.new(grid_file, shade=True, contour=20)) # contour the grid gxg.contour(v, 'TMI_contour', grid_file) self.crc_map(v3d_name) finally: gxv.delete_files(v3d_name) def test_plane_contour(self): self.start() folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'dem_small.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'dem_small.grd') # create a 2D view with gxmap.Map.new(data_area=gxgrd.Grid(grid_file).extent_2d(), scale=20000, inside_margin=0.1, coordinate_system=gxgrd.Grid(grid_file).coordinate_system, overwrite=True) as map: map_name = map.file_name with gxv.View.open(map, "data") as v: gxg.contour(v, 'TMI_contour', grid_file) with gxg.Draw(v, 'edge') as g: g.rectangle((v.extent_clip), pen=gxg.Pen(line_thick=v.units_per_map_cm * 0.1)) self.crc_map(map_name) def test_plane_contour_3d(self): self.start() folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'dem_small.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'dem_small.grd') v3d_name = '' try: # create a 3D view with gxv.View_3d.new("data", area_2d=gxgrd.Grid(grid_file).extent_2d(), coordinate_system=gxgrd.Grid(grid_file).coordinate_system, scale=20000, overwrite=True) as v: v3d_name = v.file_name gxg.contour(v, 'TMI_contour', grid_file) with gxg.Draw(v, 'edge') as g: g.rectangle((v.extent_clip), pen=gxg.Pen(line_thick=v.units_per_map_cm * 0.1)) self.crc_map(v3d_name) finally: gxv.delete_files(v3d_name) def test_polydata_3d_grd_cone(self): self.start() def render_spheres(item, cmap_radius): cmap, radius = 
cmap_radius if not np.isnan(item[2]): cint = cmap.color_of_value(item[2]).int_value item = gxgm.Point(item) item2 = item + (0, radius, radius * 2) return gxg.SYMBOL_3D_CONE, gxgm.Point2((item, item2)), cint, radius folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'dem_small.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'dem_small.grd') with gxgrd.Grid.open(grid_file) as grd: # get the data and replace z with DEM valie data = grd.xyzv().reshape(-1, 4) data[:, 2] = data[:, 3] * 3 data = data[:, 0:3] cmap = gxg.Color_map() try: std = np.nanstd(data[:, 2]) mean = np.nanmean(data[:, 2]) cmap.set_normal(std, mean) except: cmap.set_linear(0, 1) with gxv.View_3d.new(coordinate_system=grd.coordinate_system) as v: v3d_file = v.file_name with gxg.Draw_3d(v, 'dem_points') as g: g.polydata_3d(data.reshape((-1, 3)), render_spheres, (cmap, 10 * v.units_per_map_cm)) p_min = gxgm.Point((np.nanmin(data[:, 0]), np.nanmin(data[:, 1]), np.nanmin(data[:, 2]))) p_max = gxgm.Point((np.nanmax(data[:, 0]), np.nanmax(data[:, 1]), np.nanmax(data[:, 2]))) extent = gxgm.Point2((p_min, p_max)) g.box_3d(extent, wireframe=True, pen=gxg.Pen(line_color='c', line_thick= 20 * v.units_per_map_cm)) self.crc_map(v3d_file) def test_polydata_3d_grd_cylinder(self): self.start() def render_spheres(item, cmap_radius): cmap, radius = cmap_radius if not np.isnan(item[2]): cint = cmap.color_of_value(item[2]).int_value item = gxgm.Point(item) item2 = item + (0, radius, radius * 2) return gxg.SYMBOL_3D_CYLINDER, gxgm.Point2((item, item2)), cint, radius folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'dem_small.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'dem_small.grd') with gxgrd.Grid.open(grid_file) as grd: # get the data and replace z with DEM valie data = grd.xyzv().reshape(-1, 4) data[:, 2] = data[:, 3] * 3 data = data[:, 0:3] cmap = gxg.Color_map() try: std = np.nanstd(data[:, 2]) mean = 
np.nanmean(data[:, 2]) cmap.set_normal(std, mean) except: cmap.set_linear(0, 1) with gxv.View_3d.new(coordinate_system=grd.coordinate_system) as v: v3d_file = v.file_name with gxg.Draw_3d(v, 'dem_points') as g: g.polydata_3d(data.reshape((-1, 3)), render_spheres, (cmap, 10 * v.units_per_map_cm)) p_min = gxgm.Point((np.nanmin(data[:, 0]), np.nanmin(data[:, 1]), np.nanmin(data[:, 2]))) p_max = gxgm.Point((np.nanmax(data[:, 0]), np.nanmax(data[:, 1]), np.nanmax(data[:, 2]))) extent = gxgm.Point2((p_min, p_max)) g.box_3d(extent, wireframe=True, pen=gxg.Pen(line_color='c', line_thick= 20 * v.units_per_map_cm)) self.crc_map(v3d_file) def test_contour(self): self.start() # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'test_agg_utm.grd') map_file = os.path.join(self.gx.temp_folder(), "test_agg_utm") with gxgrd.Grid(grid_file) as grd: cs = grd.coordinate_system area = grd.extent_2d() with gxmap.Map.new(map_file, data_area=area, media="A4", margins=(0, 10, 0, 0), coordinate_system=cs, overwrite=True) as gmap: map_file = gmap.file_name with gxv.View.open(gmap, "base") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=1, line_color='K')) with gxv.View.open(gmap, "data") as v: with gxg.Draw(v, 'line') as g: g.rectangle(area, pen=g.new_pen(line_thick=0.1, line_color='R')) with gxagg.Aggregate_image.new(grid_file) as agg: with gxg.Aggregate_group.new(v, agg) as gagg: self.assertEqual(gagg.name, str(agg)) self.assertEqual(len(v.group_list_agg), 1) gxg.contour(v, 'contour', grid_file) self.crc_map(map_file) def test_contour2(self): self.start() # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'test_agg_utm.grd') map_file = os.path.join(self.gx.temp_folder(), "test_agg_utm") with 
gxgrd.Grid(grid_file) as grd: cs = grd.coordinate_system area = grd.extent_2d() with gxmap.Map.new(map_file, data_area=area, margins=(2, 10, 2, 2), coordinate_system=cs, overwrite=True, scale=20000) as gmap: map_file = gmap.file_name with gxv.View.open(gmap, "base") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=1, line_color='K')) with gxv.View.open(gmap, "data") as v: with gxg.Draw(v, 'line') as g: g.rectangle(area, pen=g.new_pen(line_thick=0.1, line_color='R')) with gxagg.Aggregate_image.new(grid_file, contour=10) as agg: cmap = agg.layer_color_map() cname = agg.name with gxg.Aggregate_group.new(v, agg) as gagg: self.assertEqual(gagg.name, str(agg)) gxg.legend_color_bar(v, cname, cmap) self.assertEqual(len(v.group_list_agg), 1) gxg.contour(v, 'contour', grid_file) self.crc_map(map_file) def test_contour_parameters(self): self.start() # test grid file folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self.gx.temp_folder()) grid_file = os.path.join(folder, 'test_agg_utm.grd') map_file = os.path.join(self.gx.temp_folder(), "test_agg_utm") with gxgrd.Grid(grid_file) as grd: cs = grd.coordinate_system area = grd.extent_2d() with gxmap.Map.new(map_file, data_area=area, margins=(2, 10, 2, 2), coordinate_system=cs, overwrite=True, scale=20000) as gmap: map_file = gmap.file_name with gxv.View.open(gmap, "base") as v: with gxg.Draw(v, 'line') as g: g.rectangle(v.extent_clip, pen=g.new_pen(line_thick=1, line_color='K')) with gxv.View.open(gmap, "data") as v: with gxg.Draw(v, 'line') as g: g.rectangle(area, pen=g.new_pen(line_thick=0.1, line_color='R')) #gxg.contour(v, '_250', grid_file, parameters=('', '', '', '', '', '', '50/')) gxg.contour(v, '_250', grid_file, parameters=('', '', '', '', '', '', '10', '50', '250')) gxg.contour(v, '_260_270', grid_file, parameters={'levels': {'levopt': 1}, 'contours': [{'cint': 260, 'label': 0, 'catt': 'a=rt50'}, {'cint': 270, 'label': 1, 
'catt': 'b=gt1000'}, {'cint': 280, 'label': 1, 'catt': 'c=br100g100t500'}]}) self.crc_map(map_file) def test_color_str(self): self.start() self.assertEqual(gxg.color_from_string("R"), 33554687) self.assertEqual(gxg.color_from_string("H255S127V32"), 18907135) def test_group_properties(self): self.start() rect = gxgm.Point2((0,0,10,5)) with gxmap.Map.new(data_area=rect.extent_xy) as gmap: with gxv.View.new(gmap, "data") as v: gxg.Draw(v, 'rect').rectangle(rect) self.assertTrue(len(v.group_list), 1) gxg.Draw(v, 'rect').rectangle(rect) self.assertTrue(len(v.group_list), 1) gxg.Draw(v, 'rect', mode=gxg.NEW).rectangle(rect) self.assertTrue(len(v.group_list), 2) self.assertTrue('rect_1' in v.group_list) gxg.Draw(v, 'rect_1', mode=gxg.REPLACE).rectangle(rect) self.assertTrue(len(v.group_list), 2) self.assertTrue('rect_1' in v.group_list) with gxg.Draw(v, 'property_test') as g: self.assertEqual(g.group_opacity, 1.0) g.group_opacity = 0.25 self.assertEqual(g.group_opacity, 0.25) g.group_opacity = -50 self.assertEqual(g.group_opacity, 0.) g.group_opacity = 5 self.assertEqual(g.group_opacity, 1.) self.assertFalse(g.group_3d) self.assertEqual(g.name, 'property_test') self.assertEqual(g.view.name, 'data') @unittest.skip('WIP see issue #73') def test_surface(self): self.start() verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10], [-3, 6, 8], [-4, 0, 12]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4], [1, 2, 4], [3, 4, 5], [6, 4, 5]], dtype=np.int32) with gxv.View_3d.new() as v3d: v3d_file = v3d.file_name with gxg.Draw_3d(v3d, 'Surface') as g: g._surface(faces, verts) image_file = gxmap.Map.open(v3d_file).image_file(pix_width=800) gxviewer.view_document(v3d_file, wait_for_close=True) pass # self.crc_map(v3d_file) if __name__ == '__main__': unittest.main()<file_sep>/docs/GXARCDB.rst .. _GXARCDB: GXARCDB class ================================== .. autoclass:: geosoft.gxapi.GXARCDB :members: .. 
_ARC_SELTBL_TYPE: ARC_SELTBL_TYPE constants ----------------------------------------------------------------------- Describes what kind of table was selected .. autodata:: geosoft.gxapi.ARC_SELTBL_STANDALONE :annotation: .. autoattribute:: geosoft.gxapi.ARC_SELTBL_STANDALONE .. autodata:: geosoft.gxapi.ARC_SELTBL_FEATURELAYER :annotation: .. autoattribute:: geosoft.gxapi.ARC_SELTBL_FEATURELAYER .. autodata:: geosoft.gxapi.ARC_SELTBL_CANCELED :annotation: .. autoattribute:: geosoft.gxapi.ARC_SELTBL_CANCELED <file_sep>/geosoft/gxpy/tests/test_map.py import unittest import os import numpy as np import geosoft import geosoft.gxapi as gxapi import geosoft.gxpy.gx as gx import geosoft.gxpy.map as gxmap import geosoft.gxpy.view as gxv import geosoft.gxpy.geometry as gxgm import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.system as gxsys import geosoft.gxpy.group as gxg import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.utility as gxu from base import GXPYTest class Test(GXPYTest): def _new_data_map(self, mapname=None, rescale=1.0): if mapname is None: mapname = os.path.join(self.gx.temp_folder(), 'test') with gxmap.Map.new(mapname, overwrite=True) as map: with gxv.View(map, "rectangle_test") as v: with gxg.Draw(v, 'rectangle') as g: g.rectangle((gxgm.Point((0, 0)), gxgm.Point((250, 110))), pen=gxg.Pen(line_thick=1)) p1 = gxgm.Point((5, 5)) * rescale p2 = gxgm.Point((100, 100)) * rescale poff = gxgm.Point((10, 5)) * rescale g.pen = gxg.Pen(fill_color=gxg.C_LT_GREEN) g.rectangle((p1, p2)) g.pen = gxg.Pen(line_style=2, line_pitch=2.0) g.line((p1 + poff, p2 - poff)) with gxv.View(map, "poly") as v: with gxg.Draw(v, 'poly') as g: plinelist = [[110, 5], [120, 20], [130, 15], [150, 50], [160, 70], [175, 35], [190, 65], [220, 50], [235, 18.5]] pp = gxgm.PPoint.from_list(plinelist) * rescale g.pen = gxg.Pen(line_style=2, line_pitch=2.0) g.polyline(pp) g.pen = gxg.Pen(line_style=4, line_pitch=2.0, line_smooth=gxg.SMOOTH_AKIMA) g.polyline(pp) ppp = 
np.array(plinelist) pp = gxgm.PPoint(ppp[3:, :]) * rescale g.pen = gxg.Pen(line_style=5, line_pitch=5.0, line_smooth=gxg.SMOOTH_CUBIC, line_color=gxapi.C_RED, line_thick=0.25, fill_color=gxapi.C_LT_BLUE) g.polyline(pp, close=True) g.pen = gxg.Pen(fill_color=gxapi.C_LT_GREEN) pp = (pp - (100, 0, 0)) / 2 + (100, 0, 0) g.polyline(pp, close=True) pp += (0, 25, 0) g.pen = gxg.Pen(fill_color=gxapi.C_LT_RED) g.polyline(pp, close=True) return map.file_name def _data_map(self, name=None, data_area=(1000, 0, 11000, 5000), margins=None, coordinate_system=None): if name is None: name = os.path.join(self.gx.temp_folder(), "test") gxmap.delete_files(name) if coordinate_system is None: coordinate_system = gxcs.Coordinate_system("WGS 84 / UTM zone 15N") if margins is None: margins = (1.5, 1.5, 3, 1) return gxmap.Map.new(file_name=name, data_area=data_area, coordinate_system=coordinate_system, media="A4", margins=margins, inside_margin=0.5) def test_version(self): self.start() self.assertEqual(gxmap.__version__, geosoft.__version__) def test_newmap(self): self.start() # test map name = 'test_newmap' gxmap.delete_files(name) with gxmap.Map.new(name) as map: self.assertEqual(map.name, name) mapfile = map.file_name self.assertEqual(mapfile, os.path.abspath((name + '.map'))) self.assertTrue(os.path.isfile(mapfile)) # verify can't write on a new map self.assertRaises(gxmap.MapException, gxmap.Map.new, name) self.assertRaises(gxmap.MapException, gxmap.Map.new, mapfile) with gxmap.Map.new(name, overwrite=True): pass # but I can open it with gxmap.Map.open(name): pass with gxmap.Map.open(mapfile): pass gxmap.delete_files(mapfile) self.assertFalse(os.path.isfile(mapfile)) self.assertRaises(gxmap.MapException, gxmap.Map, 'bogus') def test_new_geosoft_map(self): self.start() # temp map with gxmap.Map.new(data_area=(0, 0, 100, 80)) as map: views = map.view_list self.assertTrue('base' in views) self.assertTrue('data' in views) self.assertEqual(len(map.aggregate_list()), 0) with 
gxmap.Map.new(data_area=(0, 0, 100, 80), coordinate_system=gxcs.Coordinate_system("DHDN / Okarito 2000 [geodetic]")) as map: with gxv.View(map, 'data', mode=gxv.WRITE_OLD) as v: self.assertEqual("DHDN / Okarito 2000 [geodetic]", str(v.coordinate_system)) self.assertEqual(map.current_data_view, 'data') self.assertEqual(map.current_base_view, 'base') self.assertEqual(map.current_section_view, 'section') # the following does not make sense, for testing purposes only map.current_data_view = 'base' self.assertEqual(map.current_data_view, 'base') map.current_base_view = 'data' self.assertEqual(map.current_base_view, 'data') map.current_section_view = 'data' self.assertEqual(map.current_section_view, 'data') def test_lists(self): self.start() mapname = self._new_data_map() with gxmap.Map.open(mapname) as map: views = map.view_list self.assertTrue('rectangle_test' in views) self.assertTrue('poly' in views) views = map.view_list_2D self.assertTrue('rectangle_test' in views) self.assertTrue('poly' in views) views = map.view_list_3D self.assertEqual(len(views), 0) def test_map_delete(self): self.start() with gxmap.Map.new(file_name='test_geosoft', overwrite=True) as map: file_name = map.file_name self.assertEqual(len(map.view_list), 2) self.assertTrue(map.has_view('data')) self.assertTrue(map.has_view('base')) self.assertTrue(os.path.isfile(file_name)) with open(file_name, 'rb') as f: pass with gxmap.Map.new(file_name='test_geosoft', overwrite=True, data_area=(1000, 200, 11000, 5000)) as map: file_name = map.file_name self.assertEqual(len(map.view_list), 2) self.assertTrue(map.has_view('data')) self.assertTrue(map.has_view('base')) self.assertTrue(os.path.isfile(file_name)) with open(file_name, 'rb') as f: pass with gxmap.Map.new(file_name='test_geosoft', overwrite=True) as map: file_name = map.file_name map.remove_on_close(True) self.assertFalse(os.path.isfile(file_name)) for i in range(3): map = gxmap.Map.new(file_name='t{}'.format(i), overwrite=True) 
map.remove_on_close(True) map.close() def test_map_classes(self): self.start() with gxmap.Map.new(file_name='test_geosoft', overwrite=True) as map: self.assertEqual(map.get_class_name('data'), 'data') self.assertEqual(map.get_class_name('base'), 'base') self.assertEqual(map.get_class_name('section'), 'section') self.assertEqual(map.get_class_name('some_class_name'), 'some_class_name') map.set_class_name('base', 'bogus') self.assertEqual(map.get_class_name('base'), 'bogus') map.set_class_name('data', 'bogus_data') #self.assertEqual(map.get_class_name('data'), 'bogus_data') map.set_class_name('Section', 'yeah') self.assertEqual(map.get_class_name('Section'), 'yeah') map.set_class_name('mine', 'boom') self.assertEqual(map.get_class_name('mine'), 'boom') with gxmap.Map.new(data_area=(0, 0, 100, 80), coordinate_system=gxcs.Coordinate_system("DHDN / Okarito 2000 [geodetic]")) as map: self.assertEqual(map.get_class_name('base'), 'base') self.assertEqual(map.get_class_name('data'), 'data') with gxv.View(map, "copy_data", mode=gxv.WRITE_NEW, copy="data"): pass map.set_class_name('data', 'copy_data') self.assertEqual(map.get_class_name('data'), 'copy_data') def test_current_view(self): self.start() with gxmap.Map.new() as map: self.assertEqual(map.current_data_view, 'data') map.current_data_view = 'base' self.assertEqual(map.current_data_view, 'base') self.assertEqual(map.get_class_name('data'), 'base') map.current_data_view = 'data' self.assertEqual(map.current_data_view, 'data') try: map.current_data_view = 'Bogus' self.assertTrue(False) except gxmap.MapException: pass map.copy_view('data', 'Bogus') map.current_data_view = 'Bogus' self.assertEqual(map.current_data_view, 'bogus') self.assertRaises(gxmap.MapException, map.copy_view, 'Data', 'bogus') self.assertRaises(gxmap.MapException, map.copy_view, 'data', 'bogus') map.copy_view('data', 'bogus', overwrite=True) self.assertEqual(map.current_data_view, 'bogus') def test_media(self): self.start() def crc_media(map_file, 
test_number): with gxmap.Map.open(map_file) as map: with gxv.View(map, "base") as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip, pen=gxg.Pen(line_thick=0.2, line_color='K')) with gxv.View(map, "data") as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip, pen=gxg.Pen(line_thick=0.2, line_color='R')) self.crc_map(map_file, pix_width=256, alt_crc_name='{}_{}'.format(gxsys.func_name(1), test_number)) test_media_map = os.path.join(gx.gx().temp_folder(), 'test_media') test_number = 0 with gxmap.Map.new(test_media_map + 'scale_800', overwrite=True, scale=800, data_area=(5, 10, 50, 100)) as map: file_name = map.file_name crc_media(file_name, test_number) test_number += 1 with gxmap.Map.new(test_media_map + 'scale_100', overwrite=True, scale=100, data_area=(5, 10, 50, 100)) as map: file_name = map.file_name crc_media(file_name, test_number) test_number += 1 with gxmap.Map.new(test_media_map + 'a4_portrait', overwrite=True, media='A4', layout=gxmap.MAP_PORTRAIT) as map: file_name = map.file_name crc_media(file_name, test_number) test_number += 1 with gxmap.Map.new(test_media_map + 'portrait_a4', overwrite=True, media='a4', fixed_size=True, layout=gxmap.MAP_PORTRAIT) as map: file_name = map.file_name crc_media(file_name, test_number) test_number += 1 with gxmap.Map.new(test_media_map + 'a4_landscape', overwrite=True, media='A4', fixed_size=True, layout=gxmap.MAP_LANDSCAPE) as map: file_name = map.file_name crc_media(file_name, test_number) test_number += 1 with gxmap.Map.new(test_media_map + 'A4_1', overwrite=True, media='A4', fixed_size=True, data_area=(10, 5, 100, 50)) as map: file_name = map.file_name crc_media(file_name, test_number) test_number += 1 with gxmap.Map.new(test_media_map + 'A4_2', overwrite=True, media='A4', fixed_size=True, data_area=(5, 10, 50, 100), layout=gxmap.MAP_LANDSCAPE) as map: file_name = map.file_name crc_media(file_name, test_number) test_number += 1 with gxmap.Map.new(test_media_map + 'A4_3', overwrite=True, media='A4', data_area=(5, 10, 
50, 100)) as map: file_name = map.file_name crc_media(file_name, test_number) test_number += 1 for m in (None, (60, 50), 'unlimited', 'bogus', 'A4', 'A3', 'A2', 'A1', 'A0', 'A', 'B', 'C', 'D', 'E'): with gxmap.Map.new(media=m) as map: pass self.assertRaises(gxmap.MapException, gxmap.Map.new, test_media_map, overwrite=True, media='A4', data_area=(100, 50, 10, 5), layout='landscape') def test_multiple_temp_maps(self): self.start() mapfiles = [] for i in range(3): with gxmap.Map.new() as map: mapfiles.append(map.file_name) with gxv.View(map, 'data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) for fn in mapfiles: self.assertTrue(os.path.isfile(fn)) def test_north_arrow_0(self): self.start() with gxmap.Map.new(coordinate_system='ft') as map: mapfile = map.file_name with gxv.View(map, 'base') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) with gxv.View(map, 'data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) map.north_arrow() self.crc_map(mapfile) def test_north_arrow_1(self): self.start() with gxmap.Map.new(coordinate_system='m', data_area=(0,0,20,10), scale=100) as map: mapfile = map.file_name with gxv.View(map, 'base') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) with gxv.View(map, 'data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) map.north_arrow(location=(2, 0, 3), inclination=-12, declination=74.5, text_def=gxg.Text_def(font='Calibri')) self.crc_map(mapfile) def test_scale_1(self): self.start() with gxmap.Map.new(data_area=(350000,7000000,400000,7030000), coordinate_system='ft') as map: mapfile = map.file_name with gxv.View(map, 'base') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) with gxv.View(map, 'data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) map.scale_bar() map.scale_bar(location=(2, 0, 2), length=10, sections=12) map.scale_bar(location=(5, 0, 0), length=8, sections=2, post_scale=True) map.scale_bar(location=(3, -3, 1.5), length=4, text_def=gxg.Text_def(height=0.2, 
italics=True), post_scale=True) self.crc_map(mapfile) def test_scale_2(self): self.start() with gxmap.Map.new(data_area=(350000,7000000,400000,7030000), coordinate_system='NAD83 / UTM zone 15N') as map: mapfile = map.file_name with gxv.View(map, 'base') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) with gxv.View(map, 'data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) map.scale_bar() map.scale_bar(location=(2, 0, 2), length=10, sections=12, pen=gxg.Pen(line_color='R'), text_def=gxg.Text_def(color='B', weight=gxg.FONT_WEIGHT_BOLD)) self.crc_map(mapfile) def test_surround_1(self): self.start() cs = gxcs.Coordinate_system('NAD83 / UTM zone 15N') with gxmap.Map.new(data_area=(350000, 7000000, 400000, 7030000), coordinate_system=cs) as map: mapfile = map.file_name with gxv.View(map, 'data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) map.surround() self.crc_map(mapfile) def test_surround_2(self): self.start() cs = gxcs.Coordinate_system('NAD83 / UTM zone 15N') with gxmap.Map.new(data_area=(350000, 7000000, 400000, 7030000), coordinate_system=cs) as map: mapfile = map.file_name with gxv.View(map, 'data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) map.surround(gap=0.2, outer_pen=gxg.Pen(line_thick=0.2), inner_pen='bt500') self.crc_map(mapfile) def test_surround_3(self): self.start() with gxmap.Map.new(data_area=(350000, 7000000, 400000, 7030000)) as map: mapfile = map.file_name with gxv.View(map, 'data') as v: with gxg.Draw(v) as g: g.rectangle(v.extent_clip) map.surround(gap=0.2, outer_pen='rt500', inner_pen='K16') self.crc_map(mapfile) def test_annotate_xy_0(self): self.start() with self._data_map() as map: mapfile = map.file_name map.annotate_data_xy(x_sep=1500) self.crc_map(mapfile) def test_annotate_xy_1(self): self.start() with self._data_map() as map: mapfile = map.file_name map.annotate_data_xy(x_sep=1500, grid=gxmap.GRID_DOTTED, offset=0.5, text_def=gxg.Text_def(color='R', weight=gxg.FONT_WEIGHT_BOLD)) 
self.crc_map(mapfile) def test_annotate_xy_2(self): self.start() with self._data_map() as map: mapfile = map.file_name map.annotate_data_xy(x_sep=1500, tick=0.1, text_def=gxg.Text_def(color='G', weight=gxg.FONT_WEIGHT_ULTRALIGHT), grid=gxmap.GRID_CROSSES, grid_pen=gxg.Pen(line_color='b', line_thick=0.015)) self.crc_map(mapfile) def test_annotate_xy_3(self): self.start() with self._data_map() as map: mapfile = map.file_name map.annotate_data_xy(x_sep=1500, tick=0.1, grid=gxmap.GRID_LINES, offset=0.2, edge_pen=gxg.Pen(line_color='k64', line_thick=0.3), grid_pen=gxg.Pen(line_color='b', line_thick=0.015)) self.crc_map(mapfile) def test_annotate_ll_0(self): self.start() with self._data_map(data_area=(350000,7000000,400000,7030000)) as map: mapfile = map.file_name map.annotate_data_ll() self.crc_map(mapfile) def test_annotate_ll_1(self): self.start() with self._data_map(data_area=(350000,7000000,400000,7030000)) as map: mapfile = map.file_name map.annotate_data_ll(grid=gxmap.GRID_LINES, grid_pen=gxg.Pen(line_color='b'), text_def=gxg.Text_def(color='K127', font='cr.gfn', weight=gxg.FONT_WEIGHT_BOLD, italics=True)) self.crc_map(mapfile) def test_annotate_ll_2(self): self.start() with self._data_map(data_area=(350000,7000000,400000,7030000)) as map: mapfile = map.file_name map.annotate_data_xy() map.annotate_data_ll(grid=gxmap.GRID_LINES, grid_pen=gxg.Pen(line_color='b', line_thick=0.015), text_def=gxg.Text_def(color='r', height=0.2, italics=True), top=gxmap.TOP_IN) self.crc_map(mapfile) def test_annotate_ll_3(self): self.start() with self._data_map(data_area=(350000,7000000,400000,7030000)) as map: mapfile = map.file_name map.annotate_data_xy(tick=0.1, grid=gxmap.GRID_LINES, text_def=gxg.Text_def(weight=gxg.FONT_WEIGHT_ULTRALIGHT), grid_pen=gxg.Pen(line_thick=0.01)) map.annotate_data_ll(grid=gxmap.GRID_LINES, grid_pen=gxg.Pen(line_color='b', line_thick=0.025), text_def=gxg.Text_def(height=0.18, italics=True, color='g')) self.crc_map(mapfile) def test_annotate_ll_4(self): 
self.start() with self._data_map(data_area=(350000,7000000,400000,7030000)) as map: mapfile = map.file_name map.annotate_data_xy(tick=0.1, grid=gxmap.GRID_LINES, text_def=gxg.Text_def(weight=gxg.FONT_WEIGHT_BOLD), top=gxmap.TOP_IN) map.annotate_data_ll(grid=gxmap.GRID_LINES, grid_pen=gxg.Pen(line_color='b', line_thick=0.025), text_def=gxg.Text_def(height=0.18, italics=True), top=gxmap.TOP_IN) self.crc_map(mapfile) def test_annotate_ll_local(self): self.start() cs = gxcs.Coordinate_system({'type': 'local', 'lon_lat': (-96, 45), 'datum': 'nad83', 'azimuth': -30}) cs = gxcs.Coordinate_system("NAD27 / UTM zone 15N <425000,6500145,0,0,0,-30>") name = os.path.join(gx.gx().temp_folder(), "test") with gxmap.Map.new(file_name='mapplot_anoxy_rotated_cs_bug_UTM', overwrite=True, data_area=(0, 0, 5000, 3500), coordinate_system=cs, media="A3", margins=(3,3,4,3)) as map: mapfile = map.file_name map.scale_bar(location=(2, 0, 2), length=6, sections=5) map.surround() map.annotate_data_xy(grid=gxmap.GRID_LINES) map.annotate_data_ll(grid=gxmap.GRID_LINES, grid_pen=gxg.Pen(line_color='b', line_thick=0.025), text_def=gxg.Text_def(height=0.18, italics=True), top=gxmap.TOP_IN) map.surround() self.crc_map(mapfile) gxmap.delete_files(mapfile) def test_view_extents(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test") with gxmap.Map.new(testmap, overwrite=True) as gmap: gmap.delete_view('data') gxv.View(gmap, "my_data_1", map_location=(2, 3), area=(0, 0, 1000, 1500), scale=10000).close() gxv.View(gmap, "my_data_2", map_location=(15, 3), area=(0, 0, 1000, 1500), scale=10000).close() ex = gmap.extent_data_views() self.assertEqual(ex, (2, 3, 25, 18)) mdf = gmap.mdf() self.assertEqual(mdf[0], (36.39513677811551, 39.99513677811551, 3.0, 24.395136778115507, 21.99513677811551, 2.0)) self.assertEqual(mdf[1], (10000.0, 1.0, 0.0, 0.0)) def test_metadata(self): self.start() testmap = os.path.join(self.gx.temp_folder(), "test") with gxmap.Map.new(testmap, overwrite=True) as gmap: 
gmap.delete_view('data') gxv.View(gmap, "my_data_1", map_location=(2, 3), area=(0, 0, 1000, 1500), scale=10000).close() gxv.View(gmap, "my_data_2", map_location=(15, 3), area=(0, 0, 1000, 1500), scale=10000).close() m = gmap.metadata gm = m['geosoft'] self.assertEqual(len(gm), 2) self.assertTrue('dataset' in gm) newstuff = {'maki': {'a': 1, 'b': (4, 5, 6), 'units': 'nT'}} gmap.metadata = newstuff with gxmap.Map.open(testmap) as gmap: m = gmap.metadata gm = m['geosoft'] self.assertEqual(len(gm), 2) self.assertTrue('dataset' in gm) maki = m['maki'] self.assertEqual(maki['b'], ['4', '5', '6']) self.assertEqual(maki['units'], 'nT') def test_geotiff(self): self.start() mapname = self._new_data_map() with gxmap.Map.open(mapname) as map: map.export_geotiff('test_geotiff.tif') with gxgrd.Grid.open('test_geotiff.tif(TIF)') as tif: tif.delete_files() properties = tif.properties() self.assertEqual(properties.get('nx'),1376) self.assertEqual(properties.get('ny'),1512) self.assertEqual(str(properties.get('coordinate_system')),'*unknown') def test_figure(self): self.start() mapfile1 = gxmap.Map.figure((400, -1000, 1400, -200), file_name='test_figure1.map').file_name gxmap.crc_map(mapfile1) mapfile2 = gxmap.Map.figure((400400, 6000000, 401400, 6000800), coordinate_system="NAD27 / UTM zone 25N", title='Test Coordinate System', features='all', file_name='test_figure2.map').file_name gxmap.crc_map(mapfile2) def test_from_gxapi(self): self.start() gxapi_map = gxapi.GXMAP.create('test_from_gxapi.map', gxmap.WRITE_NEW) with gxmap.Map.from_gxapi(gxapi_map) as map: self.assertEqual('test_from_gxapi', map.name) if __name__ == '__main__': unittest.main() <file_sep>/geosoft/gxapi/GXMATH.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMATH(gxapi_cy.WrapMATH): """ GXMATH class. This is not a class. This is a collection of standard mathematical functions, including most of the common logarithmic and geometric functions. """ def __init__(self, handle=0): super(GXMATH, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMATH <geosoft.gxapi.GXMATH>` :returns: A null `GXMATH <geosoft.gxapi.GXMATH>` :rtype: GXMATH """ return GXMATH() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def cross_product_(cls, x1, y1, z1, x2, y2, z2, x3, y3, z3): """ Cross product of two vectors. :param x1: X1 component :param y1: Y1 component :param z1: Z1 component :param x2: X2 component :param y2: Y2 component :param z2: Z2 component :param x3: X3 component (output) :param y3: Y3 component (output) :param z3: Z3 component (output) :type x1: float :type y1: float :type z1: float :type x2: float :type y2: float :type z2: float :type x3: float_ref :type y3: float_ref :type z3: float_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ x3.value, y3.value, z3.value = gxapi_cy.WrapMATH._cross_product_(GXContext._get_tls_geo(), x1, y1, z1, x2, y2, z2, x3.value, y3.value, z3.value) @classmethod def abs_int_(cls, n): """ Calculate absolute value :param n: Integer :type n: int :returns: Integer :rtype: int .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._abs_int_(GXContext._get_tls_geo(), n) return ret_val @classmethod def and_(cls, pi_val1, pi_val2): """ Return the unary operation result of A & B Returns an integer number If A or B is a dummy, returns dummy. :param pi_val1: A :param pi_val2: B :type pi_val1: int :type pi_val2: int :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMATH._and_(GXContext._get_tls_geo(), pi_val1, pi_val2) return ret_val @classmethod def mod_int_(cls, a, b): """ Calculates the modulus of two integers :param a: A :param b: B (must not be zero) :type a: int :type b: int :returns: Int :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If A or B is a dummy, returns dummy. """ ret_val = gxapi_cy.WrapMATH._mod_int_(GXContext._get_tls_geo(), a, b) return ret_val @classmethod def or_(cls, pi_val1, pi_val2): """ Return the unary operation result of A | B Returns an integer number If A or B is a dummy, returns dummy. :param pi_val1: A :param pi_val2: B :type pi_val1: int :type pi_val2: int :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMATH._or_(GXContext._get_tls_geo(), pi_val1, pi_val2) return ret_val @classmethod def round_int_(cls, z): """ Round to the nearest whole number :param z: Round :type z: float :returns: Integer :rtype: int .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Negative values with decimal parts larger than .5 round down (-1.5 -> 2.0) Positive values with decimal parts larger than .5 round up (1.5 -> 2.0) Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._round_int_(GXContext._get_tls_geo(), z) return ret_val @classmethod def xor_(cls, pi_val1, pi_val2): """ Return the unary operation result of A ^ B Returns an integer number If A or B is a dummy, returns dummy. :param pi_val1: A :param pi_val2: B :type pi_val1: int :type pi_val2: int :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMATH._xor_(GXContext._get_tls_geo(), pi_val1, pi_val2) return ret_val @classmethod def nicer_log_scale_(cls, min, max, fine): """ Finds nicer min, max values for logarithmic plot scales. :param min: Min value (changed) :param max: Max value (changed) :param fine: Fine flag :type min: float_ref :type max: float_ref :type fine: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Will fail if the input upper bound is less than the lower bound, but will work if the two values are equal. The input bounds are overwritten. Input lower and upper bounds, returns "nicer" values. If the Fine flag is set to TRUE, the values will have the form N x 10^Y, where N is a value from 1 to 9, and 10^Y is an integral power of 10. If the Fine flag is set to FALSE, the scaling is coarse, and the bounding exact powers of 10 are returned. For example, the values (.034, 23) return (.03, 30) for fine scaling, and (0.01, 100) for coarse scaling. 
""" min.value, max.value = gxapi_cy.WrapMATH._nicer_log_scale_(GXContext._get_tls_geo(), min.value, max.value, fine) @classmethod def nicer_scale_(cls, min, max, inc, pow): """ Compute a nicer scale for a given min and max. :param min: Min value (changed) :param max: Max value (changed) :param inc: Inc value (returned) :param pow: Power value (returned) :type min: float_ref :type max: float_ref :type inc: float_ref :type pow: int_ref .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min.value, max.value, inc.value, pow.value = gxapi_cy.WrapMATH._nicer_scale_(GXContext._get_tls_geo(), min.value, max.value, inc.value, pow.value) @classmethod def normalise_3d_(cls, x, y, z): """ Scale a vector to unit length. :param x: X component (altered) :param y: Y component (altered) :param z: Z component (altered) :type x: float_ref :type y: float_ref :type z: float_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Divides each component by the vector magnitude. """ x.value, y.value, z.value = gxapi_cy.WrapMATH._normalise_3d_(GXContext._get_tls_geo(), x.value, y.value, z.value) @classmethod def abs_double_(cls, z): """ Calculate absolute value :param z: Real :type z: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._abs_double_(GXContext._get_tls_geo(), z) return ret_val @classmethod def arc_cos_(cls, val): """ Calculate the arccosine :param val: Real :type val: float :returns: Real :rtype: float .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values or values < -1 or > 1 return dummy """ ret_val = gxapi_cy.WrapMATH._arc_cos_(GXContext._get_tls_geo(), val) return ret_val @classmethod def arc_sin_(cls, val): """ Calculate the arcsin :param val: Real :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values or values < -1 or > 1 return dummy """ ret_val = gxapi_cy.WrapMATH._arc_sin_(GXContext._get_tls_geo(), val) return ret_val @classmethod def arc_tan_(cls, val): """ Calculate the arctan :param val: Real :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._arc_tan_(GXContext._get_tls_geo(), val) return ret_val @classmethod def arc_tan2_(cls, y, x): """ Calculate ArcTan(Y/X) :param y: Y :param x: X :type y: float :type x: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If either X or Y is a dummy, returns dummy """ ret_val = gxapi_cy.WrapMATH._arc_tan2_(GXContext._get_tls_geo(), y, x) return ret_val @classmethod def ceil_(cls, z): """ Calculates the ceiling of the value :param z: Real :type z: float :returns: Real :rtype: float .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._ceil_(GXContext._get_tls_geo(), z) return ret_val @classmethod def cos_(cls, val): """ Calculate the cosine :param val: Angle in radians :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._cos_(GXContext._get_tls_geo(), val) return ret_val @classmethod def dot_product_3d_(cls, x1, y1, z1, x2, y2, z2): """ Compute Dot product of two vectors. :param x1: X1 component :param y1: Y1 component :param z1: Z1 component :param x2: X2 component :param y2: Y2 component :param z2: Z2 component :type x1: float :type y1: float :type z1: float :type x2: float :type y2: float :type z2: float :returns: Dot product :rtype: float .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMATH._dot_product_3d_(GXContext._get_tls_geo(), x1, y1, z1, x2, y2, z2) return ret_val @classmethod def exp_(cls, val): """ Calculate e raised to the power of X :param val: X :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._exp_(GXContext._get_tls_geo(), val) return ret_val @classmethod def floor_(cls, z): """ Calculates the floor of the value :param z: Real :type z: float :returns: Real :rtype: float .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._floor_(GXContext._get_tls_geo(), z) return ret_val @classmethod def hypot_(cls, x, y): """ sqrt(X*X + Y*Y) :param x: X :param y: Y :type x: float :type y: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If either X or Y is a dummy, the returned value is dummy """ ret_val = gxapi_cy.WrapMATH._hypot_(GXContext._get_tls_geo(), x, y) return ret_val @classmethod def lambda_trans_(cls, z, lda): """ Performs lambda transform on a value. :param z: Z Value :param lda: Lambda value :type z: float :type lda: float :returns: The lambda transformed value :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns 0 for input Z = 0. returns log10(Z) for lambda = 0. returns (Z^lambda - 1)/lambda for Z > 0. returns dummy for Z = dummy. .. seealso:: `lambda_trans_rev_ <geosoft.gxapi.GXMATH.lambda_trans_rev_>` """ ret_val = gxapi_cy.WrapMATH._lambda_trans_(GXContext._get_tls_geo(), z, lda) return ret_val @classmethod def lambda_trans_rev_(cls, z, lda): """ Performs a reverse lambda transform on a value. :param z: Lambda transformed Z Value :param lda: Lambda value :type z: float :type lda: float :returns: The original non-lambda transformed value :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See rLambdaTrans. .. 
seealso:: `lambda_trans_ <geosoft.gxapi.GXMATH.lambda_trans_>` """ ret_val = gxapi_cy.WrapMATH._lambda_trans_rev_(GXContext._get_tls_geo(), z, lda) return ret_val @classmethod def log_(cls, val): """ Calculate the natural log :param val: Real :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._log_(GXContext._get_tls_geo(), val) return ret_val @classmethod def log10_(cls, val): """ Calculate the base 10 log :param val: Real :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._log10_(GXContext._get_tls_geo(), val) return ret_val @classmethod def log_z_(cls, z, mode, min): """ Given a Z value and the Log style and Log Minimum this function will return the log value. :param z: Z Value :param mode: Log Mode (0 - Log, 1 - LogLinearLog) :param min: Log Minimum (must be greater than 0) :type z: float :type mode: int :type min: float :returns: The Log Value. :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Mode = 0 (regular log mode) returns: :: Log10(Z) for Z > minimum Log10(minimum) for Z <= minimum Mode = 1 (log / linear / negative log mode) returns: :: minimum * ( log10( |Z| / minimum) + 1 ) for Z > minimum Z for |Z| <= minimum (the linear part of the range) -minimum * ( log10( |Z| / minimum) + 1 ) for Z < -minimum .. 
seealso:: `un_log_z_ <geosoft.gxapi.GXMATH.un_log_z_>` """ ret_val = gxapi_cy.WrapMATH._log_z_(GXContext._get_tls_geo(), z, mode, min) return ret_val @classmethod def mod_double_(cls, a, b): """ Calculates the modulus of two reals (A mod B) :param a: A :param b: B (must not be zero) :type a: float :type b: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The modulus of A with respect to B is defined as the difference of A with the largest integral multiple of B smaller than or equal to A. e.g. A mod B 20 mod 10 = 0 20 mod 9 = 2 f A or B is a dummy, returns dummy. """ ret_val = gxapi_cy.WrapMATH._mod_double_(GXContext._get_tls_geo(), a, b) return ret_val @classmethod def rotate_vector_(cls, x1, y1, z1, angle, x2, y2, z2, x3, y3, z3): """ Rotate a vector about an axis. :param x1: X1 component (vector to rotate) :param y1: Y1 component :param z1: Z1 component :param angle: Angle to rotate, CW in radians :param x2: X2 component (axis of rotation) :param y2: Y2 component :param z2: Z2 component :param x3: X3 component (rotated vector, can :param y3: Y3 component be the same as input) :param z3: Z3 component :type x1: float :type y1: float :type z1: float :type angle: float :type x2: float :type y2: float :type z2: float :type x3: float_ref :type y3: float_ref :type z3: float_ref .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Rotates a vector by the input angle around an arbitrary axis. Angles are measured clockwise looking along the axis (away from the origin). Assumes a right hand coordinate system. 
""" x3.value, y3.value, z3.value = gxapi_cy.WrapMATH._rotate_vector_(GXContext._get_tls_geo(), x1, y1, z1, angle, x2, y2, z2, x3.value, y3.value, z3.value) @classmethod def pow_(cls, x, y): """ Calculate X raised to the power of Y :param x: X :param y: Y :type x: float :type y: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If either X or Y is a dummy, returns dummy """ ret_val = gxapi_cy.WrapMATH._pow_(GXContext._get_tls_geo(), x, y) return ret_val @classmethod def rand_(cls): """ Get a random number between 0 and 1 :returns: A real number :rtype: float .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use `s_rand_ <geosoft.gxapi.GXMATH.s_rand_>` to seed the random number generator before a series of calls to this function are made. The standard "C" function rand() is called. """ ret_val = gxapi_cy.WrapMATH._rand_(GXContext._get_tls_geo()) return ret_val @classmethod def round_double_(cls, z, n): """ Round to n significant digits :param z: Real :param n: Number of significant digits to round to :type z: float :type n: int :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Negative values ending in 5XXX to n sig digits round down Positive values ending in 5XXX to n sig digits round up Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._round_double_(GXContext._get_tls_geo(), z, n) return ret_val @classmethod def sign_(cls, z_sign, z_val): """ Determine return value based on value of Z1 :param z_sign: Z1 :param z_val: Z2 :type z_sign: float :type z_val: float :returns: ``|Z2| if Z1 > 0, -|Z2| if Z1 < 0, 0 if Z1 = 0, and Z2 if Z1 = Dummy`` :rtype: float .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._sign_(GXContext._get_tls_geo(), z_sign, z_val) return ret_val @classmethod def sin_(cls, val): """ Calculate the sin :param val: Angle in radians :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._sin_(GXContext._get_tls_geo(), val) return ret_val @classmethod def sqrt_(cls, val): """ Calculate the square root :param val: Real :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._sqrt_(GXContext._get_tls_geo(), val) return ret_val @classmethod def tan_(cls, val): """ Calculate the tangent :param val: Angle in radians :type val: float :returns: Real :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Dummy values return dummy """ ret_val = gxapi_cy.WrapMATH._tan_(GXContext._get_tls_geo(), val) return ret_val @classmethod def un_log_z_(cls, z, mode, min): """ Inverse of rLogZ :param z: Log value :param mode: Log Mode (0 - Log, 1 - LogLinearLog) :param min: Log Minimum (must be greater than 0) :type z: float :type mode: int :type min: float :returns: The original value :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See Notes for rLogZ. .. 
seealso:: `log_z_ <geosoft.gxapi.GXMATH.log_z_>` """ ret_val = gxapi_cy.WrapMATH._un_log_z_(GXContext._get_tls_geo(), z, mode, min) return ret_val @classmethod def s_rand_(cls): """ Seed the random-number generator with current time .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Use the `rand_ <geosoft.gxapi.GXMATH.rand_>` function to create a random number between 0 and 1. The standard "C" function srand() is called. """ gxapi_cy.WrapMATH._s_rand_(GXContext._get_tls_geo()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/Geosoft Databases/modify_channel_data_vv.py import geosoft.gxpy as gxpy import geosoft.gxpy.gdb as gxdb import geosoft.gxapi as gxapi gxc = gxpy.gx.GXpy() # open the database, best practice is to use a 'with ...' construct with gxdb.Geosoft_gdb.open('mag_data_split') as gdb: # make a new channel for the output, duplicate properties of 'mag' channel new_mag_channel = gxdb.Channel.new(gdb, 'mag_base', dup='mag', replace=True) # work through each line for line in gdb.list_lines(): print ('processing line {}'.format(line)) # read data from the line. # The read_channel method returns the data in a geosoft VV mag_data = gdb.read_channel_vv(line, 'mag') # use Geosoft GXVVU.translate function to subtract 5000. gxapi.GXVVU.translate(mag_data.gxvv, -5000, 1) gdb.write_channel_vv(line, new_mag_channel, mag_data) exit() <file_sep>/docs/GXKGRD.rst .. _GXKGRD: GXKGRD class ================================== .. autoclass:: geosoft.gxapi.GXKGRD :members: <file_sep>/docs/GXVVU.rst .. _GXVVU: GXVVU class ================================== .. autoclass:: geosoft.gxapi.GXVVU :members: .. 
_QC_CRITERION: QC_CRITERION constants ----------------------------------------------------------------------- Criterion .. autodata:: geosoft.gxapi.QC_CRITERION_1 :annotation: .. autoattribute:: geosoft.gxapi.QC_CRITERION_1 .. autodata:: geosoft.gxapi.QC_CRITERION_2 :annotation: .. autoattribute:: geosoft.gxapi.QC_CRITERION_2 .. autodata:: geosoft.gxapi.QC_CRITERION_12 :annotation: .. autoattribute:: geosoft.gxapi.QC_CRITERION_12 .. _TEM_ARRAY: TEM_ARRAY constants ----------------------------------------------------------------------- Array Type .. autodata:: geosoft.gxapi.TEM_ARRAY_VERTICALSOUNDING :annotation: .. autoattribute:: geosoft.gxapi.TEM_ARRAY_VERTICALSOUNDING .. autodata:: geosoft.gxapi.TEM_ARRAY_PROFILING :annotation: .. autoattribute:: geosoft.gxapi.TEM_ARRAY_PROFILING .. autodata:: geosoft.gxapi.TEM_ARRAY_BOREHOLE :annotation: .. autoattribute:: geosoft.gxapi.TEM_ARRAY_BOREHOLE .. _VV_DUP: VV_DUP constants ----------------------------------------------------------------------- Duplicate handling mode .. autodata:: geosoft.gxapi.VV_DUP_AVERAGE :annotation: .. autoattribute:: geosoft.gxapi.VV_DUP_AVERAGE .. autodata:: geosoft.gxapi.VV_DUP_1 :annotation: .. autoattribute:: geosoft.gxapi.VV_DUP_1 .. autodata:: geosoft.gxapi.VV_DUP_2 :annotation: .. autoattribute:: geosoft.gxapi.VV_DUP_2 .. autodata:: geosoft.gxapi.VV_DUP_DUMMY :annotation: .. autoattribute:: geosoft.gxapi.VV_DUP_DUMMY .. autodata:: geosoft.gxapi.VV_DUP_SAMPLE :annotation: .. autoattribute:: geosoft.gxapi.VV_DUP_SAMPLE .. _VV_XYDUP: VV_XYDUP constants ----------------------------------------------------------------------- Sample handling .. autodata:: geosoft.gxapi.VV_XYDUP_AVERAGE :annotation: .. autoattribute:: geosoft.gxapi.VV_XYDUP_AVERAGE .. autodata:: geosoft.gxapi.VV_XYDUP_SUM :annotation: .. autoattribute:: geosoft.gxapi.VV_XYDUP_SUM .. _VVU_CASE: VVU_CASE constants ----------------------------------------------------------------------- String case handling .. 
autodata:: geosoft.gxapi.VVU_CASE_TOLERANT :annotation: .. autoattribute:: geosoft.gxapi.VVU_CASE_TOLERANT .. autodata:: geosoft.gxapi.VVU_CASE_SENSITIVE :annotation: .. autoattribute:: geosoft.gxapi.VVU_CASE_SENSITIVE .. _VVU_CLIP: VVU_CLIP constants ----------------------------------------------------------------------- Type of clipping .. autodata:: geosoft.gxapi.VVU_CLIP_DUMMY :annotation: .. autoattribute:: geosoft.gxapi.VVU_CLIP_DUMMY .. autodata:: geosoft.gxapi.VVU_CLIP_LIMIT :annotation: .. autoattribute:: geosoft.gxapi.VVU_CLIP_LIMIT .. _VVU_DUMMYREPEAT: VVU_DUMMYREPEAT constants ----------------------------------------------------------------------- How to deal with repeats .. autodata:: geosoft.gxapi.VVU_DUMMYREPEAT_FIRST :annotation: .. autoattribute:: geosoft.gxapi.VVU_DUMMYREPEAT_FIRST .. autodata:: geosoft.gxapi.VVU_DUMMYREPEAT_LAST :annotation: .. autoattribute:: geosoft.gxapi.VVU_DUMMYREPEAT_LAST .. autodata:: geosoft.gxapi.VVU_DUMMYREPEAT_MIDDLE :annotation: .. autoattribute:: geosoft.gxapi.VVU_DUMMYREPEAT_MIDDLE .. _VVU_INTERP: VVU_INTERP constants ----------------------------------------------------------------------- Interpolation method to use .. autodata:: geosoft.gxapi.VVU_INTERP_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.VVU_INTERP_NEAREST .. autodata:: geosoft.gxapi.VVU_INTERP_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.VVU_INTERP_LINEAR .. autodata:: geosoft.gxapi.VVU_INTERP_CUBIC :annotation: .. autoattribute:: geosoft.gxapi.VVU_INTERP_CUBIC .. autodata:: geosoft.gxapi.VVU_INTERP_AKIMA :annotation: .. autoattribute:: geosoft.gxapi.VVU_INTERP_AKIMA .. autodata:: geosoft.gxapi.VVU_INTERP_PREDICT :annotation: .. autoattribute:: geosoft.gxapi.VVU_INTERP_PREDICT .. _VVU_INTERP_EDGE: VVU_INTERP_EDGE constants ----------------------------------------------------------------------- Interpolation method to use on edges .. autodata:: geosoft.gxapi.VVU_INTERP_EDGE_NONE :annotation: .. 
autoattribute:: geosoft.gxapi.VVU_INTERP_EDGE_NONE .. autodata:: geosoft.gxapi.VVU_INTERP_EDGE_SAME :annotation: .. autoattribute:: geosoft.gxapi.VVU_INTERP_EDGE_SAME .. autodata:: geosoft.gxapi.VVU_INTERP_EDGE_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.VVU_INTERP_EDGE_NEAREST .. autodata:: geosoft.gxapi.VVU_INTERP_EDGE_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.VVU_INTERP_EDGE_LINEAR .. _VVU_LINE: VVU_LINE constants ----------------------------------------------------------------------- Line Types .. autodata:: geosoft.gxapi.LINE_2_POINTS :annotation: .. autoattribute:: geosoft.gxapi.LINE_2_POINTS .. autodata:: geosoft.gxapi.LINE_POINT_AZIMUTH :annotation: .. autoattribute:: geosoft.gxapi.LINE_POINT_AZIMUTH .. _VVU_MASK: VVU_MASK constants ----------------------------------------------------------------------- Type of clipping .. autodata:: geosoft.gxapi.VVU_MASK_INSIDE :annotation: .. autoattribute:: geosoft.gxapi.VVU_MASK_INSIDE .. autodata:: geosoft.gxapi.VVU_MASK_OUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.VVU_MASK_OUTSIDE .. _VVU_MATCH: VVU_MATCH constants ----------------------------------------------------------------------- Matching style .. autodata:: geosoft.gxapi.VVU_MATCH_FULL_STRINGS :annotation: .. autoattribute:: geosoft.gxapi.VVU_MATCH_FULL_STRINGS .. autodata:: geosoft.gxapi.VVU_MATCH_INPUT_LENGTH :annotation: .. autoattribute:: geosoft.gxapi.VVU_MATCH_INPUT_LENGTH .. _VVU_MODE: VVU_MODE constants ----------------------------------------------------------------------- Statistic to select .. autodata:: geosoft.gxapi.VVU_MODE_MEAN :annotation: .. autoattribute:: geosoft.gxapi.VVU_MODE_MEAN .. autodata:: geosoft.gxapi.VVU_MODE_MEDIAN :annotation: .. autoattribute:: geosoft.gxapi.VVU_MODE_MEDIAN .. autodata:: geosoft.gxapi.VVU_MODE_MAXIMUM :annotation: .. autoattribute:: geosoft.gxapi.VVU_MODE_MAXIMUM .. autodata:: geosoft.gxapi.VVU_MODE_MINIMUM :annotation: .. autoattribute:: geosoft.gxapi.VVU_MODE_MINIMUM .. 
_VVU_OFFSET: VVU_OFFSET constants ----------------------------------------------------------------------- Heading .. autodata:: geosoft.gxapi.VVU_OFFSET_FORWARD :annotation: .. autoattribute:: geosoft.gxapi.VVU_OFFSET_FORWARD .. autodata:: geosoft.gxapi.VVU_OFFSET_BACKWARD :annotation: .. autoattribute:: geosoft.gxapi.VVU_OFFSET_BACKWARD .. autodata:: geosoft.gxapi.VVU_OFFSET_RIGHT :annotation: .. autoattribute:: geosoft.gxapi.VVU_OFFSET_RIGHT .. autodata:: geosoft.gxapi.VVU_OFFSET_LEFT :annotation: .. autoattribute:: geosoft.gxapi.VVU_OFFSET_LEFT .. _VVU_PRUNE: VVU_PRUNE constants ----------------------------------------------------------------------- Prune options .. autodata:: geosoft.gxapi.VVU_PRUNE_DUMMY :annotation: .. autoattribute:: geosoft.gxapi.VVU_PRUNE_DUMMY .. autodata:: geosoft.gxapi.VVU_PRUNE_VALID :annotation: .. autoattribute:: geosoft.gxapi.VVU_PRUNE_VALID .. _VVU_SPL: VVU_SPL constants ----------------------------------------------------------------------- Spline types .. autodata:: geosoft.gxapi.VVU_SPL_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.VVU_SPL_LINEAR .. autodata:: geosoft.gxapi.VVU_SPL_CUBIC :annotation: .. autoattribute:: geosoft.gxapi.VVU_SPL_CUBIC .. autodata:: geosoft.gxapi.VVU_SPL_AKIMA :annotation: .. autoattribute:: geosoft.gxapi.VVU_SPL_AKIMA .. autodata:: geosoft.gxapi.VVU_SPL_NEAREST :annotation: .. autoattribute:: geosoft.gxapi.VVU_SPL_NEAREST .. _VVU_SRCHREPL_CASE: VVU_SRCHREPL_CASE constants ----------------------------------------------------------------------- Search and Replace handling of string case .. autodata:: geosoft.gxapi.VVU_SRCHREPL_CASE_TOLERANT :annotation: .. autoattribute:: geosoft.gxapi.VVU_SRCHREPL_CASE_TOLERANT .. autodata:: geosoft.gxapi.VVU_SRCHREPL_CASE_SENSITIVE :annotation: .. 
autoattribute:: geosoft.gxapi.VVU_SRCHREPL_CASE_SENSITIVE <file_sep>/examples/tutorial/Geosoft Project/geosoft_project.py import geosoft.gxpy.project as gxpj def rungx(): project = gxpj.Geosoft_project() gxpj.user_message("Project name: {}".format(project.name), "Project user: {}".format(project.gid))<file_sep>/geosoft/gxapi/GX3DC.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GX3DC(gxapi_cy.Wrap3DC): """ GX3DC class. This is a 3D container class which facilitates rendering a 3D viewport to controls. To be used in tandem with the Geosoft.View3D.View class present in geoengine.3dv.csharp. Creation of the 3D container is facilitated through Create_3DC, and disposal through the instance method Destroy_3DC. """ def __init__(self, handle=0): super(GX3DC, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GX3DC <geosoft.gxapi.GX3DC>` :returns: A null `GX3DC <geosoft.gxapi.GX3DC>` :rtype: GX3DC """ return GX3DC() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, handle): """ Create a 3D view container which can be used to instantiate a full 3D View. :param handle: Window handle for the OpenGL context. :type handle: int :returns: `GX3DC <geosoft.gxapi.GX3DC>` object :rtype: GX3DC .. versionadded:: 2022.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. 
seealso:: DestroyInternal_3DC """ ret_val = gxapi_cy.Wrap3DC._create(GXContext._get_tls_geo(), handle) return GX3DC(ret_val) def get_geo_view(self): """ Retrieves the GeoView associated with the 3D container. :rtype: Type.INT64_T .. versionadded:: 2022.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_geo_view() return ret_val def destroy_internal(self): """ Destroys a 3D container object and cleans up any unmanaged resources. .. versionadded:: 2022.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._destroy_internal() ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXMSTK.rst .. _GXMSTK: GXMSTK class ================================== .. autoclass:: geosoft.gxapi.GXMSTK :members: <file_sep>/examples/geosoft_research/self_organizing_maps/python/som_om_ui_qt5.py # -*- coding: utf-8 -*- # From implementation generated from reading ui file 'ui_som_om.ui' # # Created by: PyQt5 UI code generator 5.6 # # WARNING! All changes made in this file will be lost! 
from PyQt5 import QtCore, QtGui, QtWidgets class Ui_som_om: def setupUi(self, som_om): som_om.setObjectName("som_om") som_om.resize(438, 777) self.layoutWidget = QtWidgets.QWidget(som_om) self.layoutWidget.setGeometry(QtCore.QRect(10, 10, 421, 34)) self.layoutWidget.setObjectName("layoutWidget") self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.layoutWidget) self.verticalLayout_2.setContentsMargins(0, 0, 0, 0) self.verticalLayout_2.setObjectName("verticalLayout_2") self.label_5 = QtWidgets.QLabel(self.layoutWidget) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_5.setFont(font) self.label_5.setObjectName("label_5") self.verticalLayout_2.addWidget(self.label_5) self.databaseName = QtWidgets.QLabel(self.layoutWidget) self.databaseName.setObjectName("databaseName") self.verticalLayout_2.addWidget(self.databaseName) self.layoutWidget1 = QtWidgets.QWidget(som_om) self.layoutWidget1.setGeometry(QtCore.QRect(332, 47, 101, 461)) self.layoutWidget1.setObjectName("layoutWidget1") self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.layoutWidget1) self.verticalLayout_3.setContentsMargins(0, 0, 0, 0) self.verticalLayout_3.setObjectName("verticalLayout_3") self.label_6 = QtWidgets.QLabel(self.layoutWidget1) self.label_6.setAlignment(QtCore.Qt.AlignCenter) self.label_6.setObjectName("label_6") self.verticalLayout_3.addWidget(self.label_6) self.verticalLayout = QtWidgets.QVBoxLayout() self.verticalLayout.setObjectName("verticalLayout") self.norm = QtWidgets.QComboBox(self.layoutWidget1) self.norm.setObjectName("norm") self.verticalLayout.addWidget(self.norm) self.norm_1 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_1.setObjectName("norm_1") self.verticalLayout.addWidget(self.norm_1) self.norm_2 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_2.setObjectName("norm_2") self.verticalLayout.addWidget(self.norm_2) self.norm_3 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_3.setObjectName("norm_3") self.verticalLayout.addWidget(self.norm_3) 
self.norm_4 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_4.setObjectName("norm_4") self.verticalLayout.addWidget(self.norm_4) self.norm_5 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_5.setObjectName("norm_5") self.verticalLayout.addWidget(self.norm_5) self.norm_6 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_6.setObjectName("norm_6") self.verticalLayout.addWidget(self.norm_6) self.norm_7 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_7.setObjectName("norm_7") self.verticalLayout.addWidget(self.norm_7) self.norm_8 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_8.setObjectName("norm_8") self.verticalLayout.addWidget(self.norm_8) self.norm_9 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_9.setObjectName("norm_9") self.verticalLayout.addWidget(self.norm_9) self.norm_10 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_10.setObjectName("norm_10") self.verticalLayout.addWidget(self.norm_10) self.norm_11 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_11.setObjectName("norm_11") self.verticalLayout.addWidget(self.norm_11) self.norm_12 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_12.setObjectName("norm_12") self.verticalLayout.addWidget(self.norm_12) self.norm_13 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_13.setObjectName("norm_13") self.verticalLayout.addWidget(self.norm_13) self.norm_14 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_14.setObjectName("norm_14") self.verticalLayout.addWidget(self.norm_14) self.norm_15 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_15.setObjectName("norm_15") self.verticalLayout.addWidget(self.norm_15) self.norm_16 = QtWidgets.QComboBox(self.layoutWidget1) self.norm_16.setObjectName("norm_16") self.verticalLayout.addWidget(self.norm_16) self.verticalLayout_3.addLayout(self.verticalLayout) self.layoutWidget2 = QtWidgets.QWidget(som_om) self.layoutWidget2.setGeometry(QtCore.QRect(10, 620, 421, 148)) self.layoutWidget2.setObjectName("layoutWidget2") self.verticalLayout_5 = 
QtWidgets.QVBoxLayout(self.layoutWidget2) self.verticalLayout_5.setContentsMargins(0, 0, 0, 0) self.verticalLayout_5.setObjectName("verticalLayout_5") self.gridLayout = QtWidgets.QGridLayout() self.gridLayout.setObjectName("gridLayout") self.filtLab = QtWidgets.QLabel(self.layoutWidget2) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.filtLab.setFont(font) self.filtLab.setObjectName("filtLab") self.gridLayout.addWidget(self.filtLab, 0, 0, 1, 1) self.label = QtWidgets.QLabel(self.layoutWidget2) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label.setFont(font) self.label.setObjectName("label") self.gridLayout.addWidget(self.label, 0, 1, 1, 1) self.filterChan = QtWidgets.QComboBox(self.layoutWidget2) self.filterChan.setEditable(False) self.filterChan.setObjectName("filterChan") self.gridLayout.addWidget(self.filterChan, 1, 0, 1, 1) self.label_3 = QtWidgets.QLabel(self.layoutWidget2) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_3.setFont(font) self.label_3.setObjectName("label_3") self.gridLayout.addWidget(self.label_3, 2, 0, 1, 1) self.label_4 = QtWidgets.QLabel(self.layoutWidget2) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_4.setFont(font) self.label_4.setObjectName("label_4") self.gridLayout.addWidget(self.label_4, 2, 1, 1, 1) self.outClass = QtWidgets.QLineEdit(self.layoutWidget2) self.outClass.setObjectName("outClass") self.gridLayout.addWidget(self.outClass, 3, 0, 1, 1) self.outError = QtWidgets.QLineEdit(self.layoutWidget2) self.outError.setObjectName("outError") self.gridLayout.addWidget(self.outError, 3, 1, 1, 1) self.filterVal = QtWidgets.QLineEdit(self.layoutWidget2) self.filterVal.setObjectName("filterVal") self.gridLayout.addWidget(self.filterVal, 1, 1, 1, 1) self.verticalLayout_5.addLayout(self.gridLayout) self.gridLayout_2 = QtWidgets.QGridLayout() self.gridLayout_2.setObjectName("gridLayout_2") self.classButton = QtWidgets.QPushButton(self.layoutWidget2) font = 
QtGui.QFont() font.setBold(True) font.setWeight(75) self.classButton.setFont(font) self.classButton.setObjectName("classButton") self.gridLayout_2.addWidget(self.classButton, 0, 0, 1, 1) self.progLabel = QtWidgets.QLabel(self.layoutWidget2) self.progLabel.setObjectName("progLabel") self.gridLayout_2.addWidget(self.progLabel, 0, 1, 1, 1) self.stopButton = QtWidgets.QPushButton(self.layoutWidget2) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.stopButton.setFont(font) self.stopButton.setObjectName("stopButton") self.gridLayout_2.addWidget(self.stopButton, 1, 0, 1, 1) self.progressBar = QtWidgets.QProgressBar(self.layoutWidget2) self.progressBar.setProperty("value", 0) self.progressBar.setTextVisible(False) self.progressBar.setObjectName("progressBar") self.gridLayout_2.addWidget(self.progressBar, 1, 1, 1, 1) self.verticalLayout_5.addLayout(self.gridLayout_2) self.line = QtWidgets.QFrame(som_om) self.line.setGeometry(QtCore.QRect(12, 511, 421, 16)) self.line.setFrameShape(QtWidgets.QFrame.HLine) self.line.setFrameShadow(QtWidgets.QFrame.Sunken) self.line.setObjectName("line") self.layoutWidget3 = QtWidgets.QWidget(som_om) self.layoutWidget3.setGeometry(QtCore.QRect(14, 57, 311, 450)) self.layoutWidget3.setObjectName("layoutWidget3") self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.layoutWidget3) self.verticalLayout_4.setContentsMargins(0, 0, 0, 0) self.verticalLayout_4.setObjectName("verticalLayout_4") self.label_9 = QtWidgets.QLabel(self.layoutWidget3) self.label_9.setText("") self.label_9.setObjectName("label_9") self.verticalLayout_4.addWidget(self.label_9) self.label_2 = QtWidgets.QLabel(self.layoutWidget3) font = QtGui.QFont() font.setBold(True) font.setWeight(75) self.label_2.setFont(font) self.label_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter) self.label_2.setObjectName("label_2") self.verticalLayout_4.addWidget(self.label_2) self.chan_1 = QtWidgets.QComboBox(self.layoutWidget3) 
self.chan_1.setObjectName("chan_1") self.verticalLayout_4.addWidget(self.chan_1) self.chan_2 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_2.setObjectName("chan_2") self.verticalLayout_4.addWidget(self.chan_2) self.chan_3 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_3.setObjectName("chan_3") self.verticalLayout_4.addWidget(self.chan_3) self.chan_4 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_4.setObjectName("chan_4") self.verticalLayout_4.addWidget(self.chan_4) self.chan_5 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_5.setObjectName("chan_5") self.verticalLayout_4.addWidget(self.chan_5) self.chan_6 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_6.setObjectName("chan_6") self.verticalLayout_4.addWidget(self.chan_6) self.chan_7 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_7.setObjectName("chan_7") self.verticalLayout_4.addWidget(self.chan_7) self.chan_8 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_8.setObjectName("chan_8") self.verticalLayout_4.addWidget(self.chan_8) self.chan_9 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_9.setObjectName("chan_9") self.verticalLayout_4.addWidget(self.chan_9) self.chan_10 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_10.setObjectName("chan_10") self.verticalLayout_4.addWidget(self.chan_10) self.chan_11 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_11.setObjectName("chan_11") self.verticalLayout_4.addWidget(self.chan_11) self.chan_12 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_12.setObjectName("chan_12") self.verticalLayout_4.addWidget(self.chan_12) self.chan_13 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_13.setObjectName("chan_13") self.verticalLayout_4.addWidget(self.chan_13) self.chan_14 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_14.setObjectName("chan_14") self.verticalLayout_4.addWidget(self.chan_14) self.chan_15 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_15.setObjectName("chan_15") self.verticalLayout_4.addWidget(self.chan_15) 
self.chan_16 = QtWidgets.QComboBox(self.layoutWidget3) self.chan_16.setObjectName("chan_16") self.verticalLayout_4.addWidget(self.chan_16) self.layoutWidget4 = QtWidgets.QWidget(som_om) self.layoutWidget4.setGeometry(QtCore.QRect(88, 530, 238, 76)) self.layoutWidget4.setObjectName("layoutWidget4") self.horizontalLayout = QtWidgets.QHBoxLayout(self.layoutWidget4) self.horizontalLayout.setContentsMargins(0, 0, 0, 0) self.horizontalLayout.setObjectName("horizontalLayout") self.verticalLayout_7 = QtWidgets.QVBoxLayout() self.verticalLayout_7.setObjectName("verticalLayout_7") self.similarity = QtWidgets.QLabel(self.layoutWidget4) self.similarity.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.similarity.setObjectName("similarity") self.verticalLayout_7.addWidget(self.similarity) self.label_7 = QtWidgets.QLabel(self.layoutWidget4) self.label_7.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_7.setObjectName("label_7") self.verticalLayout_7.addWidget(self.label_7) self.label_8 = QtWidgets.QLabel(self.layoutWidget4) self.label_8.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.label_8.setObjectName("label_8") self.verticalLayout_7.addWidget(self.label_8) self.horizontalLayout.addLayout(self.verticalLayout_7) self.verticalLayout_6 = QtWidgets.QVBoxLayout() self.verticalLayout_6.setObjectName("verticalLayout_6") self.similarity_func = QtWidgets.QComboBox(self.layoutWidget4) self.similarity_func.setCurrentText("") self.similarity_func.setObjectName("similarity_func") self.verticalLayout_6.addWidget(self.similarity_func) self.nClasses = QtWidgets.QComboBox(self.layoutWidget4) self.nClasses.setObjectName("nClasses") self.verticalLayout_6.addWidget(self.nClasses) self.anomPercent = QtWidgets.QLineEdit(self.layoutWidget4) self.anomPercent.setObjectName("anomPercent") self.verticalLayout_6.addWidget(self.anomPercent) 
self.horizontalLayout.addLayout(self.verticalLayout_6) self.layoutWidget.raise_() self.layoutWidget.raise_() self.layoutWidget.raise_() self.layoutWidget.raise_() self.layoutWidget.raise_() self.line.raise_() self.retranslateUi(som_om) self.similarity_func.setCurrentIndex(-1) QtCore.QMetaObject.connectSlotsByName(som_om) def retranslateUi(self, som_om): _translate = QtCore.QCoreApplication.translate som_om.setWindowTitle(_translate("som_om", "Self Organizing Classification")) self.label_5.setText(_translate("som_om", "Database:")) self.databaseName.setText(_translate("som_om", "database name...")) self.label_6.setText(_translate("som_om", "normalize")) self.filtLab.setText(_translate("som_om", "Filter on:")) self.label.setText(_translate("som_om", "Filter value:")) self.label_3.setText(_translate("som_om", "Save classification to:")) self.label_4.setText(_translate("som_om", "Save fit to:")) self.classButton.setText(_translate("som_om", "Classify")) self.progLabel.setText(_translate("som_om", "...")) self.stopButton.setText(_translate("som_om", "Stop")) self.label_2.setText(_translate("som_om", "Analyse data:")) self.similarity.setText(_translate("som_om", "Similarity function")) self.label_7.setText(_translate("som_om", "Base classes")) self.label_8.setText(_translate("som_om", "Anomalous %")) self.similarity_func.setToolTip(_translate("som_om", "Method to determine similarity")) self.nClasses.setToolTip(_translate("som_om", "Number of base classes")) <file_sep>/geosoft/gxapi/GXEXP.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXEXP(gxapi_cy.WrapEXP): """ GXEXP class. 
`GXEXP <geosoft.gxapi.GXEXP>` objects are created from text strings that contain C-like math to be applied to channels in a database. It is used with the `GXDU.math <geosoft.gxapi.GXDU.math>` function (see `GXDU <geosoft.gxapi.GXDU>`). See also `GXIEXP <geosoft.gxapi.GXIEXP>` for applying math expressions to images (grids). See also `GXDU.math <geosoft.gxapi.GXDU.math>` applies expressions to the database """ def __init__(self, handle=0): super(GXEXP, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXEXP <geosoft.gxapi.GXEXP>` :returns: A null `GXEXP <geosoft.gxapi.GXEXP>` :rtype: GXEXP """ return GXEXP() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, db, formula, unused): """ This method creates an `GXEXP <geosoft.gxapi.GXEXP>` object. :param db: Database Object :param formula: Expression using channel names :param unused: Legacy parameter, no longer used. :type db: GXDB :type formula: str :type unused: int :returns: `GXEXP <geosoft.gxapi.GXEXP>` Object :rtype: GXEXP .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Expressions are strings that contain C-like math to be applied to channels in a database. For example, following an expression: :: "@a = mag-64000; @b = gravity*100; $sRatio = @a/@b; MULT = @a *@b;" Rules: ``;`` terminates a sub-expression ``@`` prefix to a temporary name, which is a double precision floating point number to be used later in the same expression. ``$`` prefix to a local GX variable name. Such names will be evaluated to the variable value at the time `create <geosoft.gxapi.GXEXP.create>` is called. All other tokens are assumed to be channel names. 
""" ret_val = gxapi_cy.WrapEXP._create(GXContext._get_tls_geo(), db, formula.encode(), unused) return GXEXP(ret_val) @classmethod def create_file(cls, db, file): """ This method creates an `GXEXP <geosoft.gxapi.GXEXP>` object from a file :param db: Database Object :param file: File name :type db: GXDB :type file: str :returns: `GXEXP <geosoft.gxapi.GXEXP>` Object :rtype: GXEXP .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapEXP._create_file(GXContext._get_tls_geo(), db, file.encode()) return GXEXP(ret_val) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/README.md # Geosoft GX for Python Repository This is the repository for Seequent's Geosoft GX Developer support for Python development. Refer to the documentation for more information. [GX Developer documentation](https://geosoftgxdev.atlassian.net/wiki/display/GD/Python+in+GX+Developer) From release 9.6, tutorials for various subjects are available as Jupyter Notebooks, and are added as subjects are developed. See the following github page for what is currently available: https://github.com/GeosoftInc/gxpy/tree/master/examples/jupyter_notebooks/Tutorials (Select the branch that matches the GX Developer version you are working with.) If you are new to GX Developer, refer to the [GX Developer tutorial](https://geosoftgxdev.atlassian.net/wiki/spaces/GXD93/pages/103153671/Python+Tutorial+for+Geosoft+GX+Developer) that is documented as part of the 9.3 release. [Python Packages](https://github.com/GeosoftInc/gxpy/wiki) Also see the [Geosoft organization on Github](https://github.com/GeosoftInc) for the other programming language specific repos. 
Quick Start
-----------

### Configuration ###

See [Python Configuration Menu](https://github.com/GeosoftInc/gxpy/wiki/Python-menu-for-Geosoft-Desktop) to install a Python menu that simplifies Python configuration for an Oasis montaj installation.

To update an existing Python installation, load the Python menu from your User Menus and select Python > Configure Python... > update geosoft package. If you encounter problems due to a non-standard installation you can also update Python manually (see below).

### Manual Configuration ###

Uninstall Geosoft from Python, then install version 2023.1 as follows (you must have the Geosoft Desktop 2023.1 platform installed).

```
pip uninstall geosoft
pip install geosoft
```

Or, alternatively:

```
pip install geosoft --upgrade
```

### Version Compatibility ###

The base GX API, which is exposed to Python by the ___geosoft.gxapi___ module, is consistent across versions. This means that earlier versions of ___geosoft.gxpy___ will work with Geosoft Desktop 2023.1. While we recommend that older scripts be updated to conform to the 2023.1 API, should you need support for multiple versions of ___geosoft.gxpy___ you can create separate Anaconda Python environments for each version. For example, you might create an environment ___'py35_gx91'___ for Python 3.5 and the GX API version 9.1, ___'py36_gx92'___ for Python 3.6 and GX Developer 9.2 and ___'py36_gx96'___ for GX Developer 9.6.

If you do not depend on earlier versions of the GX Developer Python API it is best to use only the most recently released API. Version 2023.1 supports Python 3.7, 3.8, 3.9 and 3.10.

If you need Python 3.4 support, install geosoft version 9.2.1, which will work with both Geosoft Desktop versions 9.2 and 9.5, but will not contain any methods and classes introduced since version 9.2.
If you need Python 3.5 support, install geosoft version 9.5, which will work with both Geosoft Desktop and redistributable versions 9.5 and 9.6, but will not contain any methods and classes introduced since version 9.6. License ------- Any source code found here are released under the [BSD 2-clause license](https://github.com/GeosoftInc/gxpy/blob/master/LICENSE). Core functionality exposed by the GX API may have additional license implications. For more information consult the [License page in the GX Developer Wiki](https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License) <file_sep>/geosoft/gxapi/GXTIN.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXTIN(gxapi_cy.WrapTIN): """ GXTIN class. The `GXTIN <geosoft.gxapi.GXTIN>` class calculates the Delaunay triangulation of the positions in a database. This is the "best" set of triangles that can be formed from irregularly distributed points. The serialized `GXTIN <geosoft.gxapi.GXTIN>` files can be used for gridding using the Tin-based Nearest Neighbour Algorithm, or for plotting the Delaunay triangles or Voronoi cells to a map. """ def __init__(self, handle=0): super(GXTIN, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXTIN <geosoft.gxapi.GXTIN>` :returns: A null `GXTIN <geosoft.gxapi.GXTIN>` :rtype: GXTIN """ return GXTIN() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def copy(self, source): """ Copy `GXTIN <geosoft.gxapi.GXTIN>` :param source: Source `GXTIN <geosoft.gxapi.GXTIN>` :type source: GXTIN .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._copy(source) @classmethod def create(cls, vv_x, vv_y, vv_z): """ This method creates a `GXTIN <geosoft.gxapi.GXTIN>` object. :param vv_x: X positions :param vv_y: Y positions :param vv_z: Z values (optional) :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :returns: `GXTIN <geosoft.gxapi.GXTIN>` Object :rtype: GXTIN .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** CreateTIN does the `GXTIN <geosoft.gxapi.GXTIN>` calculation. The Z values are not required, and a 0-length `GXVV <geosoft.gxapi.GXVV>` can be used to indicate the values are not to be used. """ ret_val = gxapi_cy.WrapTIN._create(GXContext._get_tls_geo(), vv_x, vv_y, vv_z) return GXTIN(ret_val) @classmethod def create_s(cls, bf): """ Create `GXTIN <geosoft.gxapi.GXTIN>` from a serialized source :param bf: `GXBF <geosoft.gxapi.GXBF>` from which to read `GXTIN <geosoft.gxapi.GXTIN>` :type bf: GXBF :returns: `GXTIN <geosoft.gxapi.GXTIN>` Object :rtype: GXTIN .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapTIN._create_s(GXContext._get_tls_geo(), bf) return GXTIN(ret_val) @classmethod def export_xml(cls, tin, crc, file): """ Export a `GXTIN <geosoft.gxapi.GXTIN>` object as XML :param tin: `GXTIN <geosoft.gxapi.GXTIN>` file :param crc: CRC returned (Currently this is not implemented) :param file: Output XML file :type tin: str :type crc: int_ref :type file: str .. 
versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ crc.value = gxapi_cy.WrapTIN._export_xml(GXContext._get_tls_geo(), tin.encode(), crc.value, file.encode()) def get_convex_hull(self, ply): """ Get the convex hull of the `GXTIN <geosoft.gxapi.GXTIN>`. :param ply: `GXPLY <geosoft.gxapi.GXPLY>` object :type ply: GXPLY .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The convex hull is the outside boundary of the triangulated region. """ self._get_convex_hull(ply) def get_ipj(self, ipj): """ Get the projection. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` in which to place the `GXTIN <geosoft.gxapi.GXTIN>` projection :type ipj: GXIPJ .. versionadded:: 5.0.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._get_ipj(ipj) def get_joins(self, vv_joins, vv_index, vv_num): """ Get joins from a `GXTIN <geosoft.gxapi.GXTIN>` mesh. :param vv_joins: Joins `GXVV <geosoft.gxapi.GXVV>` (adjacent nodes) :param vv_index: Index `GXVV <geosoft.gxapi.GXVV>` :param vv_num: Number `GXVV <geosoft.gxapi.GXVV>` :type vv_joins: GXVV :type vv_index: GXVV :type vv_num: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The join information is returned in three VVs. - The joins `GXVV <geosoft.gxapi.GXVV>` is a list off the adjacent nodes for each node, arranged for 1st node, 2nd node etc. - The index `GXVV <geosoft.gxapi.GXVV>` gives the starting index in the joins `GXVV <geosoft.gxapi.GXVV>` for the adjacent nodes to each node. - The number `GXVV <geosoft.gxapi.GXVV>` gives the number of adjacent nodes for each node. 
All VVs must be type `GS_LONG <geosoft.gxapi.GS_LONG>`. """ self._get_joins(vv_joins, vv_index, vv_num) def get_mesh(self, vv): """ Get lines from a `GXTIN <geosoft.gxapi.GXTIN>` mesh. :param vv: `GXVV <geosoft.gxapi.GXVV>` of type GS_D2LINE (returned) :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._get_mesh(vv) def get_nodes(self, vvx, vvy, vvz): """ Get the X,Y locations and Z values of the `GXTIN <geosoft.gxapi.GXTIN>` nodes. :param vvx: X `GXVV <geosoft.gxapi.GXVV>` :param vvy: Y `GXVV <geosoft.gxapi.GXVV>` :param vvz: Z `GXVV <geosoft.gxapi.GXVV>` :type vvx: GXVV :type vvy: GXVV :type vvz: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If this is not a Z-valued `GXTIN <geosoft.gxapi.GXTIN>`, the Z values will be dummies. """ self._get_nodes(vvx, vvy, vvz) def get_triangles(self, tri_vv_pt1, tri_vv_pt2, tri_vv_pt3): """ Get the triangle nodes. :param tri_vv_pt1: Node 1 `GXVV <geosoft.gxapi.GXVV>` :param tri_vv_pt2: Node 2 `GXVV <geosoft.gxapi.GXVV>` :param tri_vv_pt3: Node3 `GXVV <geosoft.gxapi.GXVV>` :type tri_vv_pt1: GXVV :type tri_vv_pt2: GXVV :type tri_vv_pt3: GXVV .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._get_triangles(tri_vv_pt1, tri_vv_pt2, tri_vv_pt3) def get_triangle(self, index, x0, y0, x1, y1, x2, y2): """ Get the locations of the vertices of a specific triangle :param index: Triangle index [0...N-1] :param x0: X0 :param y0: Y0 :param x1: X1 :param y1: Y1 :param x2: X2 :param y2: Y2 :type index: int :type x0: float_ref :type y0: float_ref :type x1: float_ref :type y1: float_ref :type x2: float_ref :type y2: float_ref .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ x0.value, y0.value, x1.value, y1.value, x2.value, y2.value = self._get_triangle(index, x0.value, y0.value, x1.value, y1.value, x2.value, y2.value) def get_voronoi_edges(self, vv): """ Get line segments defining Voronoi cells. :param vv: `GXVV <geosoft.gxapi.GXVV>` of GS_D2LINE type (create with type -32) :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._get_voronoi_edges(vv) def is_z_valued(self): """ Does the `GXTIN <geosoft.gxapi.GXTIN>` contain Z values with each X,Y? :returns: Returns 1 if Z values are defined in the `GXTIN <geosoft.gxapi.GXTIN>` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._is_z_valued() return ret_val def locate_triangle(self, t, x, y): """ Get the index of the triangle containing X, Y. :param t: Seed triangle (can be iDummy or <0) :param x: Target X location :param y: Target Y location :type t: int :type x: float :type y: float :returns: The index of the triangle containing X, Y. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Index returned begins at 0, but could be negative. -1: If X,Y is not contained in a triangle (or triangle not found) -2: If the location is on an edge This is for "fall-back" purposes only. Frequently edge positions are located as being part of a triangle, so do not rely on this result to determine if a node position is on an edge. -3: If the location is a vertex. This is for "fall-back" purposes only in the code. 
Normal operation is to include a node position inside a triangle, so do not rely on this result to determine if a node position is input. """ ret_val = self._locate_triangle(t, x, y) return ret_val def nodes(self): """ Returns the number of nodes in the `GXTIN <geosoft.gxapi.GXTIN>` :returns: The number of nodes in the `GXTIN <geosoft.gxapi.GXTIN>` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._nodes() return ret_val def interp_vv(self, vvx, vvy, vvz): """ Interp TINned values using the natural neighbour method. :param vvx: `GXVV <geosoft.gxapi.GXVV>` X locations to interpolate (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :param vvy: `GXVV <geosoft.gxapi.GXVV>` Y locations to interpolate (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :param vvz: `GXVV <geosoft.gxapi.GXVV>` Interpolated Z values (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :type vvx: GXVV :type vvy: GXVV :type vvz: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXTIN <geosoft.gxapi.GXTIN>` have been created using max length = `rDUMMY <geosoft.gxapi.rDUMMY>` to ensure that the `GXTIN <geosoft.gxapi.GXTIN>` has a convex hull (otherwise the routine that locates the triangle for a given location may fail). The `GXTIN <geosoft.gxapi.GXTIN>` must also have been created using the Z values. Values located outside the convex hull are set to `rDUMMY <geosoft.gxapi.rDUMMY>`. The method is based on the following paper: <NAME>., <NAME>., and <NAME>., 1995, Geophysical parameterization and interpolation of irregular data using natural neighbours: Geophysical Journal International, 122 p. 837-857. """ self._interp_vv(vvx, vvy, vvz) def triangles(self): """ Returns the number of triangles in the `GXTIN <geosoft.gxapi.GXTIN>`. 
:returns: The number of triangles in the `GXTIN <geosoft.gxapi.GXTIN>` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._triangles() return ret_val def linear_interp_vv(self, vvx, vvy, vvz): """ Interp TINned values using the linear interpolation :param vvx: `GXVV <geosoft.gxapi.GXVV>` X locations to interpolate (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :param vvy: `GXVV <geosoft.gxapi.GXVV>` Y locations to interpolate (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :param vvz: `GXVV <geosoft.gxapi.GXVV>` Interpolated Z values (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :type vvx: GXVV :type vvy: GXVV :type vvz: GXVV .. versionadded:: 5.1.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXTIN <geosoft.gxapi.GXTIN>` have been created using max length = `rDUMMY <geosoft.gxapi.rDUMMY>` to ensure that the `GXTIN <geosoft.gxapi.GXTIN>` has a convex hull (otherwise the routine that locates the triangle for a given location may fail). The `GXTIN <geosoft.gxapi.GXTIN>` must also have been created using the Z values. Values located outside the convex hull are set to `rDUMMY <geosoft.gxapi.rDUMMY>`. The values are set assuming that each `GXTIN <geosoft.gxapi.GXTIN>` triangle defines a plane. """ self._linear_interp_vv(vvx, vvy, vvz) def nearest_vv(self, vvx, vvy, vvz): """ Interp TINned values using the nearest neighbour. :param vvx: `GXVV <geosoft.gxapi.GXVV>` X locations to interpolate (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :param vvy: `GXVV <geosoft.gxapi.GXVV>` Y locations to interpolate (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :param vvz: `GXVV <geosoft.gxapi.GXVV>` Interpolated Z values (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :type vvx: GXVV :type vvy: GXVV :type vvz: GXVV .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXTIN <geosoft.gxapi.GXTIN>` have been created using max length = `rDUMMY <geosoft.gxapi.rDUMMY>` to ensure that the `GXTIN <geosoft.gxapi.GXTIN>` has a convex hull (otherwise the routine that locates the triangle for a given location may fail). The `GXTIN <geosoft.gxapi.GXTIN>` must also have been created using the Z values. Values located outside the convex hull are set to `rDUMMY <geosoft.gxapi.rDUMMY>`. Within each voronoi triangle, the Z value of node closest to the input X,Y location is returned. """ self._nearest_vv(vvx, vvy, vvz) def range_xy(self, x_min, y_min, x_max, y_max): """ Find the range in X and Y of the TINned region. :param x_min: Min X (returned) :param y_min: Min Y :param x_max: Max X :param y_max: Max Y :type x_min: float_ref :type y_min: float_ref :type x_max: float_ref :type y_max: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The TINned range is the range of X and Y covered by the `GXTIN <geosoft.gxapi.GXTIN>` triangles. It can thus be less than the full X and Y range of the nodes themselves, if a full convex hull is not calculated. """ x_min.value, y_min.value, x_max.value, y_max.value = self._range_xy(x_min.value, y_min.value, x_max.value, y_max.value) def serial(self, bf): """ Serialize `GXTIN <geosoft.gxapi.GXTIN>` :param bf: `GXBF <geosoft.gxapi.GXBF>` in which to write `GXTIN <geosoft.gxapi.GXTIN>` :type bf: GXBF .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._serial(bf) def set_ipj(self, ipj): """ Set the projection. 
:param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` to place in the `GXTIN <geosoft.gxapi.GXTIN>` :type ipj: GXIPJ .. versionadded:: 5.0.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_ipj(ipj) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXUSERMETA.rst .. _GXUSERMETA: GXUSERMETA class ================================== .. autoclass:: geosoft.gxapi.GXUSERMETA :members: .. _USERMETA_FORMAT: USERMETA_FORMAT constants ----------------------------------------------------------------------- `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` Format Types .. autodata:: geosoft.gxapi.USERMETA_FORMAT_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.USERMETA_FORMAT_DEFAULT .. autodata:: geosoft.gxapi.USERMETA_FORMAT_ISO :annotation: .. autoattribute:: geosoft.gxapi.USERMETA_FORMAT_ISO .. autodata:: geosoft.gxapi.USERMETA_FORMAT_FGDC :annotation: .. autoattribute:: geosoft.gxapi.USERMETA_FORMAT_FGDC <file_sep>/docs/GXSURFACEITEM.rst .. _GXSURFACEITEM: GXSURFACEITEM class ================================== .. autoclass:: geosoft.gxapi.GXSURFACEITEM :members: .. _SURFACERENDER_MODE: SURFACERENDER_MODE constants ----------------------------------------------------------------------- Open Modes .. autodata:: geosoft.gxapi.SURFACERENDER_SMOOTH :annotation: .. autoattribute:: geosoft.gxapi.SURFACERENDER_SMOOTH .. autodata:: geosoft.gxapi.SURFACERENDER_FILL :annotation: .. autoattribute:: geosoft.gxapi.SURFACERENDER_FILL .. autodata:: geosoft.gxapi.SURFACERENDER_EDGES :annotation: .. autoattribute:: geosoft.gxapi.SURFACERENDER_EDGES <file_sep>/docs/GXLPT.rst .. _GXLPT: GXLPT class ================================== .. 
autoclass:: geosoft.gxapi.GXLPT :members: <file_sep>/geosoft/gxapi/GX3DV.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXMVIEW import GXMVIEW ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GX3DV(gxapi_cy.Wrap3DV): """ GX3DV class. TODO... """ def __init__(self, handle=0): super(GX3DV, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GX3DV <geosoft.gxapi.GX3DV>` :returns: A null `GX3DV <geosoft.gxapi.GX3DV>` :rtype: GX3DV """ return GX3DV() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def open_mview(self, mode): """ Open `GX3DV <geosoft.gxapi.GX3DV>`'s 3D `GXMVIEW <geosoft.gxapi.GXMVIEW>` :param mode: :ref:`GEO3DV_OPEN` :type mode: int :returns: `GXMVIEW <geosoft.gxapi.GXMVIEW>`, aborts on failure :rtype: GXMVIEW .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._open_mview(mode) return GXMVIEW(ret_val) def copy_to_map(self, map, mview, min_x, min_y, max_x, max_y, force_overwrite, new_view, problem_files): """ Copy the `GX3DV <geosoft.gxapi.GX3DV>`'s 3D `GXMVIEW <geosoft.gxapi.GXMVIEW>` into a map. 
:param map: `GXMAP <geosoft.gxapi.GXMAP>` Object :param mview: Desired new view name :param min_x: X minimum in mm :param min_y: Y minimun in mm :param max_x: X maximum in mm :param max_y: Y maximum in mm :param force_overwrite: (0 - Produce errors for conflicting unpacked files, 1 - Force overwrites of conflicting unpacked files) :param new_view: New view name created :param problem_files: List of files that are problematic returned :type map: GXMAP :type mview: str :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type force_overwrite: int :type new_view: str_ref :type problem_files: str_ref .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** A `GX3DV <geosoft.gxapi.GX3DV>` packs all source files. This functions creates an unpacked map and unpacks the packed files in the same way that UnPackFilesEx in the `GXMAP <geosoft.gxapi.GXMAP>` class does. """ new_view.value, problem_files.value = self._copy_to_map(map, mview.encode(), min_x, min_y, max_x, max_y, force_overwrite, new_view.value.encode(), problem_files.value.encode()) @classmethod def create_new(cls, file_name, mview): """ Create a new `GX3DV <geosoft.gxapi.GX3DV>`. :param file_name: `GX3DV <geosoft.gxapi.GX3DV>` file name :param mview: 3D `GXMVIEW <geosoft.gxapi.GXMVIEW>` to create new `GX3DV <geosoft.gxapi.GX3DV>` from :type file_name: str :type mview: GXMVIEW :returns: `GX3DV <geosoft.gxapi.GX3DV>` Object :rtype: GX3DV .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.Wrap3DV._create_new(GXContext._get_tls_geo(), file_name.encode(), mview) return GX3DV(ret_val) @classmethod def open(cls, file_name): """ Open an existing `GX3DV <geosoft.gxapi.GX3DV>`. 
:param file_name: `GX3DV <geosoft.gxapi.GX3DV>` file name :type file_name: str :returns: `GX3DV <geosoft.gxapi.GX3DV>` Object :rtype: GX3DV .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.Wrap3DV._open(GXContext._get_tls_geo(), file_name.encode()) return GX3DV(ret_val) @classmethod def from_map(cls, map): """ Get an `GX3DV <geosoft.gxapi.GX3DV>` from `GXMAP <geosoft.gxapi.GXMAP>` handle (e.g. from `GXEMAP.lock <geosoft.gxapi.GXEMAP.lock>` on open geosoft_3dv document in project) :param map: `GXMAP <geosoft.gxapi.GXMAP>` Object :type map: GXMAP :returns: `GX3DV <geosoft.gxapi.GX3DV>` Object :rtype: GX3DV .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.Wrap3DV._from_map(GXContext._get_tls_geo(), map) return GX3DV(ret_val) def crc_3dv(self, crc, file): """ Generate an XML CRC of a `GX3DV <geosoft.gxapi.GX3DV>` :param crc: CRC returned :param file: Name of xml to generate (.zip added) :type crc: int_ref :type file: str .. versionadded:: 8.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ crc.value = self._crc_3dv(crc.value, file.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXITR.rst .. _GXITR: GXITR class ================================== .. autoclass:: geosoft.gxapi.GXITR :members: .. _ITR_COLOR_MODEL: ITR_COLOR_MODEL constants ----------------------------------------------------------------------- `GXITR <geosoft.gxapi.GXITR>` Color Model defines .. 
autodata:: geosoft.gxapi.ITR_COLOR_MODEL_HSV :annotation: .. autoattribute:: geosoft.gxapi.ITR_COLOR_MODEL_HSV .. autodata:: geosoft.gxapi.ITR_COLOR_MODEL_RGB :annotation: .. autoattribute:: geosoft.gxapi.ITR_COLOR_MODEL_RGB .. autodata:: geosoft.gxapi.ITR_COLOR_MODEL_CMY :annotation: .. autoattribute:: geosoft.gxapi.ITR_COLOR_MODEL_CMY .. _ITR_POWER: ITR_POWER constants ----------------------------------------------------------------------- Power Zoning defines .. autodata:: geosoft.gxapi.ITR_POWER_10 :annotation: .. autoattribute:: geosoft.gxapi.ITR_POWER_10 .. autodata:: geosoft.gxapi.ITR_POWER_EXP :annotation: .. autoattribute:: geosoft.gxapi.ITR_POWER_EXP .. _ITR_ZONE: ITR_ZONE constants ----------------------------------------------------------------------- Zoning Methods .. autodata:: geosoft.gxapi.ITR_ZONE_DEFAULT :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_DEFAULT .. autodata:: geosoft.gxapi.ITR_ZONE_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_LINEAR .. autodata:: geosoft.gxapi.ITR_ZONE_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_NORMAL .. autodata:: geosoft.gxapi.ITR_ZONE_EQUALAREA :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_EQUALAREA .. autodata:: geosoft.gxapi.ITR_ZONE_SHADE :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_SHADE .. autodata:: geosoft.gxapi.ITR_ZONE_LOGLINEAR :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_LOGLINEAR .. _ITR_ZONE_MODEL: ITR_ZONE_MODEL constants ----------------------------------------------------------------------- `GXITR <geosoft.gxapi.GXITR>` Zone Model defines .. autodata:: geosoft.gxapi.ITR_ZONE_MODEL_NOZONE :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_MODEL_NOZONE .. autodata:: geosoft.gxapi.ITR_ZONE_MODEL_NONE :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_MODEL_NONE .. autodata:: geosoft.gxapi.ITR_ZONE_MODEL_LINEAR :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_MODEL_LINEAR .. 
autodata:: geosoft.gxapi.ITR_ZONE_MODEL_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_MODEL_NORMAL .. autodata:: geosoft.gxapi.ITR_ZONE_MODEL_EQUAL :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_MODEL_EQUAL .. autodata:: geosoft.gxapi.ITR_MODEL_LOGLIN :annotation: .. autoattribute:: geosoft.gxapi.ITR_MODEL_LOGLIN .. autodata:: geosoft.gxapi.ITR_ZONE_MODEL_LOGLIN :annotation: .. autoattribute:: geosoft.gxapi.ITR_ZONE_MODEL_LOGLIN <file_sep>/docs/GXHTTP.rst .. _GXHTTP: GXHTTP class ================================== .. autoclass:: geosoft.gxapi.GXHTTP :members: <file_sep>/docs/GXARCMAP.rst .. _GXARCMAP: GXARCMAP class ================================== .. autoclass:: geosoft.gxapi.GXARCMAP :members: .. _ARCMAP_LOAD_FLAGS: ARCMAP_LOAD_FLAGS constants ----------------------------------------------------------------------- Flags that can be combined and passed to iLoadMap_ARCMAP .. autodata:: geosoft.gxapi.ARCMAP_LOAD_DELFRAME :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_DELFRAME .. autodata:: geosoft.gxapi.ARCMAP_LOAD_DELLAYER :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_DELLAYER .. autodata:: geosoft.gxapi.ARCMAP_LOAD_EXISTFRAME :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_EXISTFRAME .. autodata:: geosoft.gxapi.ARCMAP_LOAD_COPYLAYER :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_COPYLAYER .. autodata:: geosoft.gxapi.ARCMAP_LOAD_HIDESIBLINGS :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_HIDESIBLINGS .. autodata:: geosoft.gxapi.ARCMAP_LOAD_PREFIXMAPFRAME :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_PREFIXMAPFRAME .. autodata:: geosoft.gxapi.ARCMAP_LOAD_PREFIXMAPLAYER :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_PREFIXMAPLAYER .. autodata:: geosoft.gxapi.ARCMAP_LOAD_MERGETOSINGLEVIEW :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_MERGETOSINGLEVIEW .. autodata:: geosoft.gxapi.ARCMAP_LOAD_INTOCURRENTFRAME :annotation: .. 
autoattribute:: geosoft.gxapi.ARCMAP_LOAD_INTOCURRENTFRAME .. autodata:: geosoft.gxapi.ARCMAP_LOAD_NOMAPLAYERS :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_NOMAPLAYERS .. autodata:: geosoft.gxapi.ARCMAP_LOAD_ACTIVATE :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_ACTIVATE .. autodata:: geosoft.gxapi.ARCMAP_LOAD_NEW :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_NEW .. autodata:: geosoft.gxapi.ARCMAP_LOAD_NAMETAGISPREFIX :annotation: .. autoattribute:: geosoft.gxapi.ARCMAP_LOAD_NAMETAGISPREFIX <file_sep>/geosoft/gxpy/gdb.py """ Geosoft databases for line-oriented spatial data. :Classes: :`Geosoft_gdb`: Geosoft line database :`Line`: line handling :`Channel`: channel handling :Constants: :LINE_TYPE_NORMAL: `geosoft.gxapi.DB_LINE_TYPE_NORMAL` :LINE_TYPE_BASE: `geosoft.gxapi.DB_LINE_TYPE_BASE` :LINE_TYPE_TIE: `geosoft.gxapi.DB_LINE_TYPE_TIE` :LINE_TYPE_TEST: `geosoft.gxapi.DB_LINE_TYPE_TEST` :LINE_TYPE_TREND: `geosoft.gxapi.DB_LINE_TYPE_TREND` :LINE_TYPE_SPECIAL: `geosoft.gxapi.DB_LINE_TYPE_SPECIAL` :LINE_TYPE_RANDOM: `geosoft.gxapi.DB_LINE_TYPE_RANDOM` :LINE_CATEGORY_FLIGHT: `geosoft.gxapi.DB_CATEGORY_LINE_FLIGHT` :LINE_CATEGORY_GROUP: `geosoft.gxapi.DB_CATEGORY_LINE_GROUP` :LINE_CATEGORY_NORMAL: `geosoft.gxapi.DB_CATEGORY_LINE_NORMAL` :FORMAT_NORMAL: `geosoft.gxapi.DB_CHAN_FORMAT_NORMAL` :FORMAT_EXP: `geosoft.gxapi.DB_CHAN_FORMAT_EXP` :FORMAT_TIME: `geosoft.gxapi.DB_CHAN_FORMAT_TIME` :FORMAT_DATE: `geosoft.gxapi.DB_CHAN_FORMAT_DATE` :FORMAT_GEOGR: `geosoft.gxapi.DB_CHAN_FORMAT_GEOGR` :FORMAT_SIGDIG: `geosoft.gxapi.DB_CHAN_FORMAT_SIGDIG` :FORMAT_HEX: `geosoft.gxapi.DB_CHAN_FORMAT_HEX` :CHAN_ALL: None :CHAN_NORMAL: 0 :CHAN_ARRAY: 1 :CHAN_DISPLAYED: 2 :SYMB_LINE_NORMAL: `geosoft.gxapi.DB_CATEGORY_LINE_NORMAL` :SYMB_LINE_FLIGHT: `geosoft.gxapi.DB_CATEGORY_LINE_FLIGHT` :SYMB_LINE_GROUP: `geosoft.gxapi.DB_CATEGORY_LINE_GROUP` :SELECT_INCLUDE: `geosoft.gxapi.DB_LINE_SELECT_INCLUDE` :SELECT_EXCLUDE: `geosoft.gxapi.DB_LINE_SELECT_EXCLUDE` 
:COMP_NONE: `geosoft.gxapi.DB_COMP_NONE` :COMP_SPEED: `geosoft.gxapi.DB_COMP_SPEED` :COMP_SIZE: `geosoft.gxapi.DB_COMP_SIZE` :READ_REMOVE_DUMMYROWS: 1 :READ_REMOVE_DUMMYCOLUMNS: 2 :SYMBOL_LOCK_NONE: `geosoft.gxapi.DB_LOCK_NONE` :SYMBOL_LOCK_READ: `geosoft.gxapi.DB_LOCK_READONLY` :SYMBOL_LOCK_WRITE: `geosoft.gxapi.DB_LOCK_READWRITE` :DRAW_AS_POINTS: 0 :DRAW_AS_LINES: 1 .. seealso:: `geosoft.gxapi.GXGB`, `geosoft.gxapi.GXEDB`, `geosoft.gxapi.GXDBREAD`, `geosoft.gxapi.GXDBWRITE` .. note:: Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_gdb.py>`_ """ import os import sys import math import numpy as np import pandas as pd import geosoft import geosoft.gxapi as gxapi from . import vv as gxvv from . import va as gxva from . import utility as gxu from . import gx as gx from . import coordinate_system as gxcs from . import metadata as gxmeta from . import map as gxmap from . import view as gxview from . import group as gxgroup from . 
import geometry as gxgeo __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) LINE_TYPE_NORMAL = gxapi.DB_LINE_TYPE_NORMAL LINE_TYPE_BASE = gxapi.DB_LINE_TYPE_BASE LINE_TYPE_TIE = gxapi.DB_LINE_TYPE_TIE LINE_TYPE_TEST = gxapi.DB_LINE_TYPE_TEST LINE_TYPE_TREND = gxapi.DB_LINE_TYPE_TREND LINE_TYPE_SPECIAL = gxapi.DB_LINE_TYPE_SPECIAL LINE_TYPE_RANDOM = gxapi.DB_LINE_TYPE_RANDOM LINE_CATEGORY_FLIGHT = gxapi.DB_CATEGORY_LINE_FLIGHT LINE_CATEGORY_GROUP = gxapi.DB_CATEGORY_LINE_GROUP LINE_CATEGORY_NORMAL = gxapi.DB_CATEGORY_LINE_NORMAL FORMAT_NORMAL = gxapi.DB_CHAN_FORMAT_NORMAL FORMAT_EXP = gxapi.DB_CHAN_FORMAT_EXP FORMAT_TIME = gxapi.DB_CHAN_FORMAT_TIME FORMAT_DATE = gxapi.DB_CHAN_FORMAT_DATE FORMAT_GEOGR = gxapi.DB_CHAN_FORMAT_GEOGR FORMAT_SIGDIG = gxapi.DB_CHAN_FORMAT_SIGDIG FORMAT_HEX = gxapi.DB_CHAN_FORMAT_HEX CHAN_ALL = None CHAN_NORMAL = 0 CHAN_ARRAY = 1 CHAN_DISPLAYED = 2 SYMB_LINE_NORMAL = gxapi.DB_CATEGORY_LINE_NORMAL SYMB_LINE_FLIGHT = gxapi.DB_CATEGORY_LINE_FLIGHT SYMB_LINE_GROUP = gxapi.DB_CATEGORY_LINE_GROUP SELECT_INCLUDE = gxapi.DB_LINE_SELECT_INCLUDE SELECT_EXCLUDE = gxapi.DB_LINE_SELECT_EXCLUDE COMP_NONE = gxapi.DB_COMP_NONE COMP_SPEED = gxapi.DB_COMP_SPEED COMP_SIZE = gxapi.DB_COMP_SIZE READ_REMOVE_DUMMYROWS = 1 READ_REMOVE_DUMMYCOLUMNS = 2 SYMBOL_LOCK_NONE = gxapi.DB_LOCK_NONE SYMBOL_LOCK_READ = gxapi.DB_LOCK_READONLY SYMBOL_LOCK_WRITE = gxapi.DB_LOCK_READWRITE DRAW_AS_POINTS = 0 DRAW_AS_LINES = 1 class GdbException(geosoft.GXRuntimeError): """ Exceptions from `geosoft.gxpy.gdb`. .. 
versionadded:: 9.1 """ pass def _gdb_name(name): name = name.strip() name_ext = os.path.splitext(name) if name_ext[1].lower() == '.gdb': return name else: return os.path.normpath(name + ".gdb") def _va_width(data): if len(data.shape) == 1: width = 1 elif len(data.shape) == 2: width = data.shape[1] else: raise GdbException(_t("Only one or two-dimensional data allowed.")) return width def is_valid_line_name(name): """ Return True if this is a valid line name. See also `create_line_name` .. versionadded:: 9.3 """ name = str(name) try: int(name) return False except ValueError: return bool(gxapi.GXDB.is_line_name(name)) def create_line_name(number=0, line_type=LINE_TYPE_NORMAL, version=0): """ Returns a valid database line name constructed from the component parts. :param number: line number, or a string, default is 0 :param line_type: one of LINE_TYPE constants, default is LINE_TYPE_NORMAL :param version: version number, default is 0 :return: string line name Line name strings are constructed using the line naming convention as in the following: ====== ======================================= L10.4 LINE_TYPE_NORMAL, number 10, version 4 B10.4 LINE_TYPE_BASE, number 10, version 4 D10.4 LINE_TYPE_RANDOM, number 10, version 4 P10.4 LINE_TYPE_SPECIAL, number 10, version 4 T10.4 LINE_TYPE_TIE, number 10, version 4 S10.4 LINE_TYPE_TEST, number 10, version 4 R10.4 LINE_TYPE_TREND, number 10, version 4 ====== ======================================= .. versionadded:: 9.3 """ sr = gxapi.str_ref() gxapi.GXDB.set_line_name2(str(number), line_type, version, sr) return sr.value def delete_files(file_name): """ Delete all files associates with this database name. :param file_name: name of the database .. 
versionadded:: 9.3 """ if file_name is not None: path = _gdb_name(file_name) root, ext = os.path.splitext(os.path.basename(path)) if ext.lower() != '.gdb': raise GdbException(_t('File is not a Geosoft database file (no gdb extension): {}'.format(file_name))) gxu.delete_file(file_name) gxu.delete_file(file_name + '.xml') class Geosoft_gdb(gxgeo.Geometry): """ Class to work with Geosoft databases. This class wraps many of the functions found in `geosoft.gxapi.GXDB`. :Constructors: ========= ========================================================================= `open` open an existing file, or if not specified open/lock the current database `new` create a new database ========= ========================================================================= **Some typical programming patterns** Python Oasis extension opens and reads through all data in the current database: .. code:: import os,sys import numpy as np import gxpy.gx as gxp import gxpy.gdb as gxdb # open the current database in the open project gdb = gxdb.Geosoft_gdb.open() for line in gdb.list_lines(): npd,ch,fid = gdb.read_line(line) # npd is a 2D numpy array to all data in this line. # ch is a list of the channels, one channel for each column in npd. # Array channels are expanded with channel names "name[0]", "name[1]" ... # fid is a tuple (start,increment) fiducial, which will be the minimum start and smallest increment. # ... do something with the data in npd ... External Python program to open and read through all data in a database: .. code:: import os,sys import numpy as np import gxpy.gx as gx import gxpy.gdb as gxdb # initalize the gx environment - required for external programs. gxp = gx.GXpy() # open a database gdb = gxdb.Geosoft_gdb.open('test.gdb') for line in gdb.list_lines(): npd,ch,fid = gdb.read_line(line) # npd is a 2D numpy array to all data in this line. # ch is a list of the channels, one channel for each column in npd. 
# Array channels are expanded with channel names "name[0]", "name[1]" ... # fid is a tuple (start,increment) fiducial, which will be the minimum start and smallest increment. # ... do something with the data in npd ... The following creates a new channel that is the distance from the origin to the X,Y,Z location of every point. .. code:: ... gdb = gxdb.Geosoft_gdb.open('test.gdb') for line in gdb.list_lines(): npd,ch,fid = gdb.read_line(line, channels=['X','Y','Z']) npd = np.square(npd) distance_from_origin = np.sqrt(npd[0] + npd[1] + npd[2]) gdb.write_channel(line, 'distance', distance_from_origin, fid) .. versionadded:: 9.1 .. versionchanged:: 9.3 float numpy arrays use np.nan for dummies so dummy filtering no longer necessary. .. versionchanged:: 9.3.1 inherits from `geosoft.gxpy.geometry.Geometry` """ def __enter__(self): return self def __exit__(self, _type, _value, _traceback): self.__del__() def __del__(self): if hasattr(self, '_close'): self._close() def _close(self, pop=True, discard=False): if hasattr(self, '_open'): if self._open: if self._db: if self._edb is not None: if self._edb.is_locked(): self._edb.un_lock() discard = False self._edb = None if not discard and self._xmlmetadata_changed: with open(self._file_name + '.xml', 'w+') as f: f.write(gxu.xml_from_dict(self._xmlmetadata)) self._db.sync() self._db = None if discard: gxu.delete_files_by_root(self._file_name) if pop: gx.pop_resource(self._open) self._open = None def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return '{}({} lines, {} channels)'.format(os.path.basename(self.name), self.used_lines, self.used_channels) def __init__(self, name=None, db=None): self._lst = gxapi.GXLST.create(2000) self._file_name = None self._db = db self._edb = None self._xmlmetadata = None self._xmlmetadata_changed = False self._xmlmetadata_root = '' self._extent = {'xyz': None, 'extent': None} if name is None: if self._db: s = gxapi.str_ref() 
self._db.get_name(gxapi.DB_NAME_FILE, s) self._file_name = os.path.normpath(s.value) name = os.path.basename(self._file_name) else: name = '_gdb_' else: name = os.path.basename(name) super().__init__(name=name) self._open = gx.track_resource(self.__class__.__name__, self._file_name) def close(self, discard=False): """ Close the database and free resources :param discard: True to discard the database files(s) after closing. .. versionadded:: 9.4 """ self._close(discard=discard) @classmethod def open(cls, name=None): """ Open an existing database. :param name: name of the database, default is the current project database :returns: `Geosoft_gdb` instance .. versionadded:: 9.1 """ gdb = cls(name) if name is None: gdb._edb = gxapi.GXEDB.current() gdb._db = gxapi.GXEDB.lock(gdb._edb) else: gdb._edb = None gdb._db = gxapi.GXDB.open(_gdb_name(name), 'SUPER', '') sr = gxapi.str_ref() gdb._db.get_name(gxapi.DB_NAME_FILE, sr) gdb._file_name = os.path.normpath(sr.value) return gdb @classmethod def new(cls, name=None, max_lines=500, max_channels=200, max_blobs=0, page_size=1024, comp=None, overwrite=False): """ Create a new database. :param name: database name, if None a temporary database is created :param max_lines: maximum number of lines, default 500 :param max_channels: maximum number of channels, default 200 :param max_blobs: maximum number of blobs, default lines*channels+20 :param comp: compression: | COMP_NONE | COMP_SPEED (default) | COMP_SIZE :param overwrite: `True` to overwrite existing database. Default is `False`, GdbException if file exists. :param page_size: page size (default is 1024), which limits the amount of compressed data that can be stored in a single channel on a line. The maximum compressed data size for a channel will be this number * 65534 (default 1024 * 65534 = 64 MB of compressed data). This will be forced to a power of 2 between 64 and 4096, which would allow for a maximum of 256 MB compressed data per channel per line. 
:returns: `Geosoft_gdb` instance .. versionadded:: 9.1 .. versionchanged:: 9.3 added parameter `overwrite=False` .. versionchanged:: 9.4 `name=None` creates a temporary database """ max_lines = max(10, max_lines) max_channels = max(25, max_channels) min_blobs = max_channels + max_lines + 20 max_blobs = max(min_blobs, max_blobs) if not comp: comp = COMP_SPEED # validate page_size: ps = 64 while ps < page_size: ps *= 2 if ps > 4096: raise GdbException(_t('Page size cannot be larger than 4096 (256 MB per line-channel).')) page_size = ps if name is None: name = gx.gx().temp_file('gdb') name = _gdb_name(name) if not overwrite and os.path.isfile(name): raise GdbException(_t('Cannot overwrite existing database \'{}\''.format(name))) gxu.delete_files_by_root(name) gxapi.GXDB.create_comp(name, max_lines, max_channels, max_blobs, 10, 100, 'SUPER', '', page_size, comp) return cls.open(name) def commit(self): """ Commit database changes. .. versionadded:: 9.1 """ self._db.commit() def discard(self): """ Discard database changes. .. versionadded:: 9.1 """ self._db.discard() # ============================================================================ # internal helper functions def exist_symb_(self, symb, symb_type): """ Check if a symbol exists of the required type. :param symb: symbol name, number or instance :param symb_type: one of DB_SYMB_TYPE :returns: `True` if the symbol exists and is the expected symbol type, `False` otherwise .. 
versionadded:: 9.1 """ if isinstance(symb, str): return self._db.exist_symb(symb, symb_type) elif isinstance(symb, int): return self._db.valid_symb(symb, symb_type) elif isinstance(symb, Line) and (symb_type == gxapi.DB_SYMB_LINE): return True elif isinstance(symb, Channel) and (symb_type == gxapi.DB_SYMB_CHAN): return True return False # ============================================================================ # Information @property def gxdb(self): """The `geosoft.gxapi.GXDB` instance handle""" return self._db @property def xyz_channels(self): """ The currently identified (x, y, z) channels. Methods that work on spatial locations will use these channels for locating the data at each fiducial of the data. Can be set using a tuple of two or three strings. For example: .. code:: gdb.xyz_channels = ('Easting', 'Northing') gdb.xyz_channels = ('Easting', 'Northing', 'Elevation') .. versionadded:: 9.2 """ sr = gxapi.str_ref() self.gxdb.get_xyz_chan(0, sr) x = sr.value self.gxdb.get_xyz_chan(1, sr) y = sr.value self.gxdb.get_xyz_chan(2, sr) z = sr.value if not self.is_channel(x): x = None if not self.is_channel(y): y = None if not self.is_channel(z): z = None return x, y, z @xyz_channels.setter def xyz_channels(self, xyz): if len(xyz) >= 3: x, y, z = xyz self.is_channel(z, True) else: x, y = xyz z = None self.is_channel(x, True) self.is_channel(y, True) self.gxdb.set_xyz_chan(0, x) self.gxdb.set_xyz_chan(1, y) if z: self.gxdb.set_xyz_chan(2, z) self.clear_extent() def _init_xmlmetadata(self): if not self._xmlmetadata: self._xmlmetadata = gxu.geosoft_metadata(self._file_name) self._xmlmetadata_root = tuple(self._xmlmetadata.items())[0][0] @property def metadata(self): """ Return the database XML metadata as a dictionary. Can be set, in which case the dictionary items passed will be added to, or replace existing XML metadata. .. 
versionadded:: 9.2 """ self._init_xmlmetadata() return self._xmlmetadata[self._xmlmetadata_root] @metadata.setter def metadata(self, meta): self._init_xmlmetadata() self._xmlmetadata[self._xmlmetadata_root] = gxu.merge_dict(self._xmlmetadata[self._xmlmetadata_root], meta) self._xmlmetadata_changed = True def get_gx_metadata(self): """ Return the database Geosoft metadata as a Geosoft `geosoft.gxpy.metadata.Metadata` instance. The internal database metadata is used to store various database properties that are not intended to be part of the exposed dataset metadata exposed by the :attr:metadata property. If you wish to add your own metadata to the internal properties you can use the `geosoft.gxpy.metadata` module to add metadata and save it to the database using `geosoft.gxapi.GXDB.set_meta`. .. versionadded:: 9.3 """ gxm = gxapi.GXMETA.create() self.gxdb.get_meta(gxm) return gxmeta.Metadata(gxm) def update_gxmeta(self, new_meta): """ Update the database Geosoft metadata as a Geosoft `geosoft.gxpy.metadata.Metadata` instance. :param meta: the new metadata as a `geosoft.gxpy.Metadata` instance or a nested dict. .. versionadded:: 9.3.1 """ current_meta = self.get_gx_metadata() if isinstance(new_meta, gxmeta.Metadata): new_meta = new_meta.meta_dict() current_meta.update_dict(new_meta) self.gxdb.set_meta(current_meta.gxmeta) @property def file_name(self): """Database file name.""" return os.path.abspath(self._file_name) @property def coordinate_system(self): """ Coordinate system of the current `xyz_channels`. Can be set from any `geosoft.gxpy.coordinate_system.Coordinate_system` constructor. .. 
versionchanged:: 9.3 added setter """ try: x, y, z = self.xyz_channels ipj = gxapi.GXIPJ.create() self.gxdb.get_ipj(self.channel_name_symb(x)[1], ipj) return gxcs.Coordinate_system(ipj) except GdbException: return gxcs.Coordinate_system() @coordinate_system.setter def coordinate_system(self, cs): if not isinstance(cs, gxcs.Coordinate_system): cs = gxcs.Coordinate_system(cs) x, y, z = self.xyz_channels self.gxdb.set_ipj(self.channel_name_symb(x)[1], self.channel_name_symb(y)[1], cs.gxipj) x, _, z = self.xyz_channels if z: z = Channel(self, z) if not z.unit_of_measure: z.unit_of_measure = Channel(self, x).unit_of_measure @property def max_blobs(self): """maximum blobs allowed""" return self._db.get_info(gxapi.DB_INFO_BLOBS_MAX) @property def max_lines(self): """maximum number of lines allowed""" return self._db.get_info(gxapi.DB_INFO_LINES_MAX) @property def max_channels(self): """maximum number of channels allowed""" return self._db.get_info(gxapi.DB_INFO_CHANS_MAX) @property def used_blobs(self): """number of blobs used""" return self._db.get_info(gxapi.DB_INFO_BLOBS_USED) @property def used_lines(self): """number of lines used""" return self._db.get_info(gxapi.DB_INFO_LINES_USED) @property def used_channels(self): """number of channels used""" return self._db.get_info(gxapi.DB_INFO_CHANS_USED) @property def max_compressed_channel_bytes(self): """maximum compressed data per channel per line in bytes""" ps = self._db.get_info(gxapi.DB_INFO_PAGE_SIZE) return ps * 65534 @property def number_of_blocks(self): """number of blocks""" return self._db.get_info(gxapi.DB_INFO_DATA_SIZE) @property def lost_blocks(self): """lost blocks that might be freed""" return self._db.get_info(gxapi.DB_INFO_LOST_SIZE) @property def free_blocks(self): """number of free blocks""" return self._db.get_info(gxapi.DB_INFO_FREE_SIZE) @property def compression(self): """database compression setting""" return self._db.get_info(gxapi.DB_INFO_COMP_LEVEL) @property def pages_for_blobs(self): """pages 
consumed by blobs""" return self._db.get_info(gxapi.DB_INFO_BLOB_SIZE) @property def db_size_kb(self): """database size in kb""" return self._db.get_info(gxapi.DB_INFO_FILE_SIZE) @property def index_size_kb(self): """index size in kb""" return self._db.get_info(gxapi.DB_INFO_INDEX_SIZE) @property def max_block_size_bytes(self): """maximum block size in bytes""" return self._db.get_info(gxapi.DB_INFO_MAX_BLOCK_SIZE) @property def data_has_changed(self): """`True` if data has changed""" return self._db.get_info(gxapi.DB_INFO_CHANGESLOST) def is_line(self, line, raise_err=False): """ Returns `True` if the named line exists in the database. :param line: line name :param raise_err: True to raise an error if it does not exist .. versionadded:: 9.1 """ exist = self._db.find_symb(str(line), gxapi.DB_SYMB_LINE) != gxapi.NULLSYMB if raise_err and not exist: raise GdbException(_t('"{}" is not a line in the database'.format(line))) return exist def is_channel(self, chan, raise_err=False): """ Returns `True` if the channel name exists in the database. :param chan: channel name :param raise_err: True to raise an error if it does not exist .. versionadded:: 9.1 """ exist = self._db.find_chan(chan) != gxapi.NULLSYMB if raise_err and not exist: raise GdbException(_t('"{}" is not a channel in the database'.format(chan))) return exist @property def extent(self): """ Return the spatial extent of all selected data in the database as a `geosoft.gxpy.geometry.Point2`. :returns: `geosoft.gxpy.geometry.Point2` of minimum, maximum, or None if no spatial information. .. 
versionadded:: 9.2 """ def expand(_min, _max, _data): if np.isnan(_data).all(): return _min, _max mdata = np.nanmin(_data) if _min is None: _min = mdata _max = np.nanmax(_data) return _min, _max if mdata < _min: _min = mdata return _min, _max mdata = np.nanmax(_data) if mdata > _max: _max = mdata return _min, _max lines = self.lines() if len(lines): xyz = self.xyz_channels if str(xyz) == self._extent['xyz']: return self._extent['extent'] xmin = xmax = ymin = ymax = zmin = zmax = None if None in xyz: if None in xyz[0:2]: return None xyz = xyz[0:2] for l in lines: data = self.read_line(l, channels=xyz)[0] xmin, xmax = expand(xmin, xmax, data[:, 0]) ymin, ymax = expand(ymin, ymax, data[:, 1]) if data.shape[1] > 2: zmin, zmax = expand(zmin, zmax, data[:, 2]) ext = gxgeo.Point2((xmin, ymin, zmin, xmax, ymax, zmax), coordinate_system=self.coordinate_system) self._extent['xyz'] = str(xyz) self._extent['extent'] = ext return ext return None def _get(self, s, fn): self.lock_read_(s) try: v = fn(s) finally: self.unlock_(s) return v def lock_set_(self, s, fn, v): self.lock_write_(s) try: fn(s, v) finally: self.unlock_(s) def line_name_symb(self, line, create=False): """ Return line name, symbol :param line: line name, or symbol number :param create: `True` to create a line if one does not exist :returns: line name, symbol :raises: GdbException if line not found or cannot be created .. 
versionadded:: 9.1 """ if isinstance(line, Line): return line.name, line.symbol elif isinstance(line, str): if self.exist_symb_(line, gxapi.DB_SYMB_LINE): symb = self._db.find_symb(line, gxapi.DB_SYMB_LINE) return line, symb if create: return line, self.new_line(line) else: raise GdbException(_t('Line \'{}\' not found'.format(line))) else: sr = gxapi.str_ref() self._db.get_symb_name(line, sr) return sr.value, line def channel_name_symb(self, chan): """ Return channel name, symbol :param chan: channel name, or symbol number or Channel instance :returns: line name, symbol, returns ('',-1) if invalid :raises: GdbException if channel does not exist .. versionadded:: 9.1 """ if isinstance(chan, Channel): return chan.name, chan.symbol if isinstance(chan, str): symb = self._db.find_symb(chan, gxapi.DB_SYMB_CHAN) if symb == -1: raise GdbException(_t('Channel \'{}\' not found'.format(chan))) return chan, symb if not self.exist_symb_(chan, gxapi.DB_SYMB_CHAN): raise GdbException(_t('Channel symbol \'{}\' not found'.format(chan))) sr = gxapi.str_ref() self._db.get_symb_name(chan, sr) return sr.value, chan def channel_width(self, channel): """ Channel array width, 1 for normal channels, >1 for VA channels. :param channel: channel symbol or name :returns: array dimension, 1 for non-array channels .. versionadded:: 9.1 """ return self._get(self.channel_name_symb(channel)[1], self._db.get_col_va) def list_channels(self, chan=None): """ Return a dict of channels in the database. :param chan: channel filter, default CHAN_ALL: =============== ============================ CHAN_ALL all channels, normal and VA CHAN_NORMAL normal channels only CHAN_ARRAY VA channels only =============== ============================ :returns: dictionary {channel_names: channel_symbols} .. 
versionadded:: 9.1 """ def clean_chan_dict(): """ returns list without any temporary VA sliced channels """ self._db.chan_lst(self._lst) _dct = gxu.dict_from_lst(self._lst) cdct = {} for ck in _dct: if '[' in ck: continue cdct[ck] = _dct.get(ck) return cdct if chan == CHAN_ALL: dct = clean_chan_dict() else: self._db.array_lst(self._lst) va = gxu.dict_from_lst(self._lst) if chan == CHAN_ARRAY: dct = va else: # filter VA channels out of the list allc = clean_chan_dict() va = list(va) dct = {} for k in allc: if not(k in va): dct[k] = allc.get(k) # convert symbol strings to ints for k in dct: dct[k] = int(dct.get(k)) return dct def lines(self, select=True): """ .. deprecated:: 9.2 use list_lines() """ return self.list_lines(select) def list_lines(self, select=True): """ List of lines in the database, returned as a {name: symbol} dictionary :param select: `True` to return selected lines, `False` to return all lines :returns: dictionary (line name: symbol) .. versionadded:: 9.1 """ if select: self._db.selected_line_lst(self._lst) else: self._db.line_lst(self._lst) dct = gxu.dict_from_lst(self._lst) for k in dct: dct[k] = int(dct.get(k)) return dct def line_details(self, line): """ Return dictionary of line details :param line: channel name or symbol :returns: dictionary: =========== ============================================================== Key Meaning =========== ============================================================== name line name symbol line symbol type line type, one of gxapi.DB_LINE_TYPE category one of SYMB_LINE date date of the line number numeric line number flight flight number version line version number groupclass class name for grouped lines, None if not a grouped line =========== ============================================================== .. 
versionadded:: 9.1 """ def get_detail(fn): try: sr = gxapi.str_ref() fn(ls, sr) return sr.value except geosoft.gxapi.GXAPIError: return '' ln, ls = self.line_name_symb(line) detail = {} self.lock_read_(ls) try: detail['name'] = ln detail['symbol'] = ls detail['category'] = self._db.line_category(ls) detail['date'] = self._db.line_date(ls) detail['flight'] = self._db.line_flight(ls) detail['number'] = self._db.line_number(ls) detail['version'] = self._db.line_version(ls) detail['type'] = self._db.line_type(ls) if self._db.line_category(ls) == gxapi.DB_CATEGORY_LINE_GROUP: detail['groupclass'] = get_detail(self._db.get_group_class) else: detail['groupclass'] = None finally: self.unlock_(ls) return detail def channel_details(self, channel): """ Return dictionary of channel details :param channel: channel name or symbol :returns: dictionary: ======= ============================================================== Key Meaning ======= ============================================================== name channel name symbol channel symbol class class name format format, one of gxapi.DB_CHAN_FORMAT constants width display width in characters decimal decimal places to display unit measurement unit label channel label, which can be different from the channel name protect protection: 0 can be modified; 1 protected from modification columns number data columns, 1 for normal channels, n for VA channels type data type, one of gxapi.DB_CATEGORY_CHAN constants ======= ============================================================== .. 
versionadded:: 9.1 """ def get_detail(fn): sr = gxapi.str_ref() fn(cs, sr) return sr.value cn, cs = self.channel_name_symb(channel) detail = {} self.lock_read_(cs) try: detail['name'] = cn detail['symbol'] = cs detail['class'] = get_detail(self._db.get_chan_class) detail['format'] = self._db.get_chan_format(cs) detail['width'] = self._db.get_chan_width(cs) detail['decimal'] = self._db.get_chan_decimal(cs) detail['unit'] = get_detail(self._db.get_chan_unit) detail['label'] = get_detail(self._db.get_chan_label) detail['protect'] = self._db.get_chan_protect(cs) detail['array'] = self.channel_width(cs) detail['type'] = self._db.get_chan_type(cs) finally: self.unlock_(cs) return detail def set_channel_details(self, channel, detail): """ Set/change channel details from dictionary :param channel: channel name or symbol :param detail: dictionary, see chan_details .. versionadded:: 9.1 """ def set_detail(what, fn): det = detail.get(what) if det is not None: fn(cs, det) cs = self.channel_name_symb(channel)[1] self.lock_write_(cs) try: set_detail('class', self._db.set_chan_class) set_detail('format', self._db.set_chan_format) set_detail('width', self._db.set_chan_width) set_detail('decimal', self._db.set_chan_decimal) set_detail('unit', self._db.set_chan_unit) set_detail('label', self._db.set_chan_label) protect = detail.get('protect') if protect is not None: self._db.set_chan_protect(cs, protect) finally: self.unlock_(cs) def channel_dtype(self, channel): """ Returns channel numpy dtype :param channel: channel name or symbol :returns: numpy dtype .. 
        versionadded:: 9.1
        """
        return gxu.dtype_gx(self._db.get_chan_type(self.channel_name_symb(channel)[1]))

    def channel_fid(self, line, channel):
        """
        Return the fiducial of a line, channel

        :param line:    line name, symbol or Line
        :param channel: channel name, symbol or channel
        :returns:       (start,increment)
        """
        ls = self.line_name_symb(line)[1]
        cs = self.channel_name_symb(channel)[1]
        self.lock_read_(cs)
        try:
            fid_start = self._db.get_fid_start(ls, cs)
            fid_incr = self._db.get_fid_incr(ls, cs)
        finally:
            self.unlock_(cs)
        return fid_start, fid_incr

    # ========================================================================================
    # management

    def new_channel(self, name, dtype=np.float64, array=1, dup=None, details=None):
        """
        Return a channel symbol, create if it does not exist.

        :param name:    channel name
        :param dtype:   numpy dtype (ie. np.int64)
        :param array:   array columns (default is 1)
        :param dup:     duplicate properties of this channel (name, symbol, channel)
        :param details: dictionary containing channel details, see channel_details()
        :returns:       channel symbol

        Examples:

        .. code::

            symb = gdb.newChan('X')
            symb = gdb.newChan('X', dtype=np.float64, details={'decimal':4})

        .. versionadded:: 9.1

        .. versionchanged:: 9.3
            added support for duplication an existing channel via dup=
        """
        symb = self._db.find_symb(name, gxapi.DB_SYMB_CHAN)
        if array < 1:
            array = 1

        if symb == gxapi.NULLSYMB:
            if dup:
                symb = self._db.dup_symb_no_lock(self.channel_name_symb(dup)[1], name)
            else:
                symb = self._db.create_symb_ex(name,
                                               gxapi.DB_SYMB_CHAN,
                                               gxapi.DB_OWN_SHARED,
                                               gxu.gx_dtype(dtype),
                                               array)
            if details:
                self.set_channel_details(symb, details)
            elif not dup:
                # default display format for a brand-new channel
                self.set_channel_details(symb, {'width': 12, 'decimal': 2})

        return symb

    def new_line(self, line, linetype=None, group=None, dup=None):
        """
        Create a new line symbol.  If line exists an error is raised.

        :param line:     line name
        :param linetype: line type for creating a new line, ignored if group defines

            ================= =========================================
            SYMB_LINE_NORMAL  normal lines, name is a string
            SYMB_LINE_FLIGHT  flight lines, first letter is line type
            ================= =========================================

        :param group: group name for a grouped class
        :param dup:   duplicate from an existing line (name, symbol of Line)
        :returns:     line symbol

        .. seealso:: function `create_line_name` to create a valid line name.

        .. versionadded:: 9.1
        """

        if group is None and dup is None and not is_valid_line_name(line):
            raise GdbException(_t('Invalid line name \'{}\'. Use create_line_name() to create a valid name.'.
                                  format(line)))

        symb = self._db.find_symb(line, gxapi.DB_SYMB_LINE)
        if symb != gxapi.NULLSYMB:
            raise GdbException(('Cannot create existing line \'{}\''.format(line)))

        if dup:
            dup_symb = self.line_name_symb(dup)[1]
            symb = self._db.dup_line_symb(dup_symb, line)
        else:
            if group:
                linetype = SYMB_LINE_GROUP
            elif not linetype:
                linetype = SYMB_LINE_NORMAL
            symb = self._db.create_symb_ex(line,
                                           gxapi.DB_SYMB_LINE,
                                           gxapi.DB_OWN_SHARED,
                                           linetype,
                                           0)
        if group:
            Line(self, symb).group = group

        # line set changed - cached extent is stale
        self.clear_extent()

        return symb

    def clear_extent(self):
        """
        Clear the extent cache.

        .. versionadded:: 9.3.1
        """
        self._extent['xyz'] = None

    def delete_channel(self, channels):
        """
        Delete channel(s) by name or symbol.

        :param channels: channel name or symbol, or a list of channel names or symbols

        .. versionadded:: 9.1
        """

        if isinstance(channels, str) or isinstance(channels, int):
            channels = [channels]
        protected_channels = []
        for s in channels:
            try:
                c = Channel(self, s)
                if c.protect:
                    # collect protected names so all are reported at once
                    protected_channels.append(c.name)
                else:
                    c.delete()
            except GdbException:
                continue

        if len(protected_channels):
            raise GdbException(_t('Cannot delete protected channels: {}'.format(protected_channels)))

    def delete_line(self, lines):
        """
        Delete line(s) by name or symbol.
:param lines: line name/symbol, or a list of names/symbols .. versionadded:: 9.1 """ if isinstance(lines, str) or isinstance(lines, int): lines = [lines] for s in lines: if type(s) is str and not self.exist_symb_(s, gxapi.DB_SYMB_LINE): continue ls = self.line_name_symb(s)[1] if type(s) is str else s self.unlock_(ls) self.lock_write_(ls) self._db.delete_symb(ls) def delete_line_data(self, lines): """ Delete all data in line(s) by name or symbol but keep the line. :param lines: line name/symbol, or a list of names/symbols .. versionadded:: 9.6 """ if isinstance(lines, str) or isinstance(lines, int): lines = [lines] for s in lines: ls = self.line_name_symb(s)[1] if type(s) is str else s self._delete_line_data(ls) def _delete_line_data(self, ls): channels = self.sorted_chan_list() for ch in channels: cn, cs = self.channel_name_symb(ch) dtype = self.channel_dtype(cs) w = self.channel_width(cs) if w == 1: vv = gxvv.GXvv(dtype=dtype) self.write_channel_vv(ls, cs, vv) else: va = gxva.GXva(width=w, dtype=dtype) self.write_channel_va(ls, cs, va) def select_lines(self, selection='', select=True): """ Change selected state of a line, or group of lines :param selection: string representing selection, comma-delimit multiple selections, or provide a list of selections. :param select: `True` to select, `False` to deselect "L99:800" will select all lines of type "L" in range 99 through 800. | Use a "T" prefix for Tie lines. | Use an "F" prefix to specify lines of a specific flight. | For example, "F10" would select all lines of flight 10. | Use an empty string ("") to select/deselect ALL lines. Invalid line names are ignored. .. 
versionadded:: 9.1 """ if isinstance(selection, str): selection = selection.split(',') for s in selection: if select: self._db.select(s, gxapi.DB_LINE_SELECT_INCLUDE) else: self._db.select(s, gxapi.DB_LINE_SELECT_EXCLUDE) self.clear_extent() # ===================================================================================== # reading and writing def _to_string_chan_list(self, channels): if isinstance(channels, str): if ',' in channels: channels = [c.strip() for c in channels.split(',')] else: channels = [channels] elif isinstance(channels, int): channels = [channels] return [self.channel_name_symb(c)[0] if isinstance(channels, int) else c for c in channels] def sorted_chan_list(self, channels=None): """ Get a list of sorted channels from Gdb, placing x, y and z channels (if defined) at front of list. :param channels: list of channels, strings or symbol number. If None, read all channels :returns: list containing channel names .. versionadded:: 9.6 """ if channels is not None: ch = self._to_string_chan_list(channels) else: ch = list(self.list_channels()) ch.sort(key=str.lower) ch_lower = [c.lower() for c in ch] channels = [] nxlower = nylower = nzlower = '' # put x,y,z at the front xch = self._db.get_xyz_chan_symb(gxapi.DB_CHAN_X) if xch != -1: nx, _ = self.channel_name_symb(xch) nxlower = nx.lower() if nxlower in ch_lower: channels.append(nx) ych = self._db.get_xyz_chan_symb(gxapi.DB_CHAN_Y) if ych != -1: ny, _ = self.channel_name_symb(ych) nylower = ny.lower() if nylower in ch_lower: channels.append(ny) zch = self._db.get_xyz_chan_symb(gxapi.DB_CHAN_Z) if zch != -1: nz, _ = self.channel_name_symb(zch) nzlower = nz.lower() if nzlower in ch_lower: channels.append(nz) for c in ch: clower = c.lower() if (clower == nxlower) or (clower == nylower) or (clower == nzlower): continue channels.append(c) return channels def _expand_chan_list(self, channels): """ expand VA channels and return lists of names, symbols and types""" ch_names = [] ch_symbs = [] c_type = [] for 
c in channels: cn, cs = self.channel_name_symb(c) w = self.channel_width(cs) if w == 1: ch_names.append(cn) ch_symbs.append(cs) c_type.append(self._db.get_chan_type(cs)) else: for i in range(w): ccn, ccs = self.channel_name_symb("{}[{}]".format(cn, i)) ch_names.append(ccn) ch_symbs.append(ccs) c_type.append(self._db.get_chan_type(cs)) return ch_names, ch_symbs, c_type def lock_read_(self, s): """internal function to lock a symbol for read""" try: self._db.lock_symb(s, SYMBOL_LOCK_READ, gxapi.DB_WAIT_INFINITY) except GdbException: raise GdbException(_t('Cannot read lock symbol {}'.format(s))) def lock_write_(self, s): """internal function to lock a symbol for write""" try: self._db.lock_symb(s, SYMBOL_LOCK_WRITE, gxapi.DB_WAIT_INFINITY) except GdbException: raise GdbException(_t('Cannot write lock symbol {}'.format(s))) def unlock_(self, s): """internal_function to unlock a symbol""" if self._db.get_symb_lock(s) != SYMBOL_LOCK_NONE: self._db.un_lock_symb(s) def unlock_all(self): """ Unlock all locked symbols. .. versionadded:: 9.3 """ self._db.un_lock_all_symb() def read_channel_vv(self, line, channel, dtype=None): """ Read data from a single channel, return in a vv. :param line: line name or symbol :param channel: channel name or symbol :param dtype: type wanted, default same as the channel data :returns: vv .. versionadded:: 9.2 """ ln, ls = self.line_name_symb(line, create=True) cn, cs = self.channel_name_symb(channel) if self.channel_width(cs) != 1: raise GdbException(_t("Cannot read a VA channel into a VV.")) if dtype is None: dtype = self.channel_dtype(cs) vv = gxvv.GXvv(dtype=dtype) self.lock_read_(cs) try: self._db.get_chan_vv(ls, cs, vv.gxvv) finally: self.unlock_(cs) vv.unit_of_measure = Channel(self, cs).unit_of_measure return vv def read_channel_va(self, line, channel, dtype=None): """ Read VA data from a single channel, return in a va. 
        :param line:    line name or symbol
        :param channel: channel name or symbol
        :param dtype:   type wanted, default same as the channel data
        :returns:       va

        .. versionadded:: 9.2
        """
        ln, ls = self.line_name_symb(line, create=True)
        cn, cs = self.channel_name_symb(channel)
        if dtype is None:
            dtype = self.channel_dtype(cs)
        w = self.channel_width(cs)
        va = gxva.GXva(width=w, dtype=dtype)
        self.lock_read_(cs)
        try:
            self._db.get_chan_va(ls, cs, va.gxva)
        finally:
            self.unlock_(cs)
        va.unit_of_measure = Channel(self, cs).unit_of_measure
        return va

    def read_channel(self, line, channel, dtype=None):
        """
        Read data from a single channel.

        :param line:    line name or symbol
        :param channel: channel name or symbol
        :param dtype:   type wanted, default same as the channel data
        :returns:       numpy data, fid (start, increment)

        For dtype=np.float, dummy values will be np.nan.  For integer types dummy values
        will be the Geosoft dummy values.

        .. versionadded:: 9.1
        """
        # dispatch on channel width: VV for normal channels, VA for arrays
        if self.channel_width(channel) == 1:
            vv = self.read_channel_vv(line, channel, dtype)
            return vv.get_data(vv.dtype)[0], vv.fid
        else:
            va = self.read_channel_va(line, channel, dtype)
            return va.get_data(va.dtype)[0], va.fid

    def read_line_vv(self, line, channels=None, dtype=None, fid=None, common_fid=False, chan_dtypes=False):
        """
        Read a line of data into VVs stored in a dictionary by channel.

        :param line:        line to read, string or symbol number
        :param channels:    list of channels, strings or symbol number.  If None, read all channels
        :param dtype:       numpy data type for the array, default np.float64 for multi-channel data
                            (unless chan_dtypes is `True`), data type for single channel data.
                            Use "<Unnn" for string type.
        :param common_fid:  `True` to resample all channels to a common fiducial
        :param chan_dtypes: `True` to determine dtype for each vv from channel type, default `False`
        :returns:           list of tuples [(channel_name, vv), ...]

        If a requested channel is a VA, it is with channel names 'name[0]', 'name[1]', etc.

        Examples:

        .. code::

            # npd - returned numpy array shape (n, number of channels)
            # ch  - list of returned channels names, array channels expanded to array[0], array[1], ...
            # fid - tuple (fidStart,fidIncrement), channels resampled as necessary

            data = gdb.read_line_vv('L100')                          # read all channels in line "L100"
            data = gdb.read_line_vv(681)                             # read all channels in line symbol 681
            data = gdb.read_line_vv('L100','X')                      # read channel 'X' from line 'L100'
            data = gdb.read_line_vv('L100',2135)                     # read channel symbol 2135 from 'L100"
            data = gdb.read_line_vv('L100',channels=['X','Y','Z'])   # read a list of channels to (n,3) array
            data = gdb.read_line_vv('L100','X',np.int32)             # read channel 'X' into integer array

        .. versionadded:: 9.2
        """

        ln, ls = self.line_name_symb(line)

        if channels is None:
            channels = self.sorted_chan_list()
        else:
            channels = self._to_string_chan_list(channels)

        # make up channel list, expanding VA channels
        ch_names, ch_symb, c_type = self._expand_chan_list(channels)

        if chan_dtypes:
            dtype = None
        elif dtype is None:
            dtype = np.float64

        # read the data into vv
        chvv = []
        for c in ch_names:
            cs = self._db.find_symb(c, gxapi.DB_SYMB_CHAN)
            vv = self.read_channel_vv(ls, cs, dtype=dtype)
            chvv.append((c, vv))

        # resample?
        if common_fid:

            # determine fiducial range from data
            start = gxapi.GS_R8MX
            incr = gxapi.GS_R8MX
            fend = gxapi.GS_R8MN
            for vv in chvv:
                if vv[1].length > 0:
                    fd = vv[1].fid
                    if fd[0] != gxapi.rDUMMY:
                        if fd[0] < start:
                            start = fd[0]
                        if fd[1] < incr:
                            incr = fd[1]
                        dend = start + incr * (vv[1].length - 1)
                        if dend > fend:
                            fend = dend

            if fid is None:
                if start == gxapi.GS_R8MX:
                    # no channel had a valid fiducial
                    fid = (0.0, 1.0)
                else:
                    fid = (start, incr)

            if start == gxapi.GS_R8MX:
                nvd = 0
            else:
                # number of values needed to span the full fiducial range
                nvd = math.ceil(max((fend - fid[0] - sys.float_info.epsilon), 0) / fid[1]) + 1
            for vv in chvv:
                vv[1].refid(fid, nvd)

        return chvv

    def scan_line_fid(self, line, channels=None):
        """
        Scan channels in a line and return the smallest common fid, line length, data width,
        list of channels

        :param line:     line to read, string or symbol number
        :param channels: list of channels, strings or symbol number.  If empty, read all channels
        :returns:        (fid_start, fid_increment, fid_last, data_width, channel_list)

        .. versionadded:: 9.4
        """
        if channels is None:
            channels = self.sorted_chan_list()
        else:
            channels = self._to_string_chan_list(channels)
        if len(channels) == 0:
            return 0, 1., 0, 0, []

        ln, ls = self.line_name_symb(line)

        # seed the scan with the first channel
        cs = self.channel_name_symb(channels[0])[1]
        fid_start, fid_increment = self.channel_fid(ls, cs)
        self.lock_read_(cs)
        nrows = self.gxdb.get_channel_length(ls, cs)
        self.unlock_(cs)
        if nrows == 0:
            fid_last = fid_start
        else:
            fid_last = fid_start + fid_increment * (nrows - 1)
        n_width = self.channel_width(cs)

        # widen the fid range and accumulate total data width over the rest
        for c in channels[1:]:
            cs = self.channel_name_symb(c)[1]
            n_width += self.channel_width(cs)
            c_start, c_increment = self.channel_fid(ls, cs)
            if c_start != gxapi.rDUMMY:
                self.lock_read_(cs)
                c_last = c_start + c_increment * (self.gxdb.get_channel_length(ls, cs) - 1)
                self.unlock_(cs)
                if fid_start == gxapi.rDUMMY or c_start < fid_start:
                    fid_start = c_start
                if fid_increment == gxapi.rDUMMY or c_increment < fid_increment:
                    fid_increment = c_increment
                if c_last > fid_last:
                    fid_last = c_last

        if fid_start == gxapi.rDUMMY or fid_increment == gxapi.rDUMMY:
            return 0., 1., 0., 0, channels
        return fid_start, fid_increment, fid_last, n_width, channels

    def readLine(self, *args, **kwargs):
        """
        .. deprecated:: 9.2 use read_line()
        """
        return self.read_line(*args, **kwargs)

    @classmethod
    def _num_rows_from_fid(cls, src_fid_start, src_fid_last, fid):
        # rows needed to span [src_fid_start, src_fid_last] at fid (start, incr);
        # +1.5 rounds to nearest while including the final fiducial
        return int((src_fid_last - fid[0])/fid[1] + 1.5)

    def read_line(self, line, channels=None, dtype=None, fid=None, dummy=None):
        """
        Read a line of data into a numpy array.

        :param line:     line to read, string or symbol number
        :param channels: list of channels, strings or symbol number.  If empty, read all channels
        :param dtype:    numpy data type for the array, default np.float64 for multi-channel data,
                         data type for single channel data. Use "<Unnn" for string type.
        :param fid:      required fiducial as tuple (start,incr), default smallest in data
        :param dummy:    dummy_handling for multi-channel read, default leaves dummies in place.:

            ======================== ===================================================
            READ_REMOVE_DUMMYROWS    remove rows with dummies, fiducials lose meaning
            READ_REMOVE_DUMMYCOLUMNS remove columns with dummies
            ======================== ===================================================

        :returns:        2D numpy array shape(records,channels), list of channel names, (fidStart,fidIncr)
        :raises:         GdbException if first channel requested is empty

        VA channels are expanded by element with channel names name[0], name[1], etc.

        This method is intended for relatively simple databases in relatively simple applications.
        If your database has a lot of channels, or wide array channels it will be more efficient
        to read and work with just the channels you need.  See `read_channel`, `read_channel_vv`
        and `read_channel_va`.

        Examples:

        .. code::

            # npd - returned numpy array shape (n, number of channels)
            # ch  - list of returned channels names, array channels expanded to array[0], array[1], ...
            # fid - tuple (fidStart,fidIncrement), channels resampled as necessary

            npd,ch,fid = gdb.read_line('L100')                          # read all channels in line "L100"
            npd,ch,fid = gdb.read_line(681)                             # read all channels in line symbol 681
            npd,ch,fid = gdb.read_line('L100','X')                      # read channel 'X' from line 'L100'
            npd,ch,fid = gdb.read_line('L100',2135)                     # read channel symbol 2135 from 'L100"
            npd,ch,fid = gdb.read_line('L100',channels=['X','Y','Z'])   # read a list of channels to (n,3) array
            npd,ch,fid = gdb.read_line('L100','X',np.int32)             # read channel 'X' into integer array

        .. versionadded:: 9.1
        """
        ls = self.line_name_symb(line)[1]
        fid_start, fid_incr, fid_last, ncols, channels = self.scan_line_fid(line, channels)

        if fid is None:
            fid = (fid_start, fid_incr)
        nrows = self._num_rows_from_fid(fid_start, fid_last, fid)
        if nrows == 0 or ncols == 0:
            # nothing to read - return an empty, correctly-shaped array
            if len(channels) == 0:
                data = np.array([], dtype=dtype)
            else:
                data = np.array([], dtype=dtype).reshape((-1, len(channels)))
            return data, channels, fid

        # read to a numpy array
        npd = np.empty((nrows, ncols), dtype=dtype)
        if npd.dtype == np.float32 or npd.dtype == np.float64:
            dummy_value = np.nan
        else:
            dummy_value = gxu.gx_dummy(npd.dtype)
        all_empty = True
        ch_names = []
        icol = 0
        for ch in channels:
            cn, cs = self.channel_name_symb(ch)
            w = self.channel_width(cs)
            if w == 1:
                vv = self.read_channel_vv(ls, cs, dtype=npd.dtype)
                if vv.length > 0:
                    all_empty = False
                # refid even when empty so the column is sized to nrows
                vv.refid(fid, nrows)
                npd[:, icol] = vv.np
                icol += 1
                ch_names.append(cn)
            else:
                va = self.read_channel_va(ls, cs, dtype=npd.dtype)
                if va.length > 0:
                    all_empty = False
                va.refid(fid, nrows)
                npd[:, icol:icol+w] = va.np
                icol += w
                for i in range(w):
                    ch_names.append('{}[{}]'.format(cn, str(i)))

        nch = len(ch_names)
        if all_empty:
            npd = np.empty((0, ncols), dtype=dtype)

        elif dummy:
            # dummy handling
            if dummy == READ_REMOVE_DUMMYCOLUMNS:
                n_ok = 0

                # shift data and channel names to remove columns containing a dummy
                for i in range(nch):
                    if np.isnan(dummy_value):
                        if np.isnan(npd[:, i]).any():
                            continue
                    elif dummy_value in npd[:, i]:
                        continue
                    if n_ok != i:
                        npd[:, n_ok] = npd[:, i]
                        ch_names[n_ok] = ch_names[i]
                    n_ok += 1
                if n_ok != nch:
                    npd = npd[:, 0:n_ok]
                    ch_names = ch_names[0:n_ok]

            elif dummy == READ_REMOVE_DUMMYROWS:
                # keep only rows with no dummy in any column
                if np.isnan(dummy_value):
                    mask = np.apply_along_axis(lambda a: not (np.isnan(a).any()), 1, npd)
                else:
                    mask = np.apply_along_axis(lambda a: not (dummy_value in a), 1, npd)
                npd = npd[mask, :]
                # rows dropped: fiducials no longer map to records
                fid = (0.0, 1.0)

            else:
                raise GdbException(_t('Unrecognized dummy={}').format(dummy))

        return npd, ch_names, fid

    def read_line_dataframe(self, line, channels=None, fid=None):
        """
        Read a line of data into a Pandas DataFrame

        :param line:     line to read, string or symbol number
        :param channels: list of channels, strings or symbol number.  If empty, read all channels
        :param fid:      required fiducial as tuple (start,incr), default smallest in data
        :returns:        Pandas DataFrame, list of channel names, (fidStart,fidIncr)
        :raises:         GdbException if first channel requested is empty

        VA channels are expanded by element with channel names name[0], name[1], etc.

        This method can be used to conveniently get a table structure of all data corresponding to
        the native types of the channels.  It is however not necessarily the most efficient way to
        get at the data.  If your database has a lot of channels, or wide array channels it will be
        more efficient to read and work with just the channels you need.
        See `read_channel`, `read_channel_vv` and `read_channel_va`.

        This method also does not currently support dummy removal in the same way as `read_line`.

        Examples:

        .. code::

            # df  - Pandas DataFrame
            # ch  - list of returned channels names
            # fid - tuple (fidStart,fidIncrement), channels resampled as necessary

            df,ch,fid = gdb.read_line('L100')                          # read all channels in line "L100"
            df,ch,fid = gdb.read_line(681)                             # read all channels in line symbol 681
            df,ch,fid = gdb.read_line('L100','X')                      # read channel 'X' from line 'L100'
            df,ch,fid = gdb.read_line('L100',2135)                     # read channel symbol 2135 from 'L100"
            df,ch,fid = gdb.read_line('L100',channels=['X','Y','Z'])   # read a list of channels to (n,3) array

        .. versionadded:: 9.5
        """
        df = pd.DataFrame()

        ls = self.line_name_symb(line)[1]
        fid_start, fid_incr, fid_last, ncols, channels = self.scan_line_fid(line, channels)
        ch_names = []

        if fid is None:
            fid = (fid_start, fid_incr)
        nrows = self._num_rows_from_fid(fid_start, fid_last, fid)
        if nrows == 0 or ncols == 0:
            # no data: return a frame with an empty column per requested channel
            for ch in channels:
                cn, cs = self.channel_name_symb(ch)
                w = self.channel_width(cs)
                if w == 1:
                    df[cn] = ()
                    ch_names.append(cn)
                else:
                    for i in range(w):
                        va_cn = '{}[{}]'.format(cn, str(i))
                        df[va_cn] = ()
                        ch_names.append(va_cn)
            return df, ch_names, fid

        icol = 0
        all_empty = True
        for ch in channels:
            cn, cs = self.channel_name_symb(ch)
            w = self.channel_width(cs)
            if w == 1:
                vv = self.read_channel_vv(ls, cs)
                if vv.length > 0:
                    all_empty = False
                vv.refid(fid, nrows)
                df[cn] = vv.np
                icol += 1
                ch_names.append(cn)
            else:
                va = self.read_channel_va(ls, cs)
                if va.length > 0:
                    all_empty = False
                va.refid(fid, nrows)
                icol += w
                for i in range(w):
                    va_cn = '{}[{}]'.format(cn, str(i))
                    df[va_cn] = va.np[:, i]
                    ch_names.append(va_cn)

        if all_empty:
            # Delete one and only row
            df = df.drop([0])

        return df, ch_names, fid

    def write_channel_vv(self, line, channel, vv):
        """
        Write data to a single channel.

        :param line:    line name or symbol
        :param channel: channel name or symbol
        :param vv:      vv data to write

        ..
        versionadded:: 9.2
        """
        ln, ls = self.line_name_symb(line, create=True)
        try:
            cn, cs = self.channel_name_symb(channel)
        except GdbException:
            if type(channel) is str:
                # channel does not exist - create one matching the vv dtype
                cs = self.new_channel(channel, vv.dtype)
                cn = channel
            else:
                raise
        if cn in self.xyz_channels:
            # coordinate data is changing - cached extent is stale
            self.clear_extent()
        self.lock_write_(cs)
        try:
            self._db.put_chan_vv(ls, cs, vv.gxvv)
        finally:
            self.unlock_(cs)
        if vv.unit_of_measure:
            Channel(self, cs).unit_of_measure = vv.unit_of_measure

    def write_channel_va(self, line, channel, va):
        """
        Write VA data to a single channel.

        :param line:    line name or symbol
        :param channel: channel name or symbol
        :param va:      va data to write

        .. versionadded:: 9.2
        """
        ln, ls = self.line_name_symb(line, create=True)
        try:
            cn, cs = self.channel_name_symb(channel)
        except GdbException:
            if type(channel) is str:
                # channel does not exist - create a VA channel of matching width
                cs = self.new_channel(channel, va.dtype, array=va.width)
            else:
                raise
        self.lock_write_(cs)
        try:
            self._db.put_chan_va(ls, cs, va.gxva)
        finally:
            self.unlock_(cs)
        if va.unit_of_measure:
            Channel(self, cs).unit_of_measure = va.unit_of_measure

    def writeDataChan(self, *args, **kwargs):
        """
        .. deprecated:: 9.2 use `write_channel`
        """
        self.write_channel(*args, **kwargs)

    def write_channel(self, line, channel, data, fid=(0.0, 1.0), unit_of_measure=None):
        """
        Write data to a single channel.

        :param line:            line name or symbol
        :param channel:         channel name or symbol
        :param data:            numpy array (2D for VA channel), or a list
        :param fid:             tuple (fid start, increment), default (0.0,1.0)
        :param unit_of_measure: data unit of measurement

        .. versionchanged:: 9.3
            support for setting channel from a list
            added unit_of_measure

        .. versionadded:: 9.1
        """
        ln, ls = self.line_name_symb(line, create=True)

        if not isinstance(data, np.ndarray):
            data = np.array(data)

        if isinstance(channel, str):
            cn = channel
            # creates the channel if it does not exist, sized to the data
            cs = self.new_channel(channel, data.dtype, array=_va_width(data))
        else:
            cn, cs = self.channel_name_symb(channel)

        if cn in self.xyz_channels:
            # coordinate data is changing - cached extent is stale
            self.clear_extent()

        if _va_width(data) == 0:
            # no data to write
            return

        w = self.channel_width(cs)
        if w != _va_width(data):
            raise GdbException(
                _t("Array data width {} does not fit into channel '{}' with width {}").
                format(_va_width(data), cn, w))

        # 1D channel
        if w == 1:

            # get a VV of the data
            vv = gxvv.GXvv(data, fid=fid)

            self.lock_write_(cs)
            try:
                self._db.put_chan_vv(ls, cs, vv.gxvv)
            finally:
                self.unlock_(cs)

        else:

            # get a VA of the data
            va = gxva.GXva(data, fid=fid)

            self.lock_write_(cs)
            try:
                self._db.put_chan_va(ls, cs, va.gxva)
            finally:
                self.unlock_(cs)

        if unit_of_measure:
            Channel(self, cs).unit_of_measure = unit_of_measure

    def write_line_vv(self, line, chan_data):
        """
        Write data to multiple channels in a line.  If no channel list is provided it assumes
        that the data is for all channels from the line, the compliment of read_line().

        :param line:      line to write to, name or symbol
        :param chan_data: numpy array shape (records,channels).  If single dimension, one channel.
                          Channels are created if they do not exist.  VA channels must exist.
        :param chan_data: list of tuples [(channel_name, vv), ]

        .. note::

            chan_data may contain VA data, which is defined by slice (ie. name[0], name[4]...).
            If VA data is included the VA channels must already exist.

        .. versionadded:: 9.2
        """

        for chvv in chan_data:
            ch = chvv[0]
            vv = chvv[1]
            self.write_channel_vv(line, ch, vv)

    def write_line(self, line, data, channels=None, fid=(0.0, 1.0)):
        """
        Write data to a multiple channels in a line.  If no channel list is provided it assumes
        that the data is for all channels from the line, the compliment of read_line().
        :param line:     line to write to, name or symbol
        :param data:     numpy array shape (records,channels).  If single dimension, one channel
        :param channels: channel name or symbol list, or a single name/symbol.  If a single name
                         is specified for multi-column data, a VA channel is assumed.
                         If None, a sorted list of all channels is assumed.
        :param fid:      option fid tuple (start, increment), default (0.0,1.0)

        .. versionadded:: 9.1
        """

        if type(channels) is str:
            # single name for possibly multi-column data: write as one (VA) channel
            self.write_channel(line, channels, data, fid=fid)

        else:

            if channels is None:
                channels = self.sorted_chan_list()
            else:
                channels = self._to_string_chan_list(channels)

            if not isinstance(data, np.ndarray):
                data = np.array(data)
            if data.ndim == 1:
                data = data.reshape((-1, 1))

            # ensure data matches channels
            np_data = 0
            for chan in channels:
                try:
                    ch, cs = self.channel_name_symb(chan)
                    w = self.channel_width(cs)
                except GdbException:
                    # channel does not exist yet - assume a single column
                    w = 1
                np_data += w

            # channel - data mismatch
            if data.shape[1] != np_data:
                raise GdbException(_t('Data dimension ({}) does not match data required by channels ({}).').
                                   format(data.shape, channels))

            # all good, write the data
            np_index = 0
            for chan in channels:
                try:
                    ch, cs = self.channel_name_symb(chan)
                    w = self.channel_width(cs)
                except GdbException:
                    w = 1
                    cs = chan
                self.write_channel(line, cs, data[:, np_index: np_index + w], fid=fid)
                np_index += w

    def list_values(self, chan, umax=1000, selected=True, dupl=50, progress=None, stop=None):
        """
        Build a list of unique values in a channel.  Uniqueness depends on the current display
        format for the field.

        :param chan:     channel to scan
        :param umax:     maximum values allowed, once this maximum is reached scanning stops,
                         default 1000
        :param selected: `True` to scan only selected lines
        :param dupl:     Stop growing list after this many lines fail to grow the list,
                         0 scans all lines
        :param progress: progress reporting function
        :param stop:     stop check function
        :returns:        list of values, represented as a string

        .. versionadded:: 9.1
        """
        lines = list(self.list_lines(select=selected))
        cn, cs = self.channel_name_symb(chan)
        details = self.channel_details(cs)
        # read as strings at the channel's display width so uniqueness follows the display format
        dtype = np.dtype('<U{}'.format(details.get('width')))

        lines.sort(key=str.lower)
        vset = []
        n = 0
        nset = -1
        ndup = 0
        for l in lines:
            try:
                d, c, f = self.read_line(l, cs, dtype=dtype)
            except GdbException:
                continue
            if d.shape[0] == 0:
                continue
            d = np.unique(d)
            vset = np.append(vset, d)
            vset = np.unique(vset)
            if vset.shape[0] > umax:
                break
            if dupl > 0:
                # count consecutive lines that did not grow the set
                if vset.shape[0] == nset:
                    ndup += 1
                    if ndup > dupl:
                        break
                else:
                    ndup = 0
            nset = vset.shape[0]
            n += 1
            if progress:
                progress('Scanning unique values in "{}", {}'.format(cn, str(l)),
                         (n * 100.0) / len(lines))
            if stop:
                if stop():
                    return vset.tolist()

        if vset.shape[0] > umax:
            vset = vset[:umax]

        return vset.tolist()

    def figure_map(self, file_name=None, overwrite=False, title=None, draw=DRAW_AS_POINTS,
                   features=None, **kwargs):
        """
        Create a figure map file from selected lines in the database.

        :param file_name: the name of the map, if None a temporary default map is created.
        :param overwrite: `True` to overwrite map file should it exist
        :param title:     Title added to the image
        :param draw:      `DRAW_AS_POINTS` to draw a dot at each point (default).
                          Long lines are decimated.
                          `DRAW_AS_LINES` to draw lines with a line label at each end.
        :param features:  list of features to place on the map, default is ('SCALE', 'NEATLINE')

            =========== =========================================
            'ALL'       include all features.  This is the default.
            'SCALE'     show a scale bar
            'NEATLINE'  draw a neat-line around the image
            'ANNOT_XY'  annotate map coordinates
            'ANNOT_LL'  annotate map Latitude, Longitude
            =========== =========================================

        :param kwargs:    passed to `geosoft.gxpy.map.Map.new`

        ..
        versionadded:: 9.3
        """

        # uppercase features, use a dict so we pop things we use and report error
        if features is None:
            features = ['ALL']
        if isinstance(features, str):
            features = (features,)
        feature_list = {}
        if features is not None:
            for f in features:
                feature_list[f.upper()] = None
        features = list(feature_list.keys())

        # setup margins
        if not ('margins' in kwargs):

            bottom_margin = 1.0
            if title:
                # leave room for each line of title text
                bottom_margin += len(title.split('\n')) * 1.0
            if 'ALL' in feature_list or 'SCALE' in feature_list:
                bottom_margin += 1.2

            kwargs['margins'] = (1, 1, bottom_margin, 1)

        kwargs['coordinate_system'] = self.coordinate_system

        # work out some non-zero extents
        ex = self.extent_xyz
        if ex[0] is None or ex[1] is None or ex[3] is None or ex[4] is None:
            raise GdbException(_t('Invalid data extent: {}').format(ex))
        mnx, mny, mxx, mxy = (ex[0], ex[1], ex[3], ex[4])
        dx = mxx - mnx
        dy = mxy - mny
        if dx == 0 and dy == 0:
            # single point - pad 50 units each way
            ex = (mnx - 50., mny - 50., mxx + 50., mxy + 50.)
        else:
            # pad a very thin dimension so the figure is not a sliver
            if dx < dy * 0.1:
                d = dy * 0.05
                mnx -= d
                mxx += d
            elif dy < dx * 0.1:
                d = dx * 0.05
                mny -= d
                mxy += d
            ex = (mnx, mny, mxx, mxy)

        if 'inside_margin' not in kwargs:
            kwargs['inside_margin'] = 1
        gmap = gxmap.Map.figure(ex,
                                file_name=file_name,
                                overwrite=overwrite,
                                features=features,
                                title=title,
                                **kwargs)

        # draw each selected line into the map's data view
        x, y, _ = self.xyz_channels
        with gxview.View.open(gmap, "data") as v:
            with gxgroup.Draw(v, 'lines') as g:
                for line in self.list_lines():
                    xvv = self.read_channel_vv(line, x)
                    yvv = self.read_channel_vv(line, y)
                    if draw == DRAW_AS_LINES:
                        g.polyline(gxgeo.PPoint((xvv, yvv)),
                                   pen=gxgroup.Pen(line_thick=0.03 * v.units_per_map_cm))
                    else:
                        g.polypoint(gxgeo.PPoint((xvv, yvv)),
                                    pen=gxgroup.Pen(line_thick=0.03 * v.units_per_map_cm))

        return gmap


class Channel:
    """
    Class to work with database channels.  Use constructor `Channel.new` to create a new channel.
    Use instance properties to work with channel properties.

    :param gdb:  database instance
    :param name: channel name string, must exist - see `new()` to create a new channel

    ..
versionadded:: 9.3 """ def _get(self, fn): self.gdb.lock_read_(self._symb) try: return fn(self._symb) finally: self.gdb.unlock_(self._symb) def _get_str(self, fn): self.gdb.lock_read_(self._symb) try: sr = gxapi.str_ref() fn(self._symb, sr) return sr.value finally: self.gdb.unlock_(self._symb) def lock_set_(self, fn, v): self.gdb.lock_write_(self._symb) try: fn(self._symb, v) finally: self.gdb.unlock_(self._symb) def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return self.name def __init__(self, gdb, name): self.gdb = gdb name, self._symb = gdb.channel_name_symb(name) @classmethod def new(cls, gdb, name, dtype=np.float64, array=1, dup=None, details=None, replace=False, unit_of_measure=None): """ Create a new channel. :param gdb: Geosoft_gdb instance :param name: channel name :param dtype: numpy data type, defaule np.float64 :param array: array size, default 1 :param dup: duplicate properties of this channal (name, symbol or Channel) :param details: dictionary of other channel properties - see `Geosoft_gdb.set_channel_details` :param replace: `True` to replace existing channel. Existing channel information and data is lost. default is `False`. :param unit_of_measure: unit of measurement of the data :return: Channel instance """ if gdb.exist_symb_(name, gxapi.DB_SYMB_CHAN): if replace: gdb.delete_channel(name) else: raise GdbException(_t("Cannot replace existing channel '{}'".format(name))) symb = gdb.new_channel(name, dtype, array=array, dup=dup) if details: gdb.set_channel_details(symb, details) chan = cls(gdb, name) if unit_of_measure: chan.unit_of_measure = unit_of_measure return chan @property def name(self): """ Channel name. .. 
        versionadded:: 9.3
        """
        return self._get_str(self.gdb.gxdb.get_chan_name)

    @name.setter
    def name(self, name):
        name = str(name)
        if name != self.name:
            # validate the new name and refuse to clobber an existing channel before renaming
            if not self.gdb.gxdb.is_chan_name(name):
                raise GdbException(_t('Invalid channel name \'{}\''.format(name)))
            if self.gdb.exist_symb_(name, gxapi.DB_SYMB_CHAN):
                raise GdbException(_t('Cannot rename to an existing channel name \'{}\''.format(name)))
            self.lock_set_(self.gdb.gxdb.set_chan_name, name)

    @property
    def symbol(self):
        """
        Channel symbol

        .. versionadded:: 9.3
        """
        return self._symb

    @property
    def array(self):
        """
        Array channel width, 1 for non-array channels

        .. versionadded:: 9.3
        """
        return self.gdb.channel_width(self._symb)

    @property
    def is_array(self):
        """
        `True` if this is an array channel

        .. versionadded:: 9.3
        """
        return bool(self.array > 1)

    @property
    def decimal(self):
        """
        Number of displayed decimal places, can be set.

        .. versionadded:: 9.3
        """
        return self.gdb.gxdb.get_chan_decimal(self._symb)

    @decimal.setter
    def decimal(self, value):
        self.lock_set_(self.gdb.gxdb.set_chan_decimal, value)

    @property
    def format(self):
        """
        Channel display format:

        ============= ========================================
        FORMAT_NORMAL normal decimal or integer format
        FORMAT_EXP    exponential
        FORMAT_TIME   geosoft time (HH:MM:SS.ssss)
        FORMAT_DATE   date (YYYY/MM/DD)
        FORMAT_GEOGR  geographic (deg.mm.ss.ssss)
        FORMAT_SIGDIG decimals is number of significant digits
        FORMAT_HEX    hexadecimal
        ============= ========================================

        .. versionadded:: 9.3
        """
        return self.gdb.gxdb.get_chan_format(self._symb)

    @format.setter
    def format(self, value):
        self.lock_set_(self.gdb.gxdb.set_chan_format, value)

    @property
    def label(self):
        """
        Channel label used in display graphics, normally the same as the channel name.
        Can be set.

        ..
versionadded:: 9.3 """ sr = gxapi.str_ref() self.gdb.gxdb.get_chan_label(self._symb, sr) return sr.value @label.setter def label(self, value): self.lock_set_(self.gdb.gxdb.set_chan_label, value) @property def type(self): """ Geosoft data type. .. versionadded:: 9.3 """ return self.gdb.gxdb.get_chan_type(self._symb) @property def unit_of_measure(self): """ Unit of measure, can be set. .. versionadded:: 9.3 """ sr = gxapi.str_ref() self.gdb.gxdb.get_chan_unit(self._symb, sr) return sr.value @unit_of_measure.setter def unit_of_measure(self, value): self.lock_set_(self.gdb.gxdb.set_chan_unit, value) @property def width(self): """ Display window width in characters. Can be set. .. versionadded:: 9.3 """ return self.gdb.gxdb.get_chan_width(self._symb) @width.setter def width(self, value): self.lock_set_(self.gdb.gxdb.set_chan_width, value) @property def class_(self): """ Class name to which this channel is associated. Can be set. .. versionadded:: 9.3 """ sr = gxapi.str_ref() self.gdb.gxdb.get_chan_class(self._symb, sr) return sr.value @class_.setter def class_(self, value): self.lock_set_(self.gdb.gxdb.set_chan_class, value) @property def protect(self): """ `True` if this channel is protected from modification. Can be set. .. versionadded:: 9.3 """ return bool(self.gdb.gxdb.get_chan_protect(self._symb)) @protect.setter def protect(self, value): if value: value = 1 else: value = 0 self.lock_set_(self.gdb.gxdb.set_chan_protect, value) @property def locked(self): """ True if symbol is locked. Use property :any:`lock` to determine if read or write lock, or to set the lock. Setting to `False` unlocks the symbol. .. 
versionadded:: 9.3 """ return self.lock != SYMBOL_LOCK_NONE @locked.setter def locked(self, value): if not value: self.gdb.unlock_(self._symb) else: raise GdbException(_t('Use property \'lock\' to set SYMBOL_READ or SYMBOL_WRITE lock.')) @property def lock(self): """ Lock setting: | -1 unlocked (SYMBOL_LOCK_NONE) | 0 read-locked (SYMBOL_LOCK_READ) | 1 write-locked (SYMBOL_LOCK_WRITE) Can be set. .. versionadded 9.3 """ return self.gdb.gxdb.get_symb_lock(self.symbol) @lock.setter def lock(self, value): if self.lock != value: self.gdb.unlock_(self.symbol) self.gdb.gxdb.lock_symb(self.symbol, value, gxapi.DB_WAIT_INFINITY) def delete(self): """ Delete the channel and all associated data. After calling this method this channel instance is no longer valid. .. versionadded:: 9.3 """ if self.protect: raise GdbException(_t("Cannot delete protected channel '{}'".format(self.name))) self.lock = SYMBOL_LOCK_WRITE self.gdb.gxdb.delete_symb(self._symb) self._symb = gxapi.NULLSYMB class Line: """ Class to work with database lines. Use constructor `Line.new` to create a new line. Use instance properties to work with line properties. :param gdb: `Geosoft_gdb` instance :param name: line name string, must exist - see `new()` to create a new line .. 
versionadded:: 9.3 """ def _get(self, fn): self.gdb.lock_read_(self._symb) try: return fn(self._symb) finally: self.gdb.unlock_(self._symb) def _get_str(self, fn): self.gdb.lock_read_(self._symb) try: sr = gxapi.str_ref() fn(self._symb, sr) return sr.value finally: self.gdb.unlock_(self._symb) def lock_set_(self, fn, v): """write_lock, set and release a gdb attribute that requires locking to write.""" self.gdb.lock_write_(self._symb) try: fn(self._symb, v) finally: self.gdb.unlock_(self._symb) def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return self.name def __init__(self, gdb, name): self.gdb = gdb name, self._symb = gdb.line_name_symb(name) @classmethod def new(cls, gdb, name, linetype=None, group=None, dup=None, replace=False): """ Create a new line. :param gdb: `Geosoft_gdb` instance :param name: line name :param linetype: line type for creating a new line, ignored if group defines ================= ========================================= SYMB_LINE_NORMAL normal lines, name is a string SYMB_LINE_FLIGHT flight lines, first letter is line type ================= ========================================= :param group: group name for a grouped class :param dup: duplicate properties of this line (name, symbol or Line). :param replace: `True` to replace line if it exists. Default is `False` . :returns: Line instance .. versionadded:: 9.3 """ if group is None and dup is None and not is_valid_line_name(name): raise GdbException(_t('Invalid line name: {}'.format(name))) if gdb.exist_symb_(name, gxapi.DB_SYMB_LINE): if replace: gdb.delete_line(name) else: raise GdbException(_t("Cannot replace existing line '{}'".format(name))) gdb.new_line(name, linetype, group=group, dup=dup) return cls(gdb, name) @property def name(self): """ Line name, consistent with names constructed by `create_line_name`. To change a line name change the type, number or version. .. 
        versionadded:: 9.3
        """
        return self._get_str(self.gdb.gxdb.get_symb_name)

    @property
    def symbol(self):
        """
        Line symbol

        .. versionadded:: 9.3
        """
        return self._symb

    @property
    def type(self):
        """
        Line type, which can be set:

            | LINE_TYPE_NORMAL
            | LINE_TYPE_BASE
            | LINE_TYPE_TIE
            | LINE_TYPE_TEST
            | LINE_TYPE_TREND
            | LINE_TYPE_SPECIAL
            | LINE_TYPE_RANDOM

        .. versionadded:: 9.3
        """
        return self._get(self.gdb.gxdb.line_type)

    @type.setter
    def type(self, value):
        self.lock_set_(self.gdb.gxdb.set_line_type, value)

    @property
    def category(self):
        """
        Line category, which can be set:

            | LINE_CATEGORY_FLIGHT
            | LINE_CATEGORY_GROUP
            | LINE_CATEGORY_NORMAL

        .. versionadded:: 9.3
        """
        return self._get(self.gdb.gxdb.line_category)

    @property
    def date(self):
        """
        Line date. Can be set.

        .. versionadded:: 9.3
        """
        return self._get(self.gdb.gxdb.line_date)

    @date.setter
    def date(self, value):
        self.lock_set_(self.gdb.gxdb.set_line_date, value)

    @property
    def flight(self):
        """
        Line flight number (flight/cruise/survey event). Can be set.

        .. versionadded:: 9.3
        """
        return self._get(self.gdb.gxdb.line_flight)

    @flight.setter
    def flight(self, value):
        self.lock_set_(self.gdb.gxdb.set_line_flight, value)

    @property
    def number(self):
        """
        Line number. Can be set

        .. versionadded:: 9.3
        """
        return self._get(self.gdb.gxdb.line_number)

    @number.setter
    def number(self, value):
        # line numbers are integral; coerce so float/string input is accepted
        self.lock_set_(self.gdb.gxdb.set_line_num, int(value))

    @property
    def version(self):
        """
        Line version number. Can be set.

        .. versionadded:: 9.3
        """
        return self._get(self.gdb.gxdb.line_version)

    @version.setter
    def version(self, value):
        self.lock_set_(self.gdb.gxdb.set_line_ver, value)

    @property
    def grouped(self):
        """
        True if this is a grouped line.

        .. versionadded:: 9.3
        """
        return self.category == LINE_CATEGORY_GROUP

    @property
    def group(self):
        """
        The lines group class name, '' for group lines (LINE_CATEGORY_GROUP).
        Only works for lines that are part of a group. Can be set.

        ..
        versionadded:: 9.3
        """
        if self.category == LINE_CATEGORY_GROUP:
            return self._get_str(self.gdb.gxdb.get_group_class)
        else:
            # group class name is only defined for grouped lines
            return None

    @group.setter
    def group(self, value):
        if self.category == LINE_CATEGORY_GROUP:
            self.lock_set_(self.gdb.gxdb.set_group_class, value)
        else:
            raise GdbException(_t('Line \'{}\' is not a grouped line.'.format(self.name)))

    @property
    def selected(self):
        """True if this line is selected, can be set."""
        return self.gdb.gxdb.get_line_selection(self._symb) == gxapi.DB_LINE_SELECT_INCLUDE

    @selected.setter
    def selected(self, value):
        if bool(value):
            self.gdb.gxdb.set_line_selection(self._symb, gxapi.DB_LINE_SELECT_INCLUDE)
        else:
            self.gdb.gxdb.set_line_selection(self._symb, gxapi.DB_LINE_SELECT_EXCLUDE)

    @property
    def locked(self):
        """
        True if symbol is locked.  Use property :any:`lock` to determine if read or write lock,
        or to set the lock.

        Setting to `False` unlocks the symbol.

        .. versionadded:: 9.3
        """
        return self.lock != SYMBOL_LOCK_NONE

    @locked.setter
    def locked(self, value):
        if not value:
            self.gdb.unlock_(self._symb)
        else:
            # cannot infer read vs. write intent from a bare True
            raise GdbException(_t('Use property \'lock\' to set SYMBOL_READ or SYMBOL_WRITE lock.'))

    @property
    def lock(self):
        """
        Lock setting:

            | -1 unlocked (SYMBOL_LOCK_NONE)
            | 0 read-locked (SYMBOL_LOCK_READ)
            | 1 write-locked (SYMBOL_LOCK_WRITE)

        Can be set.

        .. versionadded:: 9.3
        """
        return self.gdb.gxdb.get_symb_lock(self.symbol)

    @lock.setter
    def lock(self, value):
        if self.lock != value:
            # release any current lock before acquiring the requested one
            self.gdb.unlock_(self.symbol)
            self.gdb.gxdb.lock_symb(self.symbol, value, gxapi.DB_WAIT_INFINITY)

    def delete(self):
        """
        Delete the line and all data associated with the line.  After calling this method
        this line instance is no longer valid.

        .. versionadded:: 9.3
        """
        self.gdb.delete_line(self.symbol)
        self._symb = gxapi.NULLSYMB

    def delete_data(self):
        """
        Delete all data in a line but keep the line

        ..
versionadded:: 9.6 """ self.gdb.delete_line_data(self.symbol) # ================================= # methods that work with line data def bearing(self): """ Return bearing of a line based on location of the first and last point in the line. Returns None if the line is empty or first and last points are the same. .. versionadded:: 9.3 """ x, y, z = self.gdb.xyz_channels x = self.gdb.channel_name_symb(x)[1] y = self.gdb.channel_name_symb(y)[1] self.gdb.lock_read_(x) self.gdb.lock_read_(y) try: bearing = gxapi.GXDU.direction(self.gdb.gxdb, self._symb, x, y) finally: self.gdb.unlock_(y) self.gdb.unlock_(x) self.lock_set_(self.gdb.gxdb.set_line_bearing, bearing) if bearing == gxapi.rDUMMY: return None return bearing <file_sep>/geosoft/gxapi/GXDU.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDU(gxapi_cy.WrapDU): """ GXDU class. `GXDU <geosoft.gxapi.GXDU>` functions provide a variety of common utilities that can be applied efficiently to the contents of a database. Most `GXDU <geosoft.gxapi.GXDU>` library functions take as their first argument a `GXDB <geosoft.gxapi.GXDB>` object, and apply standard processes to data stored in an OASIS database, including import and export functions. 
**Note:** The following defines are used by GX functions but are not required for any methods: :ref:`DU_LINES` """ def __init__(self, handle=0): super(GXDU, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDU <geosoft.gxapi.GXDU>` :returns: A null `GXDU <geosoft.gxapi.GXDU>` :rtype: GXDU """ return GXDU() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def table_look1(cls, db, line, i_ch, o_ch, ref_field, l_field, mode, close, tb): """ Create a new channel using a single reference table :param db: Database :param line: Line Handle :param i_ch: Lookup reference channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output Channel Token [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param ref_field: Reference field name in table :param l_field: Lookup output name in table :param mode: :ref:`DU_LOOKUP` :param close: CLOSE lookup distance. If 0.0, distance is calculated from lookup reference channel. :param tb: `GXTB <geosoft.gxapi.GXTB>` table Object :type db: GXDB :type line: int :type i_ch: int :type o_ch: int :type ref_field: str :type l_field: str :type mode: int :type close: float :type tb: GXTB .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Fails if table does not contain requested fields. 
The nominal data sample spacing for the CLOSE options is calculated by finding the fiducial increment the - primary index channel for Lookup1C_DU; - secondary index channel for Lookup2C_DU, LookupIValC_DU and LookupRValC_DU """ gxapi_cy.WrapDU._table_look1(GXContext._get_tls_geo(), db, line, i_ch, o_ch, ref_field.encode(), l_field.encode(), mode, close, tb) @classmethod def table_look2(cls, db, line, r1_ch, r2_ch, o_ch, r1_field, r2_field, l_field, mode, close, tb): """ Create a new channel using a double reference table. :param db: Database :param line: Line Handle :param r1_ch: Primary reference channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param r2_ch: Secondary reference channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param r1_field: Primary reference field name in table :param r2_field: Secondary reference field name in table :param l_field: Lookup result field name in table :param mode: :ref:`DU_LOOKUP` :param close: CLOSE lookup distance. If 0.0, distance is calculated from secondary reference channel. :param tb: Table Object :type db: GXDB :type line: int :type r1_ch: int :type r2_ch: int :type o_ch: int :type r1_field: str :type r2_field: str :type l_field: str :type mode: int :type close: float :type tb: GXTB .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Fails if table does not contain requested fields. 
The nominal data sample spacing for the CLOSE options is calculated by finding the fiducial increment the - primary index channel for Lookup1C_DU; - secondary index channel for Lookup2C_DU, LookupIValC_DU and LookupRValC_DU """ gxapi_cy.WrapDU._table_look2(GXContext._get_tls_geo(), db, line, r1_ch, r2_ch, o_ch, r1_field.encode(), r2_field.encode(), l_field.encode(), mode, close, tb) @classmethod def table_look_i2(cls, db, line, val, i_ch, o_ch, r1, r2, field, mode, dist, tb): """ Create a new channel using constant integer primary reference and a secondary reference table. :param db: Database :param line: Line Handle :param val: Lookup primary reference value :param i_ch: Lookup secondary reference channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output Channel Token [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param r1: Primary reference field name in table :param r2: Secondary reference field name in table :param field: Lookup result field name in table :param mode: :ref:`DU_LOOKUP` :param dist: CLOSE lookup distance. If 0.0, distance calculated from secondary reference channel. :param tb: Table Object :type db: GXDB :type line: int :type val: int :type i_ch: int :type o_ch: int :type r1: str :type r2: str :type field: str :type mode: int :type dist: float :type tb: GXTB .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Fails if table does not contain requested fields. 
The nominal data sample spacing for the CLOSE options is calculated by finding the fiducial increment the - primary index channel for Lookup1C_DU; - secondary index channel for Lookup2C_DU, LookupIValC_DU and LookupRValC_DU """ gxapi_cy.WrapDU._table_look_i2(GXContext._get_tls_geo(), db, line, val, i_ch, o_ch, r1.encode(), r2.encode(), field.encode(), mode, dist, tb) @classmethod def table_look_r2(cls, db, line, val, i_ch, o_ch, r1, r2, field, mode, dist, tb): """ Create a new channel using a constant real primary reference and a secondary reference table. :param db: Database :param line: Line Handle :param val: Primary reference value :param i_ch: Secondary reference value [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output Channel Token [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param r1: Primary reference field name in table :param r2: Secondary reference field name in table :param field: Lookup result field name in table :param mode: :ref:`DU_LOOKUP` :param dist: CLOSE lookup distance. If 0.0, distance calculated from secondary reference channel. :param tb: Table Object :type db: GXDB :type line: int :type val: float :type i_ch: int :type o_ch: int :type r1: str :type r2: str :type field: str :type mode: int :type dist: float :type tb: GXTB .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Fails if table does not contain requested fields. 
The nominal data sample spacing for the CLOSE options is calculated by finding the fiducial increment the - primary index channel for Lookup1C_DU; - secondary index channel for Lookup2C_DU, LookupIValC_DU and LookupRValC_DU """ gxapi_cy.WrapDU._table_look_r2(GXContext._get_tls_geo(), db, line, val, i_ch, o_ch, r1.encode(), r2.encode(), field.encode(), mode, dist, tb) @classmethod def ado_table_names(cls, connect, vv): """ Scans a ADO-compliant database and returns the table names in a `GXVV <geosoft.gxapi.GXVV>` :param connect: Database connection string :param vv: `GXVV <geosoft.gxapi.GXVV>` to return names in :type connect: str :type vv: GXVV .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXVV <geosoft.gxapi.GXVV>` must be created to hold strings of length `STR_DB_SYMBOL <geosoft.gxapi.STR_DB_SYMBOL>`; i.e. use Creat_VV(-`STR_DB_SYMBOL <geosoft.gxapi.STR_DB_SYMBOL>`, 0), or it will assert. """ gxapi_cy.WrapDU._ado_table_names(GXContext._get_tls_geo(), connect.encode(), vv) @classmethod def an_sig(cls, db, line, i_ch, o_ch): """ Calculate the Analytic Signal of a channel. :param db: Database :param line: Line handle :param i_ch: Input channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output Analytic Signal channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type line: int :type i_ch: int :type o_ch: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._an_sig(GXContext._get_tls_geo(), db, line, i_ch, o_ch) @classmethod def append(cls, d_bi, d_bo, ignore): """ Append a source database onto a destination database. :param d_bi: Source Database :param d_bo: Destination Database :param ignore: Ignore write protection on channels? 
(TRUE or FALSE) :type d_bi: GXDB :type d_bo: GXDB :type ignore: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the source database and destination database have channels with the same name, then data is appended onto the end of the channel in lines which have the same number. If a channel in the destination database is not also in the source database, it is ignored. """ gxapi_cy.WrapDU._append(GXContext._get_tls_geo(), d_bi, d_bo, ignore) @classmethod def avg_azimuth(cls, db, precision, azimuth): """ Returns average azimuth of selected lines. :param db: Database Object :param precision: Precision in degrees (1 to 45) :param azimuth: Azimuth value returned :type db: GXDB :type precision: float :type azimuth: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Direction in degrees azimuth (clockwise relative the +Y direction). The result is in the range -90 < azimuth <= 90. The method handles lines going in opposite directions (they do not average to 0!) The method takes a precision, which is used to generate a series of "test" angles. The dot product of the line directions is taken with each of the test angles, and the absolute values summed. The maximum value occurs at the angle which most closely approximates the trend direction of the lines. """ azimuth.value = gxapi_cy.WrapDU._avg_azimuth(GXContext._get_tls_geo(), db, precision, azimuth.value) @classmethod def avg_azimuth2(cls, db, xCh, yCh, precision, azimuth): """ Returns average azimuth of selected lines. 
:param db: Database Object :param xCh: X channel name :param yCh: Y channel name :param precision: Precision in degrees (1 to 45) :param azimuth: Azimuth value returned :type db: GXDB :type xCh: str :type yCh: str :type precision: float :type azimuth: float_ref .. versionadded:: 2023.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Same as AvgAzimuth, but input the X and Y channels to use. """ azimuth.value = gxapi_cy.WrapDU._avg_azimuth2(GXContext._get_tls_geo(), db, xCh.encode(), yCh.encode(), precision, azimuth.value) @classmethod def average_spacing(cls, db, line, xCh, yCh): """ Returns the average spacing along a line. This is a simple average of the individual point separations after dummies are removed. Returns DUMMY if there are fewer than two valid locations. :param db: Database :param line: Line handle :param xCh: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param yCh: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type db: GXDB :type line: int :type xCh: int :type yCh: int :rtype: float .. versionadded:: 2022.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDU._average_spacing(GXContext._get_tls_geo(), db, line, xCh, yCh) return ret_val @classmethod def base_data(cls, db, line, in_ch, time_ch, out_ch, tb): """ This method corrects an entire database line using a time-based correction table. It is given 2 input channel tokens and 1 output channel token as well as the table object to use. 
:param db: Database :param line: Line Handle to apply correction to :param in_ch: Input Channel Token [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param time_ch: Time Channel Token [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param out_ch: Output Channel Token [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param tb: Table Object (a Date/Time/Correction Table) :type db: GXDB :type line: int :type in_ch: int :type time_ch: int :type out_ch: int :type tb: GXTB .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._base_data(GXContext._get_tls_geo(), db, line, in_ch, time_ch, out_ch, tb) @classmethod def base_data_ex(cls, db, line, in_ch, time_ch, out_ch, tb, flag): """ This method corrects an entire database line using a time-based correction table. It is given 2 input channel tokens and 1 output channel token as well as the table object to use (table sort flag=1 for sort, =0 for no sort). :param db: Database :param line: Line Handle to apply correction to :param in_ch: Input Channel Token [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param time_ch: Time Channel Token [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param out_ch: Output Channel Token [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param tb: Table Object (a Date/Time/Correction Table) :param flag: Table sort flag: 0 - do not sort, 1 - do sort. :type db: GXDB :type line: int :type in_ch: int :type time_ch: int :type out_ch: int :type tb: GXTB :type flag: int .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._base_data_ex(GXContext._get_tls_geo(), db, line, in_ch, time_ch, out_ch, tb, flag) @classmethod def bound_line(cls, db, line, x_chan, y_chan, pply): """ Set map boundary clip limits. 
:param db: Database :param line: Line Handle [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param x_chan: X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_chan: Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param pply: Polygon Object to use :type db: GXDB :type line: int :type x_chan: int :type y_chan: int :type pply: GXPLY .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._bound_line(GXContext._get_tls_geo(), db, line, x_chan, y_chan, pply) @classmethod def bp_filt(cls, db, line, i_ch, o_ch, sw, lw, filt_len): """ This method applies a band-pass filter to the specified line/channel and places the output in the output channel. :param db: Database :param line: Line handle :param i_ch: Input channel to filter [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output filtered channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param sw: Short wavelength cutoff, 0 for highpass :param lw: Long wavelength cutoff, 0 for lowpass :param filt_len: Filter Length, 0 for default length :type db: GXDB :type line: int :type i_ch: int :type o_ch: int :type sw: float :type lw: float :type filt_len: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the short and long wavelengths are <= 0, the input channel is simply copied to the output channel without filtering. """ gxapi_cy.WrapDU._bp_filt(GXContext._get_tls_geo(), db, line, i_ch, o_ch, sw, lw, filt_len) @classmethod def break_line(cls, db, line, chan): """ Break up a line based on line numbers in a channel. 
:param db: Database :param line: Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param chan: Channel containing line numbers [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type db: GXDB :type line: int :type chan: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._break_line(GXContext._get_tls_geo(), db, line, chan) @classmethod def break_line2(cls, db, line, chan, reset_fi_ds): """ Break up a line based on line numbers in a channel. :param db: Database :param line: Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param chan: Channel containing line numbers [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param reset_fi_ds: Reset starting fiducials to zero (0: No, 1: Yes) :type db: GXDB :type line: int :type chan: int :type reset_fi_ds: int .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The same as BreakLine, but with an option to reset each line's starting fiducial to zero. """ gxapi_cy.WrapDU._break_line2(GXContext._get_tls_geo(), db, line, chan, reset_fi_ds) @classmethod def break_line_to_groups(cls, db, line, chan, cl): """ Break up a line into group-lines based on a channel. :param db: Database :param line: Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param chan: Channel containing line numbers [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param cl: Class name for new group lines (can be "") :type db: GXDB :type line: int :type chan: int :type cl: str .. versionadded:: 5.1.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The original line will be deleted. 
This is similar to `break_line <geosoft.gxapi.GXDU.break_line>`, but the output lines are "group" lines, without the line type letters at the start. (See db.gxh for information of Group Lines). All channels are associated with each group line, and the input class name is assigned to each group. Class names for groups ensure that (for instance) if you add a new channel to one group of a given class, it will get added to all other groups in the same class. If the class name is left empty, then this will NOT be true. (Groups without class names are treated as isolated entities for the purposes of channel loading). """ gxapi_cy.WrapDU._break_line_to_groups(GXContext._get_tls_geo(), db, line, chan, cl.encode()) @classmethod def break_line_to_groups2(cls, db, line, chan, cl, reset_fi_ds): """ Break up a line into group-lines based on a channel. :param db: Database :param line: Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param chan: Channel containing line numbers [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param cl: Class name for new group lines (can be "") :param reset_fi_ds: Reset starting fiducials to zero (0: No, 1: Yes) :type db: GXDB :type line: int :type chan: int :type cl: str :type reset_fi_ds: int .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The same as BreakLineToGroups, but with an option to reset each line's starting fiducial to zero. """ gxapi_cy.WrapDU._break_line_to_groups2(GXContext._get_tls_geo(), db, line, chan, cl.encode(), reset_fi_ds) @classmethod def b_spline(cls, db, line, i_ch, o_ch, sd, rou, tau): """ B-spline Interpolate a Channel. 
:param db: Database :param line: Line handle :param i_ch: Channel to interpolate [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output interpolated channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param sd: Data error (Std Dev > 0.0) :param rou: Roughness (Rou > 0.0) :param tau: Tension (0.<= Tension <=1.) :type db: GXDB :type line: int :type i_ch: int :type o_ch: int :type sd: float :type rou: float :type tau: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ .. seealso:: `trend <geosoft.gxapi.GXDU.trend>` """ gxapi_cy.WrapDU._b_spline(GXContext._get_tls_geo(), db, line, i_ch, o_ch, sd, rou, tau) @classmethod def closest_point(cls, db, x, y, xp, yp, line, fid): """ Return closest data point to input location. :param x: X location :param y: Y location :param xp: Located X location :param yp: Located Y location :param line: Line for located point :param fid: Fiducial of located point :type db: GXDB :type x: float :type y: float :type xp: float_ref :type yp: float_ref :type line: int_ref :type fid: float_ref .. versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Selected lines are scanned for the (X, Y) location which is closest to the input location. The line and fiducial of the point are returned. Will register an error if no valid (X, Y) locations are found. """ xp.value, yp.value, line.value, fid.value = gxapi_cy.WrapDU._closest_point(GXContext._get_tls_geo(), db, x, y, xp.value, yp.value, line.value, fid.value) @classmethod def copy_line(cls, db, i_line, o_line): """ Copy a line. 
:param db: Database :param i_line: Input Line [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_line: Output Line [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type i_line: int :type o_line: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Existing channels in the output line will be replaced by copied channels. """ gxapi_cy.WrapDU._copy_line(GXContext._get_tls_geo(), db, i_line, o_line) @classmethod def copy_line_across(cls, idb, i_line, odb, o_line): """ Copy a line from one database to another. :param idb: Input Database :param i_line: Input Line [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param odb: Output Database :param o_line: Output Line [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type idb: GXDB :type i_line: int :type odb: GXDB :type o_line: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Existing channels in the output line will be replaced by copied channels. .. seealso:: `copy_line_chan_across <geosoft.gxapi.GXDU.copy_line_chan_across>` function """ gxapi_cy.WrapDU._copy_line_across(GXContext._get_tls_geo(), idb, i_line, odb, o_line) @classmethod def copy_line_chan_across(cls, idb, i_line, vv_chan, odb, o_line): """ Copy a list of channels in a line from one database to another. :param idb: Input Database :param i_line: Input Line [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param vv_chan: `GXVV <geosoft.gxapi.GXVV>` containing a list of channel symbols, must be of INT :param odb: Output Database :param o_line: Output Line [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type idb: GXDB :type i_line: int :type vv_chan: GXVV :type odb: GXDB :type o_line: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Existing channels in the output line will be replaced by copied channels. .. seealso:: `copy_line_across <geosoft.gxapi.GXDU.copy_line_across>` function """ gxapi_cy.WrapDU._copy_line_chan_across(GXContext._get_tls_geo(), idb, i_line, vv_chan, odb, o_line) @classmethod def copy_line_masked(cls, db, i_line, mask, prune, o_line): """ Copy a line, prune items based on a mask channel :param db: Database Object :param i_line: Input Line Symbol [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param mask: Mask Channel Symbol [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param prune: :ref:`VVU_PRUNE` :param o_line: Output Line Symbol [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type i_line: int :type mask: int :type prune: int :type o_line: int .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The input line's channel data is ReFidded to the mask channel, and then pruned from the output line data, based on the value of the VVU_PRUNE_XXX variable. For `VVU_PRUNE_DUMMY <geosoft.gxapi.VVU_PRUNE_DUMMY>`, only those items where the mask channel value is not a dummy are retained, while the complement is retained for VV_PRUNE_VALID. """ gxapi_cy.WrapDU._copy_line_masked(GXContext._get_tls_geo(), db, i_line, mask, prune, o_line) @classmethod def dao_table_names(cls, file, type, vv): """ Scans a DAO-compliant database and returns the table names in a `GXVV <geosoft.gxapi.GXVV>` :param file: Database file name :param type: Database Type :param vv: `GXVV <geosoft.gxapi.GXVV>` to return names in :type file: str :type type: str :type vv: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXVV <geosoft.gxapi.GXVV>` must be created to hold strings of length `STR_DB_SYMBOL <geosoft.gxapi.STR_DB_SYMBOL>`; i.e. use Creat_VV(-`STR_DB_SYMBOL <geosoft.gxapi.STR_DB_SYMBOL>`, 0), or it will assert. """ gxapi_cy.WrapDU._dao_table_names(GXContext._get_tls_geo(), file.encode(), type.encode(), vv) @classmethod def decimate(cls, db, line, i_ch, o_ch, n): """ Copy and decimate a channel :param db: Database :param line: Line handle :param i_ch: Origin Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Destination Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param n: Decimation factor :type db: GXDB :type line: int :type i_ch: int :type o_ch: int :type n: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._decimate(GXContext._get_tls_geo(), db, line, i_ch, o_ch, n) @classmethod def diff(cls, db, line, i_ch, o_ch, n): """ Calculate differences within a channel. :param db: Database :param line: Line handle :param i_ch: Origin Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Destination Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param n: Number of differences :type db: GXDB :type line: int :type i_ch: int :type o_ch: int :type n: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Differences with dummies result in dummies. An even number of differences locates data accurately. An odd number of differences locates result 1/2 element lower in the `GXVV <geosoft.gxapi.GXVV>`. 
""" gxapi_cy.WrapDU._diff(GXContext._get_tls_geo(), db, line, i_ch, o_ch, n) @classmethod def distance(cls, db, line, x_ch, y_ch, o_ch): """ Create a distance channel from X and Y. :param db: Database :param line: Line symbol :param x_ch: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_ch: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output Distance channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type o_ch: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._distance(GXContext._get_tls_geo(), db, line, x_ch, y_ch, o_ch) @classmethod def distance_3d(cls, db, line, x_ch, y_ch, z_ch, type, o_ch): """ Create a distance channel from XY or XYZ with direction options. :param db: Database :param line: Line symbol :param x_ch: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_ch: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_ch: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] (can be `NULLSYMB <geosoft.gxapi.NULLSYMB>`) :param type: :ref:`DU_DISTANCE_CHANNEL_TYPE` :param o_ch: Output Distance channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type z_ch: int :type type: int :type o_ch: int .. versionadded:: 8.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._distance_3d(GXContext._get_tls_geo(), db, line, x_ch, y_ch, z_ch, type, o_ch) @classmethod def distline(cls, db, line, x_ch, y_ch, dist): """ Calculate cummulative distance for a line. 
:param db: Database :param line: Line symbol :param x_ch: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_ch: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param dist: Cummulative distance (retruned) :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type dist: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ dist.value = gxapi_cy.WrapDU._distline(GXContext._get_tls_geo(), db, line, x_ch, y_ch, dist.value) @classmethod def dup_chan_locks(cls, d_bi, d_bo): """ Duplicate all channels protect-info from input `GXDB <geosoft.gxapi.GXDB>`. :param d_bi: Input Database handle :param d_bo: Output Database handle. :type d_bi: GXDB :type d_bo: GXDB .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDU._dup_chan_locks(GXContext._get_tls_geo(), d_bi, d_bo) @classmethod def dup_chans(cls, d_bi, d_bo): """ Duplicate all channels from input `GXDB <geosoft.gxapi.GXDB>`. :param d_bi: Input Database handle :param d_bo: Output Database handle. :type d_bi: GXDB :type d_bo: GXDB .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDU._dup_chans(GXContext._get_tls_geo(), d_bi, d_bo) @classmethod def edit_duplicates(cls, db, line, x_ch, y_ch, option, single, fid_num): """ Edit duplicate readings at individual location :param db: Database :param line: Line :param x_ch: Channel X, unlocked :param y_ch: Channel Y, unlocked :param option: :ref:`DB_DUP` :param single: :ref:`DB_DUPEDIT` :param fid_num: Fiducial number (required if `DB_DUPEDIT_SINGLE <geosoft.gxapi.DB_DUPEDIT_SINGLE>`) :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type option: int :type single: int :type fid_num: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** All the channels must be of the same fid incr/start and length. Protected channels are modified automatically. """ gxapi_cy.WrapDU._edit_duplicates(GXContext._get_tls_geo(), db, line, x_ch, y_ch, option, single, fid_num) @classmethod def export1(cls, db, format, cur_line, chan_vv, chan, data, dummies, header): """ Export to a specific format. :param db: Database :param format: :ref:`DU_EXPORT` :param cur_line: Current line :param chan_vv: List of channels - channel symbols stored as INT :param chan: :ref:`DU_CHANNELS` :param data: Data file name :param dummies: Write out dummies? :param header: Include a header with channel names? :type db: GXDB :type format: int :type cur_line: str :type chan_vv: GXVV :type chan: int :type data: str :type dummies: int :type header: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** For databases with both groups and lines: If both lines and groups are selected, save only the lines. 
If no lines are selected, (only groups), save the current line if it is (1) a group and (2) selected, else save the first selected group. --- Option to filter out data where one of the channels has a dummy in it. Option to allow a header with the channel names. The `DU_CHANNELS_DISPLAYED <geosoft.gxapi.DU_CHANNELS_DISPLAYED>` option can be used to export any selection of channels, listed by the symbols (DB_SYMB) values, cast to int values and stored in a `GXVV <geosoft.gxapi.GXVV>`. """ gxapi_cy.WrapDU._export1(GXContext._get_tls_geo(), db, format, cur_line.encode(), chan_vv, chan, data.encode(), dummies, header) @classmethod def export2(cls, db, format, cur_line, chan_vv, chan, data, dummies, header, line_names): """ Like `export1 <geosoft.gxapi.GXDU.export1>`, but include line names as data. :param db: Database :param format: :ref:`DU_EXPORT` :param cur_line: Current line :param chan_vv: List of channels - channel symbols stored as INT :param chan: :ref:`DU_CHANNELS` :param data: Data file name :param dummies: Write out dummies? :param header: Include a header with channel names? :param line_names: Include line names as data? :type db: GXDB :type format: int :type cur_line: str :type chan_vv: GXVV :type chan: int :type data: str :type dummies: int :type header: int :type line_names: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `export1 <geosoft.gxapi.GXDU.export1>`. The line names are printed as the first column of data exported. """ gxapi_cy.WrapDU._export2(GXContext._get_tls_geo(), db, format, cur_line.encode(), chan_vv, chan, data.encode(), dummies, header, line_names) @classmethod def export_amira(cls, db, wa, one_cols_ch, array_ch, time_ch, errors_ch, datatype, units, config, instrument, frequency): """ Export to database an AMIRA data file. 
:param db: Database :param wa: AMIRA data file handle :param one_cols_ch: Single column channel names, supporting comma (,) separated names of multiple channels, maximum 32 channels :param array_ch: `GXVA <geosoft.gxapi.GXVA>` channel name, required :param time_ch: Optional Time channel name (must be `GXVA <geosoft.gxapi.GXVA>` channel and same array size as above `GXVA <geosoft.gxapi.GXVA>` channel) :param errors_ch: Optional Errors channel name (must be `GXVA <geosoft.gxapi.GXVA>` channel and same array size as above `GXVA <geosoft.gxapi.GXVA>` channel) :param datatype: Mandatory fields: DATATYPE :param units: UNITS :param config: CONFIG :param instrument: INSTRUMENT :param frequency: FREQUENCY :type db: GXDB :type wa: GXWA :type one_cols_ch: str :type array_ch: str :type time_ch: str :type errors_ch: str :type datatype: str :type units: str :type config: str :type instrument: str :type frequency: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Other defined FIELDS stored in the database (see `import_amira <geosoft.gxapi.GXDU.import_amira>` function) will be automatically included in the export """ gxapi_cy.WrapDU._export_amira(GXContext._get_tls_geo(), db, wa, one_cols_ch.encode(), array_ch.encode(), time_ch.encode(), errors_ch.encode(), datatype.encode(), units.encode(), config.encode(), instrument.encode(), frequency.encode()) @classmethod def export_aseg(cls, db, cur_line, chan_vv, chan, defn, data): """ Export to ASEG-GDF format file(s). :param db: Database :param cur_line: Current line :param chan_vv: Displayed channels :param chan: :ref:`DU_CHANNELS` :param defn: Header file name :param data: Data file name :type db: GXDB :type cur_line: str :type chan_vv: GXVV :type chan: int :type defn: str :type data: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** At least one of the header file or data file names must be set. (Unset names will get the same file name, but with the extensions .dfn (header) or .dat (data). For databases with both groups and lines: If both lines and groups are selected, save only the lines. If no lines are selected, (only groups), save the current line if it is (1) a group and (2) selected, else save the first selected group. --- """ gxapi_cy.WrapDU._export_aseg(GXContext._get_tls_geo(), db, cur_line.encode(), chan_vv, chan, defn.encode(), data.encode()) @classmethod def export_aseg_proj(cls, db, cur_line, chan_vv, chan, defn, data, proj, ipj): """ Export to ASEG-GDF format file(s) (supports projections). :param db: Database :param cur_line: Current line :param chan_vv: Displayed channels :param chan: :ref:`DU_CHANNELS` :param defn: Export header file name :param data: Export data file name :param proj: Export projection file name :param ipj: Projection handle :type db: GXDB :type cur_line: str :type chan_vv: GXVV :type chan: int :type defn: str :type data: str :type proj: str :type ipj: GXIPJ .. versionadded:: 5.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** At least one of the header file or data file names must be set. (Unset names will get the same file name, but with the extensions .dfn (header) or .dat (data). For databases with both groups and lines: If both lines and groups are selected, save only the lines. If no lines are selected, (only groups), save the current line if it is (1) a group and (2) selected, else save the first selected group. 
--- This version supports projections """ gxapi_cy.WrapDU._export_aseg_proj(GXContext._get_tls_geo(), db, cur_line.encode(), chan_vv, chan, defn.encode(), data.encode(), proj.encode(), ipj) @classmethod def export_chan_crc(cls, db, symb, crc, file): """ Export a channel as XML and compute a CRC value. :param db: Database :param symb: Channel :param crc: CRC Value returned :param file: File name to generate with XML :type db: GXDB :type symb: int :type crc: int_ref :type file: str .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The output file is an XML describing the channel. The CRC is of the channel data ONLY. To compute a CRC of the full channel (include metadata) do a CRC of the generated file. """ crc.value = gxapi_cy.WrapDU._export_chan_crc(GXContext._get_tls_geo(), db, symb, crc.value, file.encode()) @classmethod def export_csv(cls, db, cur_line, chan_vv, chan, data, dummies, header): """ Export to a CSV file. :param db: Database :param cur_line: Current line :param chan_vv: Displayed channels :param chan: :ref:`DU_CHANNELS` :param data: Data file name :param dummies: Write out dummies? :param header: Include a header with channel names? :type db: GXDB :type cur_line: str :type chan_vv: GXVV :type chan: int :type data: str :type dummies: int :type header: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** For databases with both groups and lines: If both lines and groups are selected, save only the lines. If no lines are selected, (only groups), save the current line if it is (1) a group and (2) selected, else save the first selected group. --- Option to filter out data where one of the channels has a dummy in it. Option to allow a header with the channel names. 
""" gxapi_cy.WrapDU._export_csv(GXContext._get_tls_geo(), db, cur_line.encode(), chan_vv, chan, data.encode(), dummies, header) @classmethod def export_database_crc(cls, db, crc, file): """ Export a channel as XML and compute a CRC value. :param db: Database :param crc: CRC Value returned :param file: File name to generate with XML :type db: GXDB :type crc: int_ref :type file: str .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The output file is an XML describing the channel. The CRC is of the channel data ONLY. To compute a CRC of the full channel (include metadata) do a CRC of the generated file. """ crc.value = gxapi_cy.WrapDU._export_database_crc(GXContext._get_tls_geo(), db, crc.value, file.encode()) @classmethod def export_gbn(cls, db, vv, data): """ Export to a GBN data file. :param db: Database :param vv: List of channels to export :param data: Export data file name :type db: GXDB :type vv: GXVV :type data: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The iDispChanList_DBE or `GXDB.symb_list <geosoft.gxapi.GXDB.symb_list>` methods can be used to obtain a list of channels. """ gxapi_cy.WrapDU._export_gbn(GXContext._get_tls_geo(), db, vv, data.encode()) @classmethod def export_mdb(cls, db, cur_line, chan_vv, chan, single, data): """ Export to a Microsoft Access Database (MDB) file. :param db: Database :param cur_line: Current line :param chan_vv: Displayed channels :param chan: :ref:`DU_CHANNELS` :param single: :ref:`DU_LINEOUT` :param data: Export data file name :type db: GXDB :type cur_line: str :type chan_vv: GXVV :type chan: int :type single: int :type data: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Similar to `export_gbn <geosoft.gxapi.GXDU.export_gbn>`, with the addition that Groups go to individual tables, and lines go to a single table, or individual tables, based on the value of :ref:`DU_LINEOUT` """ gxapi_cy.WrapDU._export_mdb(GXContext._get_tls_geo(), db, cur_line.encode(), chan_vv, chan, single, data.encode()) @classmethod def export_geodatabase(cls, db, feature_class_name, cur_line, chan_vv, chan, output, single, data): """ Export to a ESRI Geodatabase file. :param db: Database :param feature_class_name: Feature class name :param cur_line: Current line :param chan_vv: Displayed channels :param chan: :ref:`DU_CHANNELS` :param output: :ref:`DU_FEATURE_TYPE_OUTPUT` :param single: :ref:`DU_LINEOUT` :param data: Export data file name :type db: GXDB :type feature_class_name: str :type cur_line: str :type chan_vv: GXVV :type chan: int :type output: int :type single: int :type data: str .. versionadded:: 8.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Similar to `export_gbn <geosoft.gxapi.GXDU.export_gbn>`, with the addition that Groups go to individual tables, and lines go to a single table, or individual tables, based on the value of :ref:`DU_LINEOUT` """ gxapi_cy.WrapDU._export_geodatabase(GXContext._get_tls_geo(), db, feature_class_name.encode(), cur_line.encode(), chan_vv, chan, output, single, data.encode()) @classmethod def get_existing_feature_classes_in_geodatabase(cls, db, geodatabase, lst, vv): """ Searches the geodatabases for an existing Feature class. 
:param db: Database :param geodatabase: File geodatabase :param lst: Feature class names to verify :param vv: Output list of existing feature class names :type db: GXDB :type geodatabase: str :type lst: GXLST :type vv: GXVV :returns: 0 - Feature class does not exist 1 - Feature class exists :rtype: int .. versionadded:: 8.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Searches the geodatabases for an existing Feature class """ ret_val = gxapi_cy.WrapDU._get_existing_feature_classes_in_geodatabase(GXContext._get_tls_geo(), db, geodatabase.encode(), lst, vv) return ret_val @classmethod def export_shp(cls, db, cur_line, chan_vv, chan, single, data, lst): """ Export to a shape file or files. :param db: Database :param cur_line: Current line :param chan_vv: Displayed channels :param chan: :ref:`DU_CHANNELS` :param single: :ref:`DU_LINEOUT` :param data: Export shape file name or base filename (shp assumed if no extension given) :param lst: `GXLST <geosoft.gxapi.GXLST>` object will be filled with shape files created :type db: GXDB :type cur_line: str :type chan_vv: GXVV :type chan: int :type single: int :type data: str :type lst: GXLST .. versionadded:: 6.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Similar to `export_mdb <geosoft.gxapi.GXDU.export_mdb>`, with the addition that groups go to indiviual files with group name suffixes, and lines go to a single file, or multiple files with line name suffixes, based on the value of :ref:`DU_LINEOUT`. """ gxapi_cy.WrapDU._export_shp(GXContext._get_tls_geo(), db, cur_line.encode(), chan_vv, chan, single, data.encode(), lst) @classmethod def export_xyz(cls, db, data, templ): """ Export XYZdata from a database to an XYZ file. 
:param db: Database :param data: Export data file name :param templ: Export template name :type db: GXDB :type data: str :type templ: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 1. The export template can be in the local directory or the GEOSOFT directory. The import data file must include the path if it is not in the local directory. 2. Both the import template and data file must exist. 3. Sample Template file [EXPORT XYZ] EXPORT CHAN {,FORMAT} {,WIDTH} {,DECIMAL} WRITEDUMMY YES CLIPMAP YES MAXPOINTS 1000 INCREMENT .5 4. This can be used to export a group, but the group must be the currently displayed line, and only that group will be exported. """ gxapi_cy.WrapDU._export_xyz(GXContext._get_tls_geo(), db, data.encode(), templ.encode()) @classmethod def export_xyz2(cls, db, wa, ra): """ Export XYZdata from a database to an XYZ file, using file handles. :param db: Database :param wa: Export data file `GXWA <geosoft.gxapi.GXWA>` handle :param ra: Export template file `GXRA <geosoft.gxapi.GXRA>` handle :type db: GXDB :type wa: GXWA :type ra: GXRA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `export_xyz <geosoft.gxapi.GXDU.export_xyz>` """ gxapi_cy.WrapDU._export_xyz2(GXContext._get_tls_geo(), db, wa, ra) @classmethod def fft(cls, db, line, s_ch, r_ch, i_ch): """ Apply an `GXFFT <geosoft.gxapi.GXFFT>` to space data. :param db: Database :param line: Line handle :param s_ch: Space Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param r_ch: Real Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param i_ch: Imaginary Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type line: int :type s_ch: int :type r_ch: int :type i_ch: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._fft(GXContext._get_tls_geo(), db, line, s_ch, r_ch, i_ch) @classmethod def filter(cls, db, line, i_ch, o_ch, flt): """ Apply a convolution filter to a channel. :param db: Database :param line: Line handle :param i_ch: Input channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output filtered channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param flt: Filter handle (`GXFLT <geosoft.gxapi.GXFLT>`) :type db: GXDB :type line: int :type i_ch: int :type o_ch: int :type flt: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._filter(GXContext._get_tls_geo(), db, line, i_ch, o_ch, flt) @classmethod def gen_lev(cls, db, in_file, out_file, max_dz, m0): """ Generate a Level table from an Intersection Table. :param db: Database :param in_file: Input Table file Name :param out_file: Output Table file Name :param max_dz: Max. gradient :param m0: :ref:`DU_LEVEL` :type db: GXDB :type in_file: str :type out_file: str :type max_dz: float :type m0: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._gen_lev(GXContext._get_tls_geo(), db, in_file.encode(), out_file.encode(), max_dz, m0) @classmethod def gen_lev_db(cls, db, out_file, max_dz, m0): """ Generate a Level table from an Intersection Database :param db: Input intersection database object :param out_file: Output Table File Name :param max_dz: Max. gradient :param m0: :ref:`DU_LEVEL` :type db: GXDB :type out_file: str :type max_dz: float :type m0: int .. 
versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Requires channels with the following names: ine, TFid, TZ, TDZ Line, LFid, LZ, LDZ Mask """ gxapi_cy.WrapDU._gen_lev_db(GXContext._get_tls_geo(), db, out_file.encode(), max_dz, m0) @classmethod def gen_xyz_temp(cls, xyz, temp): """ Generate default XYZ template for a XYZ file. :param xyz: Xyz file name :param temp: Template file name to create :type xyz: str :type temp: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDU._gen_xyz_temp(GXContext._get_tls_geo(), xyz.encode(), temp.encode()) @classmethod def get_xyz_num_fields(cls, xyz, num_fields): """ Get the number of fields in the XYZ file. :param xyz: Xyz file name :param num_fields: Returned number of fields :type xyz: str :type num_fields: int_ref .. versionadded:: 9.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ num_fields.value = gxapi_cy.WrapDU._get_xyz_num_fields(GXContext._get_tls_geo(), xyz.encode(), num_fields.value) @classmethod def get_chan_data_lst(cls, db, chan, mask, lst): """ Populate a `GXLST <geosoft.gxapi.GXLST>` with unique items in a channel. :param db: Database :param chan: Data Channel :param mask: Mask Channel (can be `NULLSYMB <geosoft.gxapi.NULLSYMB>`) :param lst: `GXLST <geosoft.gxapi.GXLST>` object to populate :type db: GXDB :type chan: int :type mask: int :type lst: GXLST .. versionadded:: 6.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Items from all selected lines are collected, sorted, and duplicates removed. The output `GXLST <geosoft.gxapi.GXLST>` name and value are set to the item values. 
        Non-string channels are converted internally to string values using Copy_VV,
        so results may differ from what you may expect given the current channel's
        display width and number of decimals.
        If a mask channel is selected, then only those items where the mask channel
        is not a dummy are collected.
        """
        gxapi_cy.WrapDU._get_chan_data_lst(GXContext._get_tls_geo(), db, chan, mask, lst)



    @classmethod
    def get_chan_data_vv(cls, db, chan, mask, vv):
        """
        Populate a `GXVV <geosoft.gxapi.GXVV>` with unique items in a channel.

        :param db:    Database
        :param chan:  Channel
        :param mask:  Mask Channel (can be `NULLSYMB <geosoft.gxapi.NULLSYMB>`)
        :param vv:    `GXVV <geosoft.gxapi.GXVV>` object to populate
        :type  db:    GXDB
        :type  chan:  int
        :type  mask:  int
        :type  vv:    GXVV

        .. versionadded:: 6.1

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Items from all selected lines are collected, sorted, and duplicates removed.
        The data is collected in the channel's data type, so normal
        `GXVV.sort <geosoft.gxapi.GXVV.sort>` rules apply.
        If the output `GXVV <geosoft.gxapi.GXVV>` and channel type are not the
        same, then the data is converted using the Copy_VV function, so see
        that for conversion rules.
        If a mask channel is selected, then only those items where the mask channel
        is not a dummy are collected.
        """
        gxapi_cy.WrapDU._get_chan_data_vv(GXContext._get_tls_geo(), db, chan, mask, vv)



    @classmethod
    def get_gridding_azimuth_to_minimize_padding(cls, db, xCh, yCh, mCh, x1, y1, x2, y2, x3, y3, x4, y4):
        """
        Return the gridding azimuth (degrees CW from north) that minimizes padding.

        :param db:   Database
        :param xCh:  X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param yCh:  Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param mCh:  Data or mask channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param x1:   Returned Corner 1 - X
        :param y1:   Returned Corner 1 - Y
        :param x2:   Returned Corner 2 - X
        :param y2:   Returned Corner 2 - Y
        :param x3:   Returned Corner 3 - X
        :param y3:   Returned Corner 3 - Y
        :param x4:   Returned Corner 4 - X
        :param y4:   Returned Corner 4 - Y
        :type  db:   GXDB
        :type  xCh:  int
        :type  yCh:  int
        :type  mCh:  int
        :type  x1:   float_ref
        :type  y1:   float_ref
        :type  x2:   float_ref
        :type  y2:   float_ref
        :type  x3:   float_ref
        :type  y3:   float_ref
        :type  x4:   float_ref
        :type  y4:   float_ref

        :rtype: float

        .. versionadded:: 2023.1

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Especially if the survey lines do not run N-S or E-W, gridding
        parallel to the XY axes results in up to half the gridding area being
        dummies (padding), which is not only inefficient, but affects processes
        like filtering.
        This algorithm determines the data rotation required such that the
        gridding extents rectangle fit around the data minimizes the amount of
        padding. The result is good to the nearest degree. The corner points
        are returned and include a buffer around the edge equal to 1% of the
        maximum height/width.
        The returned azimuth will be in the range 0 to 89.
        Operates on all selected lines.
        """
        ret_val, x1.value, y1.value, x2.value, y2.value, x3.value, y3.value, x4.value, y4.value = gxapi_cy.WrapDU._get_gridding_azimuth_to_minimize_padding(GXContext._get_tls_geo(), db, xCh, yCh, mCh, x1.value, y1.value, x2.value, y2.value, x3.value, y3.value, x4.value, y4.value)
        return ret_val



    @classmethod
    def get_angled_bounding_rectangle(cls, db, xCh, yCh, mCh, azimuth, x1, y1, x2, y2, x3, y3, x4, y4):
        """
        Return the angled bounding rectangle for data to be gridded on an angle.

        :param db:       Database
        :param xCh:      X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param yCh:      Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param mCh:      Data or mask channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param azimuth:  Input Azimuth Angle - degrees CW from North
        :param x1:       Returned Corner 1 - X
        :param y1:       Returned Corner 1 - Y
        :param x2:       Returned Corner 2 - X
        :param y2:       Returned Corner 2 - Y
        :param x3:       Returned Corner 3 - X
        :param y3:       Returned Corner 3 - Y
        :param x4:       Returned Corner 4 - X
        :param y4:       Returned Corner 4 - Y
        :type  db:       GXDB
        :type  xCh:      int
        :type  yCh:      int
        :type  mCh:      int
        :type  azimuth:  float
        :type  x1:       float_ref
        :type  y1:       float_ref
        :type  x2:       float_ref
        :type  y2:       float_ref
        :type  x3:       float_ref
        :type  y3:       float_ref
        :type  x4:       float_ref
        :type  y4:       float_ref

        .. versionadded:: 2023.1

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Especially if the survey lines do not run N-S or E-W, gridding
        parallel to the XY axes results in up to half the gridding area being
        dummies (padding), which is not only inefficient, but affects processes
        like filtering.
        This routine returns the rotated extents rectangle for a given gridding
        azimuth. The data is rotated CCW by the input azimuth.
        The N-S and E-W extents of the rotated data are determined and the
        bounding points rotated back by the input azimuth to locate the angled
        bounding rectangle around the input data.
        As with GetGriddingAzimuthToMinimizePadding_DU the extents are padded
        in each direction by 1% of the maximum height/width
        Operates on all selected lines.
        """
        x1.value, y1.value, x2.value, y2.value, x3.value, y3.value, x4.value, y4.value = gxapi_cy.WrapDU._get_angled_bounding_rectangle(GXContext._get_tls_geo(), db, xCh, yCh, mCh, azimuth, x1.value, y1.value, x2.value, y2.value, x3.value, y3.value, x4.value, y4.value)



    @classmethod
    def gradient(cls, dbi, dbo, ix_ch, iy_ch, iz_ch, ig_ch, ox_ch, oy_ch, oz_ch, angle, width):
        """
        This method takes 4 channels from the input database and duplicates each
        line twice to the output database (input and output can be the same channel).

        :param dbi:    Database Input
        :param dbo:    Database Output
        :param ix_ch:  X Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param iy_ch:  Y Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param iz_ch:  Z Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param ig_ch:  G Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param ox_ch:  X Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param oy_ch:  Y Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param oz_ch:  Z Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param angle:  Angle
        :param width:  Width
        :type  dbi:    GXDB
        :type  dbo:    GXDB
        :type  ix_ch:  int
        :type  iy_ch:  int
        :type  iz_ch:  int
        :type  ig_ch:  int
        :type  ox_ch:  int
        :type  oy_ch:  int
        :type  oz_ch:  int
        :type  angle:  float
        :type  width:  float

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._gradient(GXContext._get_tls_geo(), dbi, dbo, ix_ch, iy_ch, iz_ch, ig_ch, ox_ch, oy_ch, oz_ch, angle, width)



    @classmethod
    def grav_drift(cls, db, line, date, time, read, base, clos):
        """
        Calculate base loop closure and correct for drift.

        :param db:    Database
        :param line:  Line [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param date:  Date [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param time:  Local time (on date) [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param read:  Reading [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param base:  Base [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param clos:  Closure error [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :type  db:    GXDB
        :type  line:  int
        :type  date:  int
        :type  time:  int
        :type  read:  int
        :type  base:  int
        :type  clos:  int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._grav_drift(GXContext._get_tls_geo(), db, line, date, time, read, base, clos)



    @classmethod
    def grav_drift2(cls, db, line, date, time, read, base, clos, corr):
        """
        Calculate base loop closure, calculate drift correction and correct for drift.

        :param db:    Database
        :param line:  Line [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param date:  Date [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param time:  Local time (on date) [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param read:  Reading [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param base:  Base [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param clos:  Closure error [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param corr:  Drift correction [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :type  db:    GXDB
        :type  line:  int
        :type  date:  int
        :type  time:  int
        :type  read:  int
        :type  base:  int
        :type  clos:  int
        :type  corr:  int

        .. versionadded:: 9.6

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._grav_drift2(GXContext._get_tls_geo(), db, line, date, time, read, base, clos, corr)



    @classmethod
    def grav_tide(cls, db, line, lat, lon, date, time, gmt, tide):
        """
        Calculate earth tide gravity correction.

        :param db:    Database
        :param line:  Line
        :param lat:   Lat [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param lon:   Long [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param date:  Date [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param time:  Local time (on date) [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param gmt:   GMT difference (added to time to give GMT)
        :param tide:  Calculated tide [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :type  db:    GXDB
        :type  line:  int
        :type  lat:   int
        :type  lon:   int
        :type  date:  int
        :type  time:  int
        :type  gmt:   float
        :type  tide:  int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._grav_tide(GXContext._get_tls_geo(), db, line, lat, lon, date, time, gmt, tide)



    @classmethod
    def grid_load(cls, db, img, xdec, ydec, trim_dum, create_index):
        """
        Load grid data to a database.

        :param db:            Database
        :param img:           Grid img
        :param xdec:          X decimation factor
        :param ydec:          Y decimation factor
        :param trim_dum:      0 trim leading/trailing dummies (default), 1 trim all dummies, 2 leave all dummies
        :param create_index:  Flag for creating index channel: 0 no (default), 1 yes.
        :type  db:            GXDB
        :type  img:           GXIMG
        :type  xdec:          int
        :type  ydec:          int
        :type  trim_dum:      int
        :type  create_index:  int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._grid_load(GXContext._get_tls_geo(), db, img, xdec, ydec, trim_dum, create_index)



    @classmethod
    def grid_load_xyz(cls, db, img, ch_x, ch_y, ch_z, ch_data, xdec, ydec, trim_dum, index_ch):
        """
        Load grid data to a database using specified channels

        :param db:        Database
        :param img:       Grid img
        :param ch_x:      X Channel
        :param ch_y:      Y Channel
        :param ch_z:      Z Channel
        :param ch_data:   Data Channel
        :param xdec:      X decimation factor
        :param ydec:      Y decimation factor
        :param trim_dum:  0 trim leading/trailing dummies (default), 1 trim all dummies, 2 leave all dummies
        :param index_ch:  Flag for creating index channel: 0 no (default), 1 yes.
        :type  db:        GXDB
        :type  img:       GXIMG
        :type  ch_x:      int
        :type  ch_y:      int
        :type  ch_z:      int
        :type  ch_data:   int
        :type  xdec:      int
        :type  ydec:      int
        :type  trim_dum:  int
        :type  index_ch:  int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._grid_load_xyz(GXContext._get_tls_geo(), db, img, ch_x, ch_y, ch_z, ch_data, xdec, ydec, trim_dum, index_ch)



    @classmethod
    def head(cls, db, line, i_ch, o_ch, tb, dir):
        """
        Applies a heading correction.

        :param db:    Database object
        :param line:  Line Symbol
        :param i_ch:  Channel to correct [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param o_ch:  Corrected channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param tb:    Heading table
        :param dir:   Line direction
        :type  db:    GXDB
        :type  line:  int
        :type  i_ch:  int
        :type  o_ch:  int
        :type  tb:    GXTB
        :type  dir:   float

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Updates channel with Direction in degrees azimuth (counter-clockwise
        relative to the +Y direction).
        `GS_R8DM <geosoft.gxapi.GS_R8DM>` if the line has no data, or if there is a problem.
        """
        gxapi_cy.WrapDU._head(GXContext._get_tls_geo(), db, line, i_ch, o_ch, tb, dir)



    @classmethod
    def import_bin3(cls, db, data, templ, line, flight, date, wa):
        """
        Same as `import_bin2 <geosoft.gxapi.GXDU.import_bin2>`, but returns the name of the imported line.

        :param db:      Database
        :param data:    Import data file name
        :param templ:   Import template name
        :param line:    Optional Line name (on return, the actual line)
        :param flight:  Optional Flight number
        :param date:    Optional date
        :type  db:      GXDB
        :type  data:    str
        :type  templ:   str
        :type  line:    str_ref
        :type  flight:  int
        :type  date:    float
        :type  wa:      GXWA

        .. versionadded:: 6.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** See `import_bin2 <geosoft.gxapi.GXDU.import_bin2>`.
        Because the name of the created line is not necessarily the value
        passed in (and the value passed in can be blank), this version
        returns the name of the line to which the data is actually imported.

        .. seealso::

            `import_bin2 <geosoft.gxapi.GXDU.import_bin2>`
        """
        line.value = gxapi_cy.WrapDU._import_bin3(GXContext._get_tls_geo(), db, data.encode(), templ.encode(), line.value.encode(), flight, date, wa)



    @classmethod
    def imp_cb_ply(cls, db, pj, file, x_chan, y_chan):
        """
        Import concession boundary polygon file into a database

        :param db:      Database
        :param pj:      Projection Files Object
        :param file:    Import data file name
        :param x_chan:  X channel handle
        :param y_chan:  Y channel handle
        :type  db:      GXDB
        :type  pj:      GXPJ
        :type  file:    str
        :type  x_chan:  int
        :type  y_chan:  int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The polygon file is provided by <NAME> in Brazil.
        """
        gxapi_cy.WrapDU._imp_cb_ply(GXContext._get_tls_geo(), db, pj, file.encode(), x_chan, y_chan)



    @classmethod
    def import_ado(cls, db, connect, table, templ, line):
        """
        Import an external database table into a group using ADO.

        :param db:       Database
        :param connect:  Import database connection string (overrides template value)
        :param table:    Imported table in database file (overrides template value)
        :param templ:    Import template name
        :param line:     Oasis montaj line name to create (overrides template value)
        :type  db:       GXDB
        :type  connect:  str
        :type  table:    str
        :type  templ:    str
        :type  line:     str

        .. versionadded:: 5.0.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. The import template can be in the local directory or the GEOSOFT
        directory.

        2. Only the import template must be specified. The database connection
        string, the database table and Oasis line name are normally taken
        from the template file itself, but if these values are provided,
        they will override those found in the template.

        3. If the line already exists, the data will overwrite the existing data.
        """
        gxapi_cy.WrapDU._import_ado(GXContext._get_tls_geo(), db, connect.encode(), table.encode(), templ.encode(), line.encode())



    @classmethod
    def import_all_ado(cls, db, connect, storage):
        """
        Import an entire external database using ADO.

        :param db:       Database
        :param connect:  Import database connection string
        :param storage:  :ref:`DU_STORAGE`
        :type  db:       GXDB
        :type  connect:  str
        :type  storage:  int

        .. versionadded:: 5.0.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. For group storage, the table names are imported "as is". For line
        storage, if the table names are valid Geosoft line names, they are
        used as is. Otherwise, line names will be created with type LINE_NORMAL,
        starting at L0 and incrementing by 10 (L10, L20 etc.)

        2. If the line exists, the data will overwrite the existing data.

        3. All tables and fields will be imported.

        4. If connection string is of type "FILENAME=..." the connection will
        attempt to resolve it as a file database. (see also ODBCFileConnect_GUI)
        """
        gxapi_cy.WrapDU._import_all_ado(GXContext._get_tls_geo(), db, connect.encode(), storage)



    @classmethod
    def import_all_dao(cls, db, data, type, storage):
        """
        Import an entire external database using DAO.

        :param db:       Database
        :param data:     Import data file name
        :param type:     Database type
        :param storage:  :ref:`DU_STORAGE`
        :type  db:       GXDB
        :type  data:     str
        :type  type:     str
        :type  storage:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. The file is assumed to be a DAO compliant database.

        2. The import data file must include the path if it is not in the local
        directory.

        3. For group storage, the table names are imported "as is". For line
        storage, if the table names are valid Geosoft line names, they are
        used as is. Otherwise, line names will be created with type LINE_NORMAL,
        starting at L0 and incrementing by 10 (L10, L20 etc.)

        4. If the line exists, the data will overwrite the existing data.

        5. All tables and fields will be imported.

        6. The following are valid type strings for DAO:

        MSJET       : Microsoft Access
        ODBC        : ODBC source
        dBASE III
        dBASE IV
        dBASE 5
        FoxPro 2.0
        FoxPro 2.5
        FoxPro 2.6
        Paradox 3.x
        Paradox 4.x
        Paradox 5.x
        """
        gxapi_cy.WrapDU._import_all_dao(GXContext._get_tls_geo(), db, data.encode(), type.encode(), storage)



    @classmethod
    def import_amira(cls, db, ra, wa):
        """
        Import an AMIRA data file.

        :param db:  Database
        :param ra:  AMIRA data file handle
        :param wa:  Log file handle
        :type  db:  GXDB
        :type  ra:  GXRA
        :type  wa:  GXWA

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** All the constant declarations are stored within the database
        under \\TEM\\CONSTANTS. The format is as follows:

        1. Lines stored in the file beginning with "/" are comments
        2. Each constant occupies a line in the file. It uses the format: CONSTANT=VALUE
        """
        gxapi_cy.WrapDU._import_amira(GXContext._get_tls_geo(), db, ra, wa)



    @classmethod
    def import_aseg(cls, db, templ, file, data, flc, chans):
        """
        Import an ASEG-GDF data file.

        :param db:     Database
        :param templ:  Template file name
        :param file:   Header file name
        :param data:   Data file name
        :param flc:    Flight Line Channel name
        :param chans:  Number of channels to import at one time
        :type  db:     GXDB
        :type  templ:  str
        :type  file:   str
        :type  data:   str
        :type  flc:    str
        :type  chans:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapDU._import_aseg(GXContext._get_tls_geo(), db, templ.encode(), file.encode(), data.encode(), flc.encode(), chans)



    @classmethod
    def import_aseg_proj(cls, db, templ, file, data, flc, chans, proj, x_ch, y_ch):
        """
        Import an ASEG-GDF data file (supports projections).

        :param db:     Database
        :param templ:  Template file name
        :param file:   Header file name
        :param data:   Data file name
        :param flc:    Flight Line Channel name
        :param chans:  Number of channels to import at one time
        :param proj:   Projection file name
        :param x_ch:   Channel pair to associate projection
        :param y_ch:   Channel pair to associate projection
        :type  db:     GXDB
        :type  templ:  str
        :type  file:   str
        :type  data:   str
        :type  flc:    str
        :type  chans:  int
        :type  proj:   str
        :type  x_ch:   str
        :type  y_ch:   str

        .. versionadded:: 5.0.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This version supports projections
        """
        gxapi_cy.WrapDU._import_aseg_proj(GXContext._get_tls_geo(), db, templ.encode(), file.encode(), data.encode(), flc.encode(), chans, proj.encode(), x_ch.encode(), y_ch.encode())



    @classmethod
    def import_bin(cls, db, data, templ, line, flight, date):
        """
        Import blocked binary or archive ASCII data

        :param db:      Database
        :param data:    Import data file name
        :param templ:   Import template name
        :param line:    Optional Line name (see note 3.)
        :param flight:  Optional Flight number
        :param date:    Optional date
        :type  db:      GXDB
        :type  data:    str
        :type  templ:   str
        :type  line:    str
        :type  flight:  int
        :type  date:    float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. Binary import templates have extension .I2 by convention. See
        BINARY.I2 for a description of the template format.
        Archive import templates have extension .I3 by convention. See
        ARCHIVE.I3 for a description of the template format.

        2. Both the import template and data file must exist.

        3. If a line already exists in the database, a new version is created
        unless a line name is passed in. In this case, the specified name
        is used and the imported channels on the previous line will be
        destroyed.

        .. seealso::

            `lab_template <geosoft.gxapi.GXDU.lab_template>`
        """
        gxapi_cy.WrapDU._import_bin(GXContext._get_tls_geo(), db, data.encode(), templ.encode(), line.encode(), flight, date)



    @classmethod
    def import_bin2(cls, db, data, templ, line, flight, date, wa):
        """
        Import blocked binary or archive ASCII data with data error display

        :param db:      Database
        :param data:    Import data file name
        :param templ:   Import template name
        :param line:    Optional Line name (see note 3.)
        :param flight:  Optional Flight number
        :param date:    Optional date
        :type  db:      GXDB
        :type  data:    str
        :type  templ:   str
        :type  line:    str
        :type  flight:  int
        :type  date:    float
        :type  wa:      GXWA

        .. versionadded:: 5.1.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. Binary import templates have extension .I2 by convention. See
        BINARY.I2 for a description of the template format.
        Archive import templates have extension .I3 by convention. See
        ARCHIVE.I3 for a description of the template format.

        2. Both the import template and data file must exist.

        3. If a line already exists in the database, a new version is created
        unless a line name is passed in. In this case, the specified name
        is used and the imported channels on the previous line will be
        destroyed.

        .. seealso::

            `lab_template <geosoft.gxapi.GXDU.lab_template>`
        """
        gxapi_cy.WrapDU._import_bin2(GXContext._get_tls_geo(), db, data.encode(), templ.encode(), line.encode(), flight, date, wa)



    @classmethod
    def import_bin4(cls, db, mode, data, templ, line, flight, date, wa):
        """
        Same as `import_bin2 <geosoft.gxapi.GXDU.import_bin2>` but with an import mode

        :param db:      Database
        :param mode:    :ref:`DU_IMPORT`
        :param data:    Import data file name
        :param templ:   Import template name
        :param line:    Optional Line name (see note 3.)
        :param flight:  Optional Flight number
        :param date:    Optional date
        :type  db:      GXDB
        :type  mode:    int
        :type  data:    str
        :type  templ:   str
        :type  line:    str
        :type  flight:  int
        :type  date:    float
        :type  wa:      GXWA

        .. versionadded:: 9.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Same as `import_bin2 <geosoft.gxapi.GXDU.import_bin2>` but with an import mode

        .. seealso::

            `import_bin2 <geosoft.gxapi.GXDU.import_bin2>`
        """
        gxapi_cy.WrapDU._import_bin4(GXContext._get_tls_geo(), db, mode, data.encode(), templ.encode(), line.encode(), flight, date, wa)



    @classmethod
    def import_daarc500_serial(cls, db, line, file, channel, type):
        """
        Import Serial data from the RMS Instruments DAARC500.

        :param db:       Database object
        :param line:     Output line (`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`)
        :param file:     Name of file to import
        :param channel:  Channel to import, 1-8
        :param type:     :ref:`GU_DAARC500_DATATYPE`
        :type  db:       GXDB
        :type  line:     int
        :type  file:     str
        :type  channel:  int
        :type  type:     int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Imports data stored in a serial channel recorded
        by the RMS Instruments DAARC500 instrument, and outputs the data
        to a line in the database. The channels created depend on the
        input data type
        """
        gxapi_cy.WrapDU._import_daarc500_serial(GXContext._get_tls_geo(), db, line, file.encode(), channel, type)



    @classmethod
    def import_daarc500_serial_gps(cls, db, line, file, channel):
        """
        Import Serial GPS data from the RMS Instruments DAARC500.

        :param db:       Database object
        :param line:     Output line (`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`)
        :param file:     Name of file to import
        :param channel:  Channel to import, 1-8
        :type  db:       GXDB
        :type  line:     int
        :type  file:     str
        :type  channel:  int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Imports GPS data stored in a serial channel recorded
        by the RMS Instruments DAARC500 instrument, and outputs the data
        to a line in the database.
        Makes the following channels:
        Fid, UTC_Time, Latitude, Longitude, Altitude, GPS_Quality,
        NumSat (Number of satellites), GPS_HDOP (Horizontal Dilution of Position),
        Undulation, GPS_DiffAge (Age of differential channel).
        """
        gxapi_cy.WrapDU._import_daarc500_serial_gps(GXContext._get_tls_geo(), db, line, file.encode(), channel)



    @classmethod
    def import_dao(cls, db, data, type, table, templ, line):
        """
        Import an external database table into a group using DAO.

        :param db:     Database
        :param data:   Import database file name (overrides template value)
        :param type:   Import data file type (overrides template value)
        :param table:  Imported table in database file (overrides template value)
        :param templ:  Import template name
        :param line:   Oasis Montaj line name to create (overrides template value)
        :type  db:     GXDB
        :type  data:   str
        :type  type:   str
        :type  table:  str
        :type  templ:  str
        :type  line:   str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. The import template can be in the local directory or the GEOSOFT
        directory. The import data file must include the path if it is not
        in the local directory.

        2. Only the import template must be specified. The database file name,
        file type, the database table and Oasis line name are normally taken
        from the template file itself, but if these values are provided,
        they will override those found in the template.

        3. If the line already exists, the data will overwrite the existing data.
        """
        gxapi_cy.WrapDU._import_dao(GXContext._get_tls_geo(), db, data.encode(), type.encode(), table.encode(), templ.encode(), line.encode())



    @classmethod
    def import_esri(cls, db, connect, templ, line):
        """
        Import an ArcGIS Geodatabase table or feature class into a GDB group

        :param db:       Database
        :param connect:  Import database connection string (e.g. "d:\\Personal\\test.mdb|Table" or "d:\\File\\test.gdb|FeatureClass, overrides template value)
        :param templ:    Import template name
        :param line:     Oasis montaj line name to create (overrides template value)
        :type  db:       GXDB
        :type  connect:  str
        :type  templ:    str
        :type  line:     str

        .. versionadded:: 7.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. The import template can be in the local directory or the GEOSOFT
        directory.

        2. Only the import template must be specified. The Geodatabase connection
        string and Oasis line name are normally taken from the template file
        itself, but if these values are provided, they will override those
        found in the template.

        3. If the line already exists, the data will overwrite the existing data.
        """
        gxapi_cy.WrapDU._import_esri(GXContext._get_tls_geo(), db, connect.encode(), templ.encode(), line.encode())



    @classmethod
    def import_gbn(cls, db, file):
        """
        Import GBN data file.

        :param db:    Database
        :param file:  File name of the GBN file to import
        :type  db:    GXDB
        :type  file:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapDU._import_gbn(GXContext._get_tls_geo(), db, file.encode())



    @classmethod
    def import_oddf(cls, db, file):
        """
        Import ODDF data file.

        :param db:    Database
        :param file:  File name of the ODDF file to import
        :type  db:    GXDB
        :type  file:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapDU._import_oddf(GXContext._get_tls_geo(), db, file.encode())



    @classmethod
    def import_pico(cls, db, templ, data, chans):
        """
        Import a Picodas data file.

        :param db:     Database
        :param templ:  Template file name
        :param data:   Data file name
        :param chans:  Number of channels to import at one time
        :type  db:     GXDB
        :type  templ:  str
        :type  data:   str
        :type  chans:  int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapDU._import_pico(GXContext._get_tls_geo(), db, templ.encode(), data.encode(), chans)



    @classmethod
    def import_ubc_mod_msh(cls, db, mesh, mods, dir, dummy):
        """
        Import UBC Mod and Msh files.

        :param db:     Database Object
        :param mesh:   Mesh file
        :param mods:   1-5 Mod files, delimited with "|"
        :param dir:    Import slice direction (0-2 for X,Y and Z)
        :param dummy:  Value to interpret as dummy
        :type  db:     GXDB
        :type  mesh:   str
        :type  mods:   str
        :type  dir:    int
        :type  dummy:  float

        .. versionadded:: 6.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Each slice in X,Y or Z is imported to its own line in the database
        beginning with L0.
        """
        gxapi_cy.WrapDU._import_ubc_mod_msh(GXContext._get_tls_geo(), db, mesh.encode(), mods.encode(), dir, dummy)



    @classmethod
    def import_usgs_post(cls, db, file):
        """
        Import USGS Post data file.

        :param db:    Database
        :param file:  File name of the USGS post file to import
        :type  db:    GXDB
        :type  file:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        gxapi_cy.WrapDU._import_usgs_post(GXContext._get_tls_geo(), db, file.encode())



    @classmethod
    def import_xyz(cls, db, mode, data, templ):
        """
        Import XYZ data into the database.

        :param db:     Database
        :param mode:   :ref:`DU_IMPORT`
        :param data:   Import data file name
        :param templ:  Import template name
        :type  db:     GXDB
        :type  mode:   int
        :type  data:   str
        :type  templ:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. The import template can be in the local directory or the GEOSOFT
        directory. The import data file must include the path if it is not
        in the local directory.

        2. Both the import template and data file must exist.
        """
        gxapi_cy.WrapDU._import_xyz(GXContext._get_tls_geo(), db, mode, data.encode(), templ.encode())



    @classmethod
    def import_xyz2(cls, db, mode, data, templ, wa):
        """
        Import XYZ data into the database.

        :param db:     Database
        :param mode:   :ref:`DU_IMPORT`
        :param data:   Import data file name
        :param templ:  Import template name
        :type  db:     GXDB
        :type  mode:   int
        :type  data:   str
        :type  templ:  str
        :type  wa:     GXWA

        .. versionadded:: 5.1.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. The import template can be in the local directory or the GEOSOFT
        directory. The import data file must include the path if it is not
        in the local directory.

        2. Both the import template and data file must exist.
        """
        gxapi_cy.WrapDU._import_xyz2(GXContext._get_tls_geo(), db, mode, data.encode(), templ.encode(), wa)



    @classmethod
    def import_io_gas(cls, db, data_csv, templ):
        """
        Import data columns from an ioGAS data file.

        :param db:        Database
        :param data_csv:  Input data.csv file name
        :param templ:     Input template file name
        :type  db:        GXDB
        :type  data_csv:  str
        :type  templ:     str

        .. versionadded:: 8.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** 1. All columns in the specified ioGAS data file will be imported.

        2. If a line already exists, the data will overwrite the existing data.
        """
        gxapi_cy.WrapDU._import_io_gas(GXContext._get_tls_geo(), db, data_csv.encode(), templ.encode())



    @classmethod
    def index_order(cls, db, line, in_ch, ch):
        """
        Change the order of a channel using an index channel.

        :param db:     Database
        :param line:   Line symbol
        :param in_ch:  Ordered index channel (should be int) [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param ch:     Channel to reorder [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :type  db:     GXDB
        :type  line:   int
        :type  in_ch:  int
        :type  ch:     int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._index_order(GXContext._get_tls_geo(), db, line, in_ch, ch)



    @classmethod
    def interp(cls, db, line, i_ch, o_ch, inside, outside):
        """
        Replace all dummies by interpolating from valid data.

        :param db:       Database
        :param line:     Line handle
        :param i_ch:     Channel to interpolate [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param o_ch:     Output interpolated channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param inside:   :ref:`DU_INTERP`
        :param outside:  :ref:`DU_INTERP_EDGE`
        :type  db:       GXDB
        :type  line:     int
        :type  i_ch:     int
        :type  o_ch:     int
        :type  inside:   int
        :type  outside:  int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._interp(GXContext._get_tls_geo(), db, line, i_ch, o_ch, inside, outside)



    @classmethod
    def interp_gap(cls, db, line, i_ch, o_ch, inside, outside, gap, extend):
        """
        Replace all dummies by interpolating from valid data.

        :param db:       Database
        :param line:     Line handle
        :param i_ch:     Channel to interpolate [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param o_ch:     Output interpolated channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param inside:   :ref:`DU_INTERP`
        :param outside:  :ref:`DU_INTERP_EDGE`
        :param gap:      Maximum gap to interpolate (fiducials)
        :param extend:   Maximum items to extend at ends.
        :type  db:       GXDB
        :type  line:     int
        :type  i_ch:     int
        :type  o_ch:     int
        :type  inside:   int
        :type  outside:  int
        :type  gap:      int
        :type  extend:   int

        .. versionadded:: 8.4

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._interp_gap(GXContext._get_tls_geo(), db, line, i_ch, o_ch, inside, outside, gap, extend)



    @classmethod
    def interp_gap_and_fill(cls, db, line, i_ch, o_ch, inside, outside, gap, extend, fill):
        """
        Replace all dummies by interpolating from valid data.

        :param db:       Database
        :param line:     Line handle
        :param i_ch:     Channel to interpolate [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param o_ch:     Output interpolated channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param inside:   :ref:`DU_INTERP`
        :param outside:  :ref:`DU_INTERP_EDGE`
        :param gap:      Maximum gap to interpolate (fiducials)
        :param extend:   Maximum items to extend at ends.
        :param fill:     Value used to fill dummy channels.
        :type  db:       GXDB
        :type  line:     int
        :type  i_ch:     int
        :type  o_ch:     int
        :type  inside:   int
        :type  outside:  int
        :type  gap:      int
        :type  extend:   int
        :type  fill:     float

        .. versionadded:: 2022.2

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._interp_gap_and_fill(GXContext._get_tls_geo(), db, line, i_ch, o_ch, inside, outside, gap, extend, fill)



    @classmethod
    def intersect(cls, db, x_chan, y_chan, z_chan, tol, file):
        """
        Create Tie Line & Normal Line intersect table.

        :param db:      Database
        :param x_chan:  X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param y_chan:  Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param z_chan:  Z Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param tol:     Intersection tolerance
        :param file:    Output Table file Name
        :type  db:      GXDB
        :type  x_chan:  int
        :type  y_chan:  int
        :type  z_chan:  int
        :type  tol:     float
        :type  file:    str

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._intersect(GXContext._get_tls_geo(), db, x_chan, y_chan, z_chan, tol, file.encode())



    @classmethod
    def intersect_all(cls, db, x_chan, y_chan, z_chan, tol, file):
        """
        Create line intersect table from all lines.

        :param db:      Database
        :param x_chan:  X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param y_chan:  Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param z_chan:  Z Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param tol:     Intersection tolerance
        :param file:    Output Table file Name
        :type  db:      GXDB
        :type  x_chan:  int
        :type  y_chan:  int
        :type  z_chan:  int
        :type  tol:     float
        :type  file:    str

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._intersect_all(GXContext._get_tls_geo(), db, x_chan, y_chan, z_chan, tol, file.encode())



    @classmethod
    def intersect_gd_bto_tbl(cls, db, tbl):
        """
        Create a new intersection table from an intersection database.

        :param db:   Input Intersection Database name
        :param tbl:  Output intersection TBL
        :type  db:   str
        :type  tbl:  str

        .. versionadded:: 7.1

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** If the TBL exists, it is overwritten.
        """
        gxapi_cy.WrapDU._intersect_gd_bto_tbl(GXContext._get_tls_geo(), db.encode(), tbl.encode())



    @classmethod
    def intersect_old(cls, db, x_chan, y_chan, z_chan, in_file, out_file):
        """
        Use existing intersection table and re-calculate miss-ties.

        :param db:        Database
        :param x_chan:    X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param y_chan:    Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param z_chan:    Z Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param in_file:   Input Table file name
        :param out_file:  Output Table file Name
        :type  db:        GXDB
        :type  x_chan:    int
        :type  y_chan:    int
        :type  z_chan:    int
        :type  in_file:   str
        :type  out_file:  str

        .. versionadded:: 5.1.4

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Reads intersection information from an existing intersect
        table and looks up the values at the intersections for the
        input Z channel.
        This makes it unnecessary to re-calculate the intersections every time
        if you want to determine miss-ties using different Z channels, or the
        same Z channel after processing levelling corrections. Existing
        intersections whose locations do not exist in the database are ignored.
""" gxapi_cy.WrapDU._intersect_old(GXContext._get_tls_geo(), db, x_chan, y_chan, z_chan, in_file.encode(), out_file.encode()) @classmethod def intersect_tb_lto_gdb(cls, tbl, db): """ Create a new intersection database from an intersection table. :param tbl: Input intersection TBL :param db: Output Intersection Database name :type tbl: str :type db: str .. versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the GDB exists, it is deleted, so it should not be loaded. The database is split by Tie lines (or whatever lines are found in column 3 of the TBL file. """ gxapi_cy.WrapDU._intersect_tb_lto_gdb(GXContext._get_tls_geo(), tbl.encode(), db.encode()) @classmethod def lab_template(cls, data, templ, type, delimit, name_off, unit_off, data_off, sample_type, data_type): """ Makes a default template from a lab assay file. :param data: Data file name :param templ: New template name :param type: :ref:`DU_LAB_TYPE` :param delimit: Delimiter string :param name_off: Offset to column labels line (0 for first line) :param unit_off: Offset to unit labels line, -1 if none :param data_off: Offset to first line that contains data :param sample_type: Sample channel element type, recommend -10 for 10-character ASCII, or `GS_LONG <geosoft.gxapi.GS_LONG>` for numbers. :param data_type: Default channel element type, recommend `GS_FLOAT <geosoft.gxapi.GS_FLOAT>` :type data: str :type templ: str :type type: int :type delimit: str :type name_off: int :type unit_off: int :type data_off: int :type sample_type: int :type data_type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The template can be used to import the file using sImportBIN_DU. The first column is assumed to be the sample number. 
If the unit label line is the same as the column label line, column labels are assummed to be followed by unit labels using the format "Au-ppm", "Au ppm" or "Au(ppm)". The number of channels is determined from the number of columns in the data channel. If there are more column labels or unit labels, the last labels are assumed to be correct. If there are fewer line labels, default labels "Col_n", where n is the column number, will be created and no unit labels will be defined. .. seealso:: `import_bin <geosoft.gxapi.GXDU.import_bin>` """ gxapi_cy.WrapDU._lab_template(GXContext._get_tls_geo(), data.encode(), templ.encode(), type, delimit.encode(), name_off, unit_off, data_off, sample_type, data_type) @classmethod def load_gravity(cls, db, reg, line, data): """ Load a gravity survey file :param db: Database :param reg: `GXREG <geosoft.gxapi.GXREG>` to hold constant data :param line: Line in which to load data :param data: Gravity data file :type db: GXDB :type reg: GXREG :type line: int :type data: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See GRAVITY.`GXDAT <geosoft.gxapi.GXDAT>` for a description of the file format. Existing data in the line will be replaced. The following `GXREG <geosoft.gxapi.GXREG>` parameters will be set if they appear in the data file: default OPERATOR "" DATE none INSTRUMENT "" INSTRUMENT_SCALE "1.0" BASE_GRAVITY "0.0" FORMULA "1967" GMT_DIFF "0.0" DISTANCE_UNITS "m" DENSITY_EARTH "2.67" DENSITY_WATER "1.0" DENSITY_ICE "0.95" MAP_PROJECTION "" If the corresponding constant is not specified and the `GXREG <geosoft.gxapi.GXREG>` already has the constant defined, it is not changed. If the constant is not defined and it is not already in the `GXREG <geosoft.gxapi.GXREG>`, the indicated default will be set. 
""" gxapi_cy.WrapDU._load_gravity(GXContext._get_tls_geo(), db, reg, line, data.encode()) @classmethod def load_gravity_cg6(cls, db, data): """ Load a CG-6 gravity survey file. :param db: Database :param data: Gravity data file :type db: GXDB :type data: str .. versionadded:: 9.3.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Has its own format - space-delimited columns of data """ gxapi_cy.WrapDU._load_gravity_cg6(GXContext._get_tls_geo(), db, data.encode()) @classmethod def load_gravity_cg6_to_line(cls, db, data, line): """ Load a CG-6 gravity survey file. Specify the name of the output line :param db: Database :param data: Gravity data file :param line: line name - can be empty in which case it will create a line name from the input file name :type db: GXDB :type data: str :type line: str .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Has its own format - space-delimited columns of data """ gxapi_cy.WrapDU._load_gravity_cg6_to_line(GXContext._get_tls_geo(), db, data.encode(), line.encode()) @classmethod def load_gravity_cg6_ex(cls, db, data, line, date_format): """ Load a CG-6 gravity survey file. Specify the name of the output line :param db: Database :param data: Gravity data file :param line: line name - can be empty in which case it will create a line name from the input file name :param date_format: :ref:`DATE_FORMAT` :type db: GXDB :type data: str :type line: str :type date_format: int .. 
versionadded:: 2022.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Has its own format - space-delimited columns of data """ gxapi_cy.WrapDU._load_gravity_cg6_ex(GXContext._get_tls_geo(), db, data.encode(), line.encode(), date_format) @classmethod def load_ltb(cls, db, line, ltb, mode): """ Load `GXLTB <geosoft.gxapi.GXLTB>` into a database line. :param db: Database :param line: Line :param ltb: Table :param mode: :ref:`DU_LOADLTB` :type db: GXDB :type line: int :type ltb: GXLTB :type mode: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** A new channel will be created for all `GXLTB <geosoft.gxapi.GXLTB>` fields that do not already exist. The `GXLTB <geosoft.gxapi.GXLTB>` field type will be double if all entries can be converted to double, otherwise it will be a string type set to the larger of 16 characters or the longest string in the field. For _APPEND, the `GXLTB <geosoft.gxapi.GXLTB>` data is simply added the end of each channel. `re_fid_all_ch <geosoft.gxapi.GXDU.re_fid_all_ch>` can be used to re-fid data to match a specifc channel and there-by case all channels to be the same length before appending data. """ gxapi_cy.WrapDU._load_ltb(GXContext._get_tls_geo(), db, line, ltb, mode) @classmethod def make_fid(cls, db, line, i_ch, o_ch): """ Make a fiducial channel based on an existing channel. :param db: Database object :param line: Line Symbol :param i_ch: Base channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: New fiducial channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type line: int :type i_ch: int :type o_ch: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDU._make_fid(GXContext._get_tls_geo(), db, line, i_ch, o_ch) @classmethod def mask(cls, db, line, i_ch, m_ch): """ Mask dummies in one channel against another. :param db: Database :param line: Line handle :param i_ch: Channel to mask [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param m_ch: Mask channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type db: GXDB :type line: int :type i_ch: int :type m_ch: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._mask(GXContext._get_tls_geo(), db, line, i_ch, m_ch) @classmethod def math(cls, db, line, exp): """ Apply an expression to the database :param db: Database :param line: Line handle :param exp: Math expression object (`GXEXP <geosoft.gxapi.GXEXP>`) :type db: GXDB :type line: int :type exp: GXEXP .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The MATH_DU method will READWRITE lock channels on the left side of expressions and READONLY lock channels on the right side of expressions. Channels are unlocked before returning. Therefore, channels on the left side of an expression cannot be locked READONLY because the `math <geosoft.gxapi.GXDU.math>` attempt to lock the channel READWRITE will fail. Similarly, channels on the right side of an expression cannot be locked READWRITE because `math <geosoft.gxapi.GXDU.math>`'s attempt to lock the channels READONLY will fail. If this is confusing, just make sure no channels used in the expression are locked before calling `math <geosoft.gxapi.GXDU.math>`. .. 
seealso:: `GXEXP <geosoft.gxapi.GXEXP>` """ gxapi_cy.WrapDU._math(GXContext._get_tls_geo(), db, line, exp) @classmethod def merge_line(cls, db, i_line, m_line, o_line, mode): """ Merge a line a the fiducial and copies any data past that fiducial into the new line. :param db: Database :param i_line: Input Line1 [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param m_line: Input Line2 [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_line: Output Line [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param mode: :ref:`DU_MERGE` :type db: GXDB :type i_line: int :type m_line: int :type o_line: int :type mode: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._merge_line(GXContext._get_tls_geo(), db, i_line, m_line, o_line, mode) @classmethod def mod_fid_range(cls, db, ln, fid_start, incr, start_index, num, opt): """ Insert/Append/Delete a range of fids. :param db: Database :param ln: Line :param fid_start: Base fid start :param incr: Base fid increment :param start_index: Start index (can be negative) :param num: Number of fids :param opt: :ref:`DU_MODFID` :type db: GXDB :type ln: int :type fid_start: float :type incr: float :type start_index: int :type num: int :type opt: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Channels that do not have the same fid start or fid increment are not processed. Protected channels are modified automatically. """ gxapi_cy.WrapDU._mod_fid_range(GXContext._get_tls_geo(), db, ln, fid_start, incr, start_index, num, opt) @classmethod def move(cls, db, line, i_ch, c_ch, o_ch, mode): """ Move/correct a channel to a control channel. 
:param db: Database :param line: Line Handle to Apply this to :param i_ch: Input channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param c_ch: Control channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Result channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param mode: :ref:`DU_MOVE` :type db: GXDB :type line: int :type i_ch: int :type c_ch: int :type o_ch: int :type mode: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The input channel is moved to the absolute location of the control channel. """ gxapi_cy.WrapDU._move(GXContext._get_tls_geo(), db, line, i_ch, c_ch, o_ch, mode) @classmethod def nl_filt(cls, db, line, i_ch, o_ch, width, tol): """ This method applies a non-linear filter to the specified line/channel and places the output in the output channel. :param db: Database :param line: Line handle :param i_ch: Channel to filter [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Output filtered channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param width: Filter Width :param tol: Filter Tolerance, 0 for 10% of Std. Dev. :type db: GXDB :type line: int :type i_ch: int :type o_ch: int :type width: int :type tol: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._nl_filt(GXContext._get_tls_geo(), db, line, i_ch, o_ch, width, tol) @classmethod def normal(cls, db, ch, ignore): """ Set fid of all channels to match a specified channel. :param db: Database handle :param ch: Base Channel for normalization. [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param ignore: Ignore write protection on channels? :type db: GXDB :type ch: int :type ignore: bool .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ .. seealso:: `re_fid_all_ch <geosoft.gxapi.GXDU.re_fid_all_ch>` """ gxapi_cy.WrapDU._normal(GXContext._get_tls_geo(), db, ch, ignore) @classmethod def poly_fill(cls, db, line, x_chan, y_chan, r_chan, pply, dummy): """ Fill using a polygon with a value of 1. :param db: Database :param line: Line Handle :param x_chan: X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_chan: Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param r_chan: Channel to fill [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param pply: Polygon Object to use :param dummy: :ref:`DU_FILL` :type db: GXDB :type line: int :type x_chan: int :type y_chan: int :type r_chan: int :type pply: GXPLY :type dummy: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._poly_fill(GXContext._get_tls_geo(), db, line, x_chan, y_chan, r_chan, pply, dummy) @classmethod def poly_mask(cls, db, line, x_chan, y_chan, r_chan, pply, dummy): """ Mask against a polygon. :param db: Database :param line: Line Handle :param x_chan: X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_chan: Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param r_chan: Channel to mask [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param pply: Polygon Object to use :param dummy: :ref:`DU_MASK` :type db: GXDB :type line: int :type x_chan: int :type y_chan: int :type r_chan: int :type pply: GXPLY :type dummy: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._poly_mask(GXContext._get_tls_geo(), db, line, x_chan, y_chan, r_chan, pply, dummy) @classmethod def project_data(cls, db, line, ix_ch, iy_ch, ox_ch, oy_ch, pj): """ Project X,Y channels :param db: Database :param line: Line Handle to project :param ix_ch: X Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param iy_ch: Y Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param ox_ch: X Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param oy_ch: Y Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param pj: Projection object to Apply :type db: GXDB :type line: int :type ix_ch: int :type iy_ch: int :type ox_ch: int :type oy_ch: int :type pj: GXPJ .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Output channels can be the same as input channels """ gxapi_cy.WrapDU._project_data(GXContext._get_tls_geo(), db, line, ix_ch, iy_ch, ox_ch, oy_ch, pj) @classmethod def project_xyz(cls, db, line, ix_ch, iy_ch, iz_ch, ox_ch, oy_ch, oz_ch, pj): """ Project X,Y,Z channels from one system to another. 
:param db: Database :param line: Line Handle to project :param ix_ch: X Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param iy_ch: Y Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param iz_ch: Z Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param ox_ch: X Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param oy_ch: Y Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param oz_ch: Z Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param pj: Projection object to Apply :type db: GXDB :type line: int :type ix_ch: int :type iy_ch: int :type iz_ch: int :type ox_ch: int :type oy_ch: int :type oz_ch: int :type pj: GXPJ .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Output channels can be the same as input channels """ gxapi_cy.WrapDU._project_xyz(GXContext._get_tls_geo(), db, line, ix_ch, iy_ch, iz_ch, ox_ch, oy_ch, oz_ch, pj) @classmethod def proj_points(cls, db, line, ix_ch, iy_ch, iz_ch, ox_ch, oy_ch, oz_ch, i_name_chan, i_datum_chan, i_method_chan, i_unit_chan, il_datum_chan, o_name_chan, o_datum_chan, o_method_chan, o_unit_chan, ol_datum_chan, error_chan, force_local_datum): """ Project X,Y(Z) channels with different projections :param db: Database :param line: Line Handle to project :param ix_ch: X Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param iy_ch: Y Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param iz_ch: Z Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] (can be DB_NULL_SYMB) :param ox_ch: X Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param oy_ch: Y Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param oz_ch: Z Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] (can be 
DB_NULL_SYMB) :param i_name_chan: Input Name Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param i_datum_chan: Input Datum Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param i_method_chan: Input Method Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param i_unit_chan: Input Unit Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param il_datum_chan: Input Local Datum Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_name_chan: Output Name Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_datum_chan: Output Datum Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_method_chan: Output Method Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_unit_chan: Output Unit Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param ol_datum_chan: Output Local Datum Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param error_chan: Error Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param force_local_datum: Force Local Datum Shifts? :type db: GXDB :type line: int :type ix_ch: int :type iy_ch: int :type iz_ch: int :type ox_ch: int :type oy_ch: int :type oz_ch: int :type i_name_chan: int :type i_datum_chan: int :type i_method_chan: int :type i_unit_chan: int :type il_datum_chan: int :type o_name_chan: int :type o_datum_chan: int :type o_method_chan: int :type o_unit_chan: int :type ol_datum_chan: int :type error_chan: int :type force_local_datum: int .. 
versionadded:: 6.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Output channels can be the same as input channels """ gxapi_cy.WrapDU._proj_points(GXContext._get_tls_geo(), db, line, ix_ch, iy_ch, iz_ch, ox_ch, oy_ch, oz_ch, i_name_chan, i_datum_chan, i_method_chan, i_unit_chan, il_datum_chan, o_name_chan, o_datum_chan, o_method_chan, o_unit_chan, ol_datum_chan, error_chan, force_local_datum) @classmethod def qc_init_separation(cls, db, sep, dir): """ Creates the nearest line channels for line separation QC. :param db: Database :param sep: Nominal Line separation :param dir: Nominal Line direction :type db: GXDB :type sep: float :type dir: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This must be called before QCSeparation_DU. It uses a pager to establish the relative positions of the selected lines, then, for every point determines the closest point in another line to the left and to the right (as determined by looking in the direction of the line.) 
These distances are stored to two new channels in the database, "Closest_Left" and "Closest_Right" """ gxapi_cy.WrapDU._qc_init_separation(GXContext._get_tls_geo(), db, sep, dir) @classmethod def qc_survey_plan(cls, db, wa, pply, sl_spa, sl_azi, slx, sly, sl_sta, sl_inc, tl_spa, tl_azi, tlx, tly, tl_sta, tl_inc, type, sample_spacing, extend_outside): """ Create a database containing proposed survey plan in a `GXPLY <geosoft.gxapi.GXPLY>` :param db: Database to save proposed survey plan :param wa: `GXWA <geosoft.gxapi.GXWA>` to save survey plan summary :param pply: Boundary `GXPLY <geosoft.gxapi.GXPLY>` :param sl_spa: Survey line spacing :param sl_azi: Survey line azimuth :param slx: Survey line reference X coordinate :param sly: Survey line reference Y coordinate :param sl_sta: Survey line starting number of LINES :param sl_inc: Line number increment for survey line :param tl_spa: Tie line spacing :param tl_azi: Tie line azimuth :param tlx: Tie line reference X coordinate :param tly: Tie line reference Y coordinate :param tl_sta: Tie line starting number of LINES :param tl_inc: Line number increment for Tie line :param type: :ref:`QC_PLAN_TYPE` :param sample_spacing: Sample spacing (spacing between points in lines) :param extend_outside: Spacing to extend lines outside polygon :type db: GXDB :type wa: GXWA :type pply: GXPLY :type sl_spa: float :type sl_azi: float :type slx: float :type sly: float :type sl_sta: int :type sl_inc: int :type tl_spa: float :type tl_azi: float :type tlx: float :type tly: float :type tl_sta: int :type tl_inc: int :type type: int :type sample_spacing: float :type extend_outside: float :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The LINE on which has the reference (X,Y) will have the starting Line number The lines on the right hand side of the reference line (while looking into azimuth of ref. 
line) have increasing line numbers. The lines on the left hand side have the decreasing line numbers from the starting number. Returns an error code or 0 (if successful) """ ret_val = gxapi_cy.WrapDU._qc_survey_plan(GXContext._get_tls_geo(), db, wa, pply, sl_spa, sl_azi, slx, sly, sl_sta, sl_inc, tl_spa, tl_azi, tlx, tly, tl_sta, tl_inc, type, sample_spacing, extend_outside) return ret_val @classmethod def qc_survey_plan2(cls, db, wa, pply, sl_spa, sl_azi, slx, sly, sl_sta, sl_inc, tl_spa, tl_azi, tlx, tly, tl_sta, tl_inc, type, sample_spacing, extend_outside): """ Same as QCSurveyPlan_DU, but lines split by the polygon increment version numbers and keep the line number the same. :param db: Database to save proposed survey plan :param wa: `GXWA <geosoft.gxapi.GXWA>` to save survey plan summary :param pply: Boundary `GXPLY <geosoft.gxapi.GXPLY>` :param sl_spa: Survey line spacing :param sl_azi: Survey line azimuth :param slx: Survey line reference X coordinate :param sly: Survey line reference Y coordinate :param sl_sta: Survey line starting number of LINES :param sl_inc: Line number increment for survey line :param tl_spa: Tie line spacing :param tl_azi: Tie line azimuth :param tlx: Tie line reference X coordinate :param tly: Tie line reference Y coordinate :param tl_sta: Tie line starting number of LINES :param tl_inc: Line number increment for Tie line :param type: :ref:`QC_PLAN_TYPE` :param sample_spacing: Sample spacing (spacing between points in lines) :param extend_outside: Spacing to extend lines outside polygon :type db: GXDB :type wa: GXWA :type pply: GXPLY :type sl_spa: float :type sl_azi: float :type slx: float :type sly: float :type sl_sta: int :type sl_inc: int :type tl_spa: float :type tl_azi: float :type tlx: float :type tly: float :type tl_sta: int :type tl_inc: int :type type: int :type sample_spacing: float :type extend_outside: float :rtype: int .. 
versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The LINE on which has the reference (X,Y) will have the starting Line number The lines on the right hand side of the reference line (while looking into azimuth of ref. line) have increasing line numbers. The lines on the left hand side have the decreasing line numbers from the starting number. Returns an error code or 0 (if successful) """ ret_val = gxapi_cy.WrapDU._qc_survey_plan2(GXContext._get_tls_geo(), db, wa, pply, sl_spa, sl_azi, slx, sly, sl_sta, sl_inc, tl_spa, tl_azi, tlx, tly, tl_sta, tl_inc, type, sample_spacing, extend_outside) return ret_val @classmethod def direction(cls, db, line, x_ch, y_ch): """ Returns the direction of a line. :param db: Database Object :param line: Line Symbol :param x_ch: X reference channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_ch: Y reference channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :returns: direction in degrees azimuth (clockwise relative the +Y direction). `GS_R8DM <geosoft.gxapi.GS_R8DM>` if the line has no data, or if there is a problem. Problems will register errors. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The direction is calculated from the first and last non-dummy locations in the X and Y reference channels. 
""" ret_val = gxapi_cy.WrapDU._direction(GXContext._get_tls_geo(), db, line, x_ch, y_ch) return ret_val @classmethod def re_fid(cls, db, line, in_ch, ref_ch, out_ch, mode, start, incr, gap): """ Re-fid a channel based on a reference channel :param db: Database Object :param line: Line Symbol :param in_ch: Original Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] "Y" values :param ref_ch: Reference Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] "X" locations :param out_ch: Output Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param mode: :ref:`DU_REFID` :param start: Start Fid, if `GS_R8DM <geosoft.gxapi.GS_R8DM>`, use ref channel minimum :param incr: Fid increment, if `GS_R8DM <geosoft.gxapi.GS_R8DM>` use nominal spacing of the reference channel. :param gap: Maximum gap to interpolate across :type db: GXDB :type line: int :type in_ch: int :type ref_ch: int :type out_ch: int :type mode: int :type start: float :type incr: float :type gap: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The original channel can be an array channel, in which case the columns (up to the number of columns available in the output) are individually interpolated. If the number of columns in the output channel is more than the input channel, the remaining columns are dummied. This function is fundamentally different in behaviour from `re_fid_ch <geosoft.gxapi.GXDU.re_fid_ch>`. The values in the Reference channel in `re_fid <geosoft.gxapi.GXDU.re_fid>` are the "X" locations corresponding to the "Y" locations in the "Original Channel". Output Channel values are calculated at the new "X" locations specified by the Start Fid and the Fid Increment. 
""" gxapi_cy.WrapDU._re_fid(GXContext._get_tls_geo(), db, line, in_ch, ref_ch, out_ch, mode, start, incr, gap) @classmethod def re_fid_all_ch(cls, db, line, ref_ch): """ Simple re-fid of all channels based on a reference channel :param db: Database Object :param line: Line Symbol :param ref_ch: Reference Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :type db: GXDB :type line: int :type ref_ch: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Channels can be array channels, in which case the columns are individually re-fidded. .. seealso:: `normal <geosoft.gxapi.GXDU.normal>` """ gxapi_cy.WrapDU._re_fid_all_ch(GXContext._get_tls_geo(), db, line, ref_ch) @classmethod def re_fid_ch(cls, db, line, ref_ch, ch): """ Simple re-fid of a channel based on a reference channel :param db: Database Object :param line: Line Symbol :param ref_ch: Reference Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param ch: Channel to refid [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :type db: GXDB :type line: int :type ref_ch: int :type ch: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The original channel can be an array channel, in which case the columns are individually re-fidded. `re_fid_ch <geosoft.gxapi.GXDU.re_fid_ch>` resamples the "Channel to refid" to the "Reference Channel" Fid range and increment. """ gxapi_cy.WrapDU._re_fid_ch(GXContext._get_tls_geo(), db, line, ref_ch, ch) @classmethod def rotate(cls, db, line, in_x_ch, in_y_ch, out_x_ch, out_y_ch, x0, y0, deg): """ Rotate coordinates. 
:param db: Database :param line: Line symbol :param in_x_ch: Input X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param in_y_ch: Input Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param out_x_ch: Output X channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param out_y_ch: Output Y channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param x0: X point about which to rotate :param y0: Y of point about which to rotate :param deg: Angle in degrees CCW :type db: GXDB :type line: int :type in_x_ch: int :type in_y_ch: int :type out_x_ch: int :type out_y_ch: int :type x0: float :type y0: float :type deg: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._rotate(GXContext._get_tls_geo(), db, line, in_x_ch, in_y_ch, out_x_ch, out_y_ch, x0, y0, deg) @classmethod def sample_gd(cls, db, line, ix_ch, iy_ch, o_ch, img): """ Sample a `GXGD <geosoft.gxapi.GXGD>` at a specified X and Y. :param db: Database :param line: Line Handle to sample :param ix_ch: X Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param iy_ch: Y Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Z Output Channel sampled from `GXGD <geosoft.gxapi.GXGD>` [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param img: Grid handle :type db: GXDB :type line: int :type ix_ch: int :type iy_ch: int :type o_ch: int :type img: GXGD .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Values in result channel """ gxapi_cy.WrapDU._sample_gd(GXContext._get_tls_geo(), db, line, ix_ch, iy_ch, o_ch, img) @classmethod def sample_img(cls, db, line, ix_ch, iy_ch, o_ch, img): """ Sample a `GXIMG <geosoft.gxapi.GXIMG>` at a specified X and Y. 
:param db: Database :param line: Line Handle to sample :param ix_ch: X Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param iy_ch: Y Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Z Output Channel sampled from `GXIMG <geosoft.gxapi.GXIMG>` [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param img: `GXIMG <geosoft.gxapi.GXIMG>` handle :type db: GXDB :type line: int :type ix_ch: int :type iy_ch: int :type o_ch: int :type img: GXIMG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Values in result channel """ gxapi_cy.WrapDU._sample_img(GXContext._get_tls_geo(), db, line, ix_ch, iy_ch, o_ch, img) @classmethod def sample_img_line_lst(cls, db, lst, ix_ch, iy_ch, o_ch, img): """ Sample an `GXIMG <geosoft.gxapi.GXIMG>` at a specified X and Y, for a `GXLST <geosoft.gxapi.GXLST>` of lines. :param db: Database :param lst: `GXLST <geosoft.gxapi.GXLST>` of (Line Name, Line Handle) values to sample :param ix_ch: X Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param iy_ch: Y Input Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Z Output Channel sampled from `GXIMG <geosoft.gxapi.GXIMG>` [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param img: `GXIMG <geosoft.gxapi.GXIMG>` handle :type db: GXDB :type lst: GXLST :type ix_ch: int :type iy_ch: int :type o_ch: int :type img: GXIMG .. versionadded:: 8.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Values in result channel """ gxapi_cy.WrapDU._sample_img_line_lst(GXContext._get_tls_geo(), db, lst, ix_ch, iy_ch, o_ch, img) @classmethod def scan_ado(cls, connect, table, templ): """ Scans an external ADO database and generates a default template. 
:param connect: Database connection string :param table: Database Table Name :param templ: Template file name to Create :type connect: str :type table: str :type templ: str .. versionadded:: 5.0.8 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** All the channels are listed """ gxapi_cy.WrapDU._scan_ado(GXContext._get_tls_geo(), connect.encode(), table.encode(), templ.encode()) @classmethod def scan_aseg(cls, file, data, flc, templ): """ This method scans an ASEG-GDF file and generates a default template listing all the channels and all the ALIAS lines. :param file: Header file name :param data: Data file name :param flc: Flight Line Channel name :param templ: Template file name to Create :type file: str :type data: str :type flc: str :type templ: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapDU._scan_aseg(GXContext._get_tls_geo(), file.encode(), data.encode(), flc.encode(), templ.encode()) @classmethod def scan_dao(cls, file, type, table, templ): """ Scans an external DAO database and generates a default template. :param file: Database file name :param type: Database Type :param table: Database Table Name :param templ: Template file name to Create :type file: str :type type: str :type table: str :type templ: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All the channels are listed """ gxapi_cy.WrapDU._scan_dao(GXContext._get_tls_geo(), file.encode(), type.encode(), table.encode(), templ.encode()) @classmethod def scan_pico(cls, data, templ): """ This method scans a picodas file and generates a default template listing all the channels and all the ALIAS lines. 
:param data: Data file Name :param templ: Template file name to Create :type data: str :type templ: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._scan_pico(GXContext._get_tls_geo(), data.encode(), templ.encode()) @classmethod def sort(cls, db, line, ch, sort): """ Sort the contents of a channel. :param db: Database :param line: Line symbol :param ch: Channel to sort [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param sort: :ref:`DU_SORT` :type db: GXDB :type line: int :type ch: int :type sort: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._sort(GXContext._get_tls_geo(), db, line, ch, sort) @classmethod def sort_index(cls, db, line, ch, in_ch, sort): """ Create an ordered index of the contents of a channel. The order of rows where compared items are the same is preserved. :param db: Database :param line: Line symbol :param ch: Channel to sort [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param in_ch: Output index channel (should be int) [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param sort: :ref:`DU_SORT` :type db: GXDB :type line: int :type ch: int :type in_ch: int :type sort: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._sort_index(GXContext._get_tls_geo(), db, line, ch, in_ch, sort) @classmethod def sort_index2(cls, db, line, ch1, sort1, ch2, sort2, in_ch): """ Create an ordered index from two channels. The order of rows where compared items are the same is preserved. 
        :param db:     Database
        :param line:   Line symbol
        :param ch1:    Sort by this channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param sort1:  :ref:`DU_SORT`
        :param ch2:    Then by this channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param sort2:  :ref:`DU_SORT`
        :param in_ch:  Output index channel (should be int) [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :type db: GXDB
        :type line: int
        :type ch1: int
        :type sort1: int
        :type ch2: int
        :type sort2: int
        :type in_ch: int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._sort_index2(GXContext._get_tls_geo(), db, line, ch1, sort1, ch2, sort2, in_ch)

    @classmethod
    def sort_index_n(cls, db, line, chVV, orderVV, in_ch):
        """
        Create an ordered index from any number of channels. The order of rows where compared items are the same is preserved.

        :param db:       Database
        :param line:     Line symbol
        :param chVV:     VV of channel symbols (INT). Sort by these channels [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param orderVV:  VV of sort order values (INT, one for each channel) :ref:`DU_SORT`
        :param in_ch:    Output index channel (should be int) [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :type db: GXDB
        :type line: int
        :type chVV: GXVV
        :type orderVV: GXVV
        :type in_ch: int

        .. versionadded:: 9.10

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._sort_index_n(GXContext._get_tls_geo(), db, line, chVV, orderVV, in_ch)

    @classmethod
    def split_line(cls, db, i_line, o_line, fid):
        """
        Splits a line at the fiducial and copies any data past
        that fiducial into the new line.
        :param db:      Database
        :param i_line:  Input Line, will be reduced at fid [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param o_line:  Output Line, will take data above fid [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param fid:     Fid number of split
        :type db: GXDB
        :type i_line: int
        :type o_line: int
        :type fid: float

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._split_line(GXContext._get_tls_geo(), db, i_line, o_line, fid)

    @classmethod
    def split_line2(cls, db, i_line, o_line, fid, reset_fi_ds):
        """
        Splits a line at the fiducial and copies any data past
        that fiducial into the new line.

        :param db:           Database
        :param i_line:       Input Line, will be reduced at fid [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param o_line:       Output Line, will take data above fid [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param fid:          Fid number of split
        :param reset_fi_ds:  Reset starting fiducials to zero (0: No, 1: Yes)
        :type db: GXDB
        :type i_line: int
        :type o_line: int
        :type fid: float
        :type reset_fi_ds: int

        .. versionadded:: 8.4

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The same as SplitLine, but with an option to reset each line's starting fiducial to zero.
        """
        gxapi_cy.WrapDU._split_line2(GXContext._get_tls_geo(), db, i_line, o_line, fid, reset_fi_ds)

    @classmethod
    def split_line_xy(cls, db, line, x_ch, y_ch, dirctn, tolrnc, down_tol, method, first_line, line_inc):
        """
        Break up a line based on tolerance of lateral and horizontal distance, with
        options for the output line names.
        :param db:          Database
        :param line:        Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param x_ch:        Channel X [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param y_ch:        Channel Y [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param dirctn:      Line direction, 0-any, 1-X, 2-Y.
        :param tolrnc:      Lateral tolerance, DUMMY for the default (10% of the separation between the first two points).
        :param down_tol:    Downline Tolerance, DUMMY for none
        :param method:      :ref:`DU_SPLITLINE`
        :param first_line:  First line in the sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>`. On return, the next line in the sequence.
        :param line_inc:    Increment in the line number sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>`
        :type db: GXDB
        :type line: int
        :type x_ch: int
        :type y_ch: int
        :type dirctn: int
        :type tolrnc: float
        :type down_tol: float
        :type method: int
        :type first_line: int_ref
        :type line_inc: int

        .. versionadded:: 8.3

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The original line will be deleted.
        """
        first_line.value = gxapi_cy.WrapDU._split_line_xy(GXContext._get_tls_geo(), db, line, x_ch, y_ch, dirctn, tolrnc, down_tol, method, first_line.value, line_inc)

    @classmethod
    def split_line_xy2(cls, db, line, x_ch, y_ch, dirctn, tolrnc, down_tol, method, first_line, line_inc, reset_fi_ds):
        """
        Break up a line based on tolerance of lateral and horizontal distance, with
        options for the output line names.

        :param db:          Database
        :param line:        Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param x_ch:        Channel X [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param y_ch:        Channel Y [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param dirctn:      Line direction, 0-any, 1-X, 2-Y.
        :param tolrnc:       Lateral tolerance, DUMMY for the default (10% of the separation between the first two points).
        :param down_tol:     Downline Tolerance, DUMMY for none
        :param method:       :ref:`DU_SPLITLINE`
        :param first_line:   First line in the sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>`. On return, the next line in the sequence.
        :param line_inc:     Increment in the line number sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>`
        :param reset_fi_ds:  Reset starting fiducials to zero (0: No, 1: Yes)
        :type db: GXDB
        :type line: int
        :type x_ch: int
        :type y_ch: int
        :type dirctn: int
        :type tolrnc: float
        :type down_tol: float
        :type method: int
        :type first_line: int_ref
        :type line_inc: int
        :type reset_fi_ds: int

        .. versionadded:: 8.4

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The same as SplitLineXY, but with an option to reset each line's starting fiducial to zero.
        """
        first_line.value = gxapi_cy.WrapDU._split_line_xy2(GXContext._get_tls_geo(), db, line, x_ch, y_ch, dirctn, tolrnc, down_tol, method, first_line.value, line_inc, reset_fi_ds)

    @classmethod
    def split_line_xy3(cls, db, line, x_ch, y_ch, dirctn, tolrnc, down_tol, method, first_line, line_inc, retain_line_type, reset_fi_ds):
        """
        Break up a line based on tolerance of lateral and horizontal distance, with
        options for the output line names.

        :param db:      Database
        :param line:    Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param x_ch:    Channel X [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param y_ch:    Channel Y [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param dirctn:  Line direction, 0-any, 1-X, 2-Y.
        :param tolrnc:  Lateral tolerance, DUMMY for the default (10% of the separation between the first two points).
:param down_tol: Downline Tolerance, DUMMY for none :param method: :ref:`DU_SPLITLINE` :param first_line: First line in the sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>`. On return, the next line in the sequence. :param line_inc: Increment in the line number sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>` :param retain_line_type: Maintain line types for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>` (0: No, 1: Yes) :param reset_fi_ds: Reset starting fiducials to zero (0: No, 1: Yes) :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type dirctn: int :type tolrnc: float :type down_tol: float :type method: int :type first_line: int_ref :type line_inc: int :type retain_line_type: int :type reset_fi_ds: int .. versionadded:: 9.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The same as SplitLineXY2, but with the option to maintain line types when outputting sequentially numbered lines. """ first_line.value = gxapi_cy.WrapDU._split_line_xy3(GXContext._get_tls_geo(), db, line, x_ch, y_ch, dirctn, tolrnc, down_tol, method, first_line.value, line_inc, retain_line_type, reset_fi_ds) @classmethod def split_line_by_direction(cls, db, line, x_ch, y_ch, angular_change, over_a_distance_of, minimum_line_length, break_on_separation_distance, save_discards, method, first_line, line_inc, reset_fi_ds): """ The line is split when the heading (calculated from the current X and Y channels) changes by more than a specified amount over a specified distance. Additional options to discard too-short lines :param db: Database :param line: Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param x_ch: X Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]. :param y_ch: Y Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]. 
:param angular_change: Maximum angular change allowed (degrees)... :param over_a_distance_of: ...over a distance of :param minimum_line_length: Delete lines shorter than (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param break_on_separation_distance: Break on data XY separation greater than (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param save_discards: Whether to save too-short segments as special lines or to discard them :param method: :ref:`DU_SPLITLINE` ONLY DU_SPLITLINEXY_SEQUENTIAL and DU_SPLITLINEXY_VERSIONS :param first_line: First line in the sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>`. On return, the next line in the sequence. :param line_inc: Increment in the line number sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>` :param reset_fi_ds: Reset starting fiducials to zero (0: No, 1: Yes) :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type angular_change: float :type over_a_distance_of: float :type minimum_line_length: float :type break_on_separation_distance: float :type save_discards: int :type method: int :type first_line: int_ref :type line_inc: int :type reset_fi_ds: int .. versionadded:: 8.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Split a line based on changes in heading. 
""" first_line.value = gxapi_cy.WrapDU._split_line_by_direction(GXContext._get_tls_geo(), db, line, x_ch, y_ch, angular_change, over_a_distance_of, minimum_line_length, break_on_separation_distance, save_discards, method, first_line.value, line_inc, reset_fi_ds) @classmethod def split_line_by_direction2(cls, db, line, x_ch, y_ch, angular_change, over_a_distance_of, minimum_line_length, break_on_separation_distance, save_discards, method, first_line, line_inc, retain_line_type, reset_fi_ds): """ The same as SplitLineByDirection, but with the option to maintain line types when outputting sequentially numbered lines. :param db: Database :param line: Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param x_ch: X Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]. :param y_ch: Y Channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]. :param angular_change: Maximum angular change allowed (degrees)... :param over_a_distance_of: ...over a distance of :param minimum_line_length: Delete lines shorter than (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param break_on_separation_distance: Break on data XY separation greater than (can be `rDUMMY <geosoft.gxapi.rDUMMY>`) :param save_discards: Whether to save too-short segments as special lines or to discard them :param method: :ref:`DU_SPLITLINE` ONLY DU_SPLITLINEXY_SEQUENTIAL and DU_SPLITLINEXY_VERSIONS :param first_line: First line in the sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>`. On return, the next line in the sequence. 
:param line_inc: Increment in the line number sequence, for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>` :param retain_line_type: Maintain line types for `DU_SPLITLINE_SEQUENTIAL <geosoft.gxapi.DU_SPLITLINE_SEQUENTIAL>` (0: No, 1: Yes) :param reset_fi_ds: Reset starting fiducials to zero (0: No, 1: Yes) :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type angular_change: float :type over_a_distance_of: float :type minimum_line_length: float :type break_on_separation_distance: float :type save_discards: int :type method: int :type first_line: int_ref :type line_inc: int :type retain_line_type: int :type reset_fi_ds: int .. versionadded:: 9.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Split a line based on changes in heading. """ first_line.value = gxapi_cy.WrapDU._split_line_by_direction2(GXContext._get_tls_geo(), db, line, x_ch, y_ch, angular_change, over_a_distance_of, minimum_line_length, break_on_separation_distance, save_discards, method, first_line.value, line_inc, retain_line_type, reset_fi_ds) @classmethod def stat(cls, db, line, i_ch, st): """ Add a data channel to a statistics object. :param db: Database :param line: Line handle :param i_ch: Channel handle [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param st: Statistics handle :type db: GXDB :type line: int :type i_ch: int :type st: GXST .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If the input channel is a `GXVA <geosoft.gxapi.GXVA>` (array) channel, then the columns set using `GXDB.set_va_windows <geosoft.gxapi.GXDB.set_va_windows>` are used in the statistics; all columns are used by default. .. 
seealso:: `GXST <geosoft.gxapi.GXST>` """ gxapi_cy.WrapDU._stat(GXContext._get_tls_geo(), db, line, i_ch, st) @classmethod def table_line_fid(cls, db, chan, ref, tb, field): """ Place a Line/Fid information into a Channel. :param db: Database :param chan: Output channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param ref: Reference channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param tb: Table to Use :param field: Table field wanted :type db: GXDB :type chan: int :type ref: int :type tb: GXTB :type field: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._table_line_fid(GXContext._get_tls_geo(), db, chan, ref, tb, field) @classmethod def table_selected_lines_fid(cls, db, chan, ref, tb, field): """ Place a Line/Fid information into a Channel for the selected lines in the database. :param db: Database :param chan: Output channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param ref: Reference channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param tb: Table to Use :param field: Table field wanted :type db: GXDB :type chan: int :type ref: int :type tb: GXTB :type field: int .. 
        versionadded:: 9.1

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_
        """
        gxapi_cy.WrapDU._table_selected_lines_fid(GXContext._get_tls_geo(), db, chan, ref, tb, field)

    @classmethod
    def time_constant(cls, db, line, resp_chan, time_chan, tau_chan, intercept_chan, fit_chan, log_opt):
        """
        Calculate TEM time constant (Tau)

        :param db:              Database, required
        :param line:            Line Handle, required
        :param resp_chan:       Response channel, required [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param time_chan:       Time channel, required [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param tau_chan:        Output Time constant (Tau) channel, required [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param intercept_chan:  Output Intercept channel, no output if `NULLSYMB <geosoft.gxapi.NULLSYMB>` [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param fit_chan:        Output predicted response channel, no output if `NULLSYMB <geosoft.gxapi.NULLSYMB>` [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] Result is based on least square fit from Tau and Intercept
        :param log_opt:         Log option applied to time channel: 0 - linear, 1 - log10
        :type db: GXDB
        :type line: int
        :type resp_chan: int
        :type time_chan: int
        :type tau_chan: int
        :type intercept_chan: int
        :type fit_chan: int
        :type log_opt: int

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** When the DU_TIME_LOG option is used, the Time channel will be converted logarithmically before calculating the time constant. Logarithmic conversion is always applied to the response channel.
        """
        gxapi_cy.WrapDU._time_constant(GXContext._get_tls_geo(), db, line, resp_chan, time_chan, tau_chan, intercept_chan, fit_chan, log_opt)

    @classmethod
    def trend(cls, db, line, i_ch, o_ch, order):
        """
        Calculates an n'th order trend of a data channel.
:param db: Database :param line: Line Handle to Apply this to :param i_ch: Input channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param o_ch: Result channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param order: Trend Order, 0 to 9 :type db: GXDB :type line: int :type i_ch: int :type o_ch: int :type order: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ .. seealso:: `b_spline <geosoft.gxapi.GXDU.b_spline>` """ gxapi_cy.WrapDU._trend(GXContext._get_tls_geo(), db, line, i_ch, o_ch, order) @classmethod def update_intersect_db(cls, db, x_chan, z_chan, int_db): """ Update the Z and DZ values in an intersection database, using the current database. :param db: Flight Database Object :param x_chan: X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] (for location info) :param z_chan: Z Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param int_db: Intersection database to update :type db: GXDB :type x_chan: int :type z_chan: int :type int_db: GXDB .. versionadded:: 7.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Updates the TZ, TDZ, LZ and LDZ channels at the intersections, using the current flight database. """ gxapi_cy.WrapDU._update_intersect_db(GXContext._get_tls_geo(), db, x_chan, z_chan, int_db) @classmethod def voxel_section(cls, db, line, x_ch, y_ch, vox, grid, cell_x, cell_y, interp): """ Slice a voxel to a grid under a database line. 
        :param db:      Database Object
        :param line:    Input Line Symbol [READWRITE]
        :param x_ch:    X Channel (DB_NO_SYMB if LineDir==0)
        :param y_ch:    Y Channel (DB_NO_SYMB if LineDir==0)
        :param vox:     Voxel to slice
        :param grid:    Output grid name
        :param cell_x:  X cell size (horizontal)
        :param cell_y:  Y cell size (vertical)
        :param interp:  Interp: 1 - linear, 0 - nearest
        :type db: GXDB
        :type line: int
        :type x_ch: int
        :type y_ch: int
        :type vox: GXVOX
        :type grid: str
        :type cell_x: float
        :type cell_y: float
        :type interp: int

        .. versionadded:: 6.4

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** Takes the first and last XY locations in a line (using the current
        X and Y channels) and defines a section grid as a slice through a voxel file.

        The grid cell sizes can be left as `GS_R8DM <geosoft.gxapi.GS_R8DM>`, in which case
        an attempt will be made to match the voxel cell size, based on
        the line azimuth, voxel rotation, etc.

        If the slice does NOT intersect the voxel, or if there are fewer
        than 2 valid locations in the line, then no grid file is created,
        but there is no error. (This is to simplify creating multiple grids
        at once, where not all may intersect).
        """
        gxapi_cy.WrapDU._voxel_section(GXContext._get_tls_geo(), db, line, x_ch, y_ch, vox, grid.encode(), cell_x, cell_y, interp)

    @classmethod
    def write_wa(cls, db, line, lst, wa):
        """
        Write data to an ASCII file.

        :param db:    Database
        :param line:  Line symbol
        :param lst:   List of channel names to write
        :param wa:    `GXWA <geosoft.gxapi.GXWA>` to write to
        :type db: GXDB
        :type line: int
        :type lst: GXLST
        :type wa: GXWA

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Channels to be written should be placed in a `GXLST <geosoft.gxapi.GXLST>` object.

        Channels are written in the order of the list.  Only the
        channel names in the list are used.
        Data is formatted as in the channel definition and
        channels are separated by a single space character.
        """
        gxapi_cy.WrapDU._write_wa(GXContext._get_tls_geo(), db, line, lst, wa)

    @classmethod
    def xyz_line(cls, db, line, x_ch, y_ch, dirctn, tolrnc):
        """
        Break up a line based on tolerance of lateral distance.

        :param db:      Database
        :param line:    Line to be broken up
        :param x_ch:    Channel X
        :param y_ch:    Channel Y
        :param dirctn:  Line direction, 0-any, 1-X, 2-Y.
        :param tolrnc:  Tolerance, DUMMY for the default (10% of the separation between the first two points).
        :type db: GXDB
        :type line: int
        :type x_ch: int
        :type y_ch: int
        :type dirctn: int
        :type tolrnc: float

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The original line will be deleted.
        """
        gxapi_cy.WrapDU._xyz_line(GXContext._get_tls_geo(), db, line, x_ch, y_ch, dirctn, tolrnc)

    @classmethod
    def xyz_line2(cls, db, line, x_ch, y_ch, dirctn, tolrnc, down_tol):
        """
        Break up a line based on tolerance of lateral and horizontal distance.

        :param db:        Database
        :param line:      Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param x_ch:      Channel X [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param y_ch:      Channel Y [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param dirctn:    Line direction, 0-any, 1-X, 2-Y.
        :param tolrnc:    Tolerance, DUMMY for the default (10% of the separation between the first two points).
        :param down_tol:  Downline Tolerance, DUMMY for none
        :type db: GXDB
        :type line: int
        :type x_ch: int
        :type y_ch: int
        :type dirctn: int
        :type tolrnc: float
        :type down_tol: float

        .. versionadded:: 5.0

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The original line will be deleted.
        """
        gxapi_cy.WrapDU._xyz_line2(GXContext._get_tls_geo(), db, line, x_ch, y_ch, dirctn, tolrnc, down_tol)

    @classmethod
    def xyz_line3(cls, db, line, x_ch, y_ch, dirctn, tolrnc, down_tol, reset_fi_ds):
        """
        Break up a line based on tolerance of lateral and horizontal distance.

        :param db:           Database
        :param line:         Line to be broken up [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param x_ch:         Channel X [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param y_ch:         Channel Y [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param dirctn:       Line direction, 0-any, 1-X, 2-Y.
        :param tolrnc:       Tolerance, DUMMY for the default (10% of the separation between the first two points).
        :param down_tol:     Downline Tolerance, DUMMY for none
        :param reset_fi_ds:  Reset starting fiducials to zero (0: No, 1: Yes)
        :type db: GXDB
        :type line: int
        :type x_ch: int
        :type y_ch: int
        :type dirctn: int
        :type tolrnc: float
        :type down_tol: float
        :type reset_fi_ds: int

        .. versionadded:: 8.4

        **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_

        **Note:** The same as XyzLine2, but with an option to reset each line's starting fiducial to zero.
        """
        gxapi_cy.WrapDU._xyz_line3(GXContext._get_tls_geo(), db, line, x_ch, y_ch, dirctn, tolrnc, down_tol, reset_fi_ds)

    @classmethod
    def z_mask(cls, db, line, chan, z_chan, zmin, zmax):
        """
        Mask dummies in one channel against another (Z) with the range Zmin/Zmax.

        :param db:      Database
        :param line:    Line Handle
        :param chan:    Channel to mask [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`]
        :param z_chan:  Mask Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`]
        :param zmin:    Min value of mask range
        :param zmax:    Max value of mask range
        :type db: GXDB
        :type line: int
        :type chan: int
        :type z_chan: int
        :type zmin: float
        :type zmax: float

        ..
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._z_mask(GXContext._get_tls_geo(), db, line, chan, z_chan, zmin, zmax) @classmethod def range_xy(cls, db, x_min, y_min, x_max, y_max): """ Find the range of X, and Y in the selected lines. :param db: Database :param x_min: Minimum X (returned) :param y_min: Minimum Y (returned) :param x_max: Maximum X (returned) :param y_max: Maximum Y (returned) :type db: GXDB :type x_min: float_ref :type y_min: float_ref :type x_max: float_ref :type y_max: float_ref .. versionadded:: 8.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Returns the range in X and Y of the current X and Y channels. Returned values are dummy if no valid items are found. """ x_min.value, y_min.value, x_max.value, y_max.value = gxapi_cy.WrapDU._range_xy(GXContext._get_tls_geo(), db, x_min.value, y_min.value, x_max.value, y_max.value) @classmethod def range_xyz(cls, db, x_ch, y_ch, z_ch, x_min, y_min, z_min, x_max, y_max, z_max, n_tot): """ Find the range of X, Y and Z in selected lines. :param db: Database :param x_ch: X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_ch: Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_ch: Z Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param x_min: Minimum X (returned) :param y_min: Minimum Y (returned) :param z_min: Minimum Z (returned) :param x_max: Maximum X (returned) :param y_max: Maximum Y (returned) :param z_max: Maximum Z (returned) :param n_tot: Number of data values (returned) :type db: GXDB :type x_ch: int :type y_ch: int :type z_ch: int :type x_min: float_ref :type y_min: float_ref :type z_min: float_ref :type x_max: float_ref :type y_max: float_ref :type z_max: float_ref :type n_tot: int_ref .. 
versionadded:: 8.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The X, Y and Z channels should be normal (not array) channels. Only locations where all values are non-dummy are included in the calculation. If no non-dummy values are found, Dummy values are returned. """ x_min.value, y_min.value, z_min.value, x_max.value, y_max.value, z_max.value, n_tot.value = gxapi_cy.WrapDU._range_xyz(GXContext._get_tls_geo(), db, x_ch, y_ch, z_ch, x_min.value, y_min.value, z_min.value, x_max.value, y_max.value, z_max.value, n_tot.value) @classmethod def range_xyz_data(cls, db, x_ch, y_ch, z_ch, d_ch, x_min, y_min, z_min, d_min, x_max, y_max, z_max, d_max, n_tot): """ Find the range of X, Y, Z and Data values in selected lines. :param db: Database :param x_ch: X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_ch: Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_ch: Z Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param d_ch: Data Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param x_min: Minimum X (returned) :param y_min: Minimum Y (returned) :param z_min: Minimum Z (returned) :param d_min: Minimum Data value (returned) :param x_max: Maximum X (returned) :param y_max: Maximum Y (returned) :param z_max: Maximum Z (returned) :param d_max: Maximum Data value (returned) :param n_tot: Number of data values (returned) :type db: GXDB :type x_ch: int :type y_ch: int :type z_ch: int :type d_ch: int :type x_min: float_ref :type y_min: float_ref :type z_min: float_ref :type d_min: float_ref :type x_max: float_ref :type y_max: float_ref :type z_max: float_ref :type d_max: float_ref :type n_tot: int_ref .. 
versionadded:: 8.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The Z and Data channels may be array channels, but both must have the same number of columns. Only values where all channels are non-dummy (or, for `GXVA <geosoft.gxapi.GXVA>` channels, where the Z or Data value are defined) are included in the calculation. If no non-dummy values are found, Dummy values are returned. This function is optimized for cases where Z and Data are array channels with many columns (e.g. 32 or more columns). """ x_min.value, y_min.value, z_min.value, d_min.value, x_max.value, y_max.value, z_max.value, d_max.value, n_tot.value = gxapi_cy.WrapDU._range_xyz_data(GXContext._get_tls_geo(), db, x_ch, y_ch, z_ch, d_ch, x_min.value, y_min.value, z_min.value, d_min.value, x_max.value, y_max.value, z_max.value, d_max.value, n_tot.value) @classmethod def create_drillhole_parameter_weight_constraint_database(cls, db, ch, reg, database): """ Used for weighting inversion models. :param db: Database (selected lines used) :param ch: Property channel handle [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param reg: Parameters (see notes) :param database: Output database :type db: GXDB :type ch: int :type reg: GXREG :type database: str .. versionadded:: 8.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Control parameters are passed in the `GXREG <geosoft.gxapi.GXREG>` (to allow for future expansion without the need to modify the wrappers). The input drillhole database must contain current X, Y and Z channels. Drillhole data should be equally spaced (or nearly so) down the hole. Weights are calculated on a circle perpendicular to the hole at each point. RADIUS - Maximum radius from drillhole to create weighting points (Default = 100). 
INCRMENENT - Grid cell size in weighting circle (Default = 10). MINIMUM - the minimum weighting value to apply, at the radius (Default = 0.0001). POWER - Exponential power to use in the weighting function (negative of this is used) (Default = 2). """ gxapi_cy.WrapDU._create_drillhole_parameter_weight_constraint_database(GXContext._get_tls_geo(), db, ch, reg, database.encode()) @classmethod def calculate_draped_survey_altitude(cls, db, line, x_ch, y_ch, img, z_ch, ascent, descent, drape_height, n_hanning, hanning_width, min_curvature): """ Calculate a draped flight path, enforcing maximum descent and ascent rates. :param db: Database :param line: Line [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param x_ch: X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_ch: Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param img: Topography grid :param z_ch: Output draped altitude channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param ascent: Maximum rate of ascent (%) :param descent: Maximum rate of descent (%) :param drape_height: Minimum terrain clearance (drape height) :param n_hanning: Number of times to apply Hanning Filter :param hanning_width: Width of Hanning Filter :param min_curvature: Minimum radius of curvature down slopes and at valley bottoms (`rDUMMY <geosoft.gxapi.rDUMMY>` to disable) :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type img: GXIMG :type z_ch: int :type ascent: float :type descent: float :type drape_height: float :type n_hanning: int :type hanning_width: float :type min_curvature: float .. versionadded:: 8.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Calculate a draped flight path, enforcing maximum descent and ascent rates. Additional Inputs are the sample distance along the line and a topography grid. 
""" gxapi_cy.WrapDU._calculate_draped_survey_altitude(GXContext._get_tls_geo(), db, line, x_ch, y_ch, img, z_ch, ascent, descent, drape_height, n_hanning, hanning_width, min_curvature) @classmethod def calculate_draped_survey_altitude2(cls, db, line, x_ch, y_ch, img, dem_ch, z_ch, ascent, descent, drape_height, min_drape_height, n_hanning, hanning_width, min_curvature): """ Calculate a draped flight path, enforcing maximum descent and ascent rates. :param db: Database :param line: Line [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param x_ch: X Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_ch: Y Channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param img: Topography grid :param dem_ch: Output DEM channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] (can be `NULLSYMB <geosoft.gxapi.NULLSYMB>` if not required) :param z_ch: Output draped altitude channel [`DB_LOCK_READWRITE <geosoft.gxapi.DB_LOCK_READWRITE>`] :param ascent: Maximum rate of ascent (%) :param descent: Maximum rate of descent (%) :param drape_height: Nominal terrain clearance (drape height) :param min_drape_height: Minimum terrain clearance (hard minimum drape height) :param n_hanning: Number of times to apply Hanning Filter :param hanning_width: Width of Hanning Filter :param min_curvature: Minimum radius of curvature down slopes and at valley bottoms (`rDUMMY <geosoft.gxapi.rDUMMY>` to disable) :type db: GXDB :type line: int :type x_ch: int :type y_ch: int :type img: GXIMG :type dem_ch: int :type z_ch: int :type ascent: float :type descent: float :type drape_height: float :type min_drape_height: float :type n_hanning: int :type hanning_width: float :type min_curvature: float .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Calculate a draped flight path, enforcing maximum descent and ascent rates. 
Set both a nominal and minimum drape height. Additional Inputs are the sample distance along the line and a topography grid. """ gxapi_cy.WrapDU._calculate_draped_survey_altitude2(GXContext._get_tls_geo(), db, line, x_ch, y_ch, img, dem_ch, z_ch, ascent, descent, drape_height, min_drape_height, n_hanning, hanning_width, min_curvature) @classmethod def direct_grid_data_to_voxel(cls, db, x_channel, y_channel, z_channel, data_channel, output_voxel_filename, origin_x, origin_y, origin_z, cell_count_x, cell_count_y, cell_count_z, cell_size_x, cell_size_y, cell_size_z, method): """ Create a voxel using direct gridding. :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param output_voxel_filename: Output voxel filename :param origin_x: Voxel origin X :param origin_y: Voxel origin Y :param origin_z: Voxel origin Z :param cell_count_x: Voxel cell count X :param cell_count_y: Voxel cell count Y :param cell_count_z: Voxel cell count Z :param cell_size_x: Voxel cell size X :param cell_size_y: Voxel cell size Y :param cell_size_z: Voxel cell size Z :param method: :ref:`DU_DIRECTGRID_METHOD` :type db: GXDB :type x_channel: int :type y_channel: int :type z_channel: int :type data_channel: int :type output_voxel_filename: str :type origin_x: float :type origin_y: float :type origin_z: float :type cell_count_x: int :type cell_count_y: int :type cell_count_z: int :type cell_size_x: float :type cell_size_y: float :type cell_size_z: float :type method: int .. versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The Z and Data channels may be array channels. 
If they are, the array sizes must match. """ gxapi_cy.WrapDU._direct_grid_data_to_voxel(GXContext._get_tls_geo(), db, x_channel, y_channel, z_channel, data_channel, output_voxel_filename.encode(), origin_x, origin_y, origin_z, cell_count_x, cell_count_y, cell_count_z, cell_size_x, cell_size_y, cell_size_z, method) @classmethod def direct_grid_item_counts_to_voxel(cls, db, x_channel, y_channel, z_channel, data_channel, output_voxel_filename, origin_x, origin_y, origin_z, cell_count_x, cell_count_y, cell_count_z, cell_size_x, cell_size_y, cell_size_z, pb_replace_zeroes_with_dummy): """ Create a voxel using direct gridding containing the number of data points in each cell. :param db: Database :param x_channel: X channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param y_channel: Y channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param z_channel: Z channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param data_channel: Data channel [`DB_LOCK_READONLY <geosoft.gxapi.DB_LOCK_READONLY>`] :param output_voxel_filename: Output voxel filename :param origin_x: Voxel origin X :param origin_y: Voxel origin Y :param origin_z: Voxel origin Z :param cell_count_x: Voxel cell count X :param cell_count_y: Voxel cell count Y :param cell_count_z: Voxel cell count Z :param cell_size_x: Voxel cell size X :param cell_size_y: Voxel cell size Y :param cell_size_z: Voxel cell size Z :param pb_replace_zeroes_with_dummy: Replace zero values in output with DUMMY? :type db: GXDB :type x_channel: int :type y_channel: int :type z_channel: int :type data_channel: int :type output_voxel_filename: str :type origin_x: float :type origin_y: float :type origin_z: float :type cell_count_x: int :type cell_count_y: int :type cell_count_z: int :type cell_size_x: float :type cell_size_y: float :type cell_size_z: float :type pb_replace_zeroes_with_dummy: bool .. 
versionadded:: 8.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The Z and Data channels may be array channels. If they are, the array sizes must match. """ gxapi_cy.WrapDU._direct_grid_item_counts_to_voxel(GXContext._get_tls_geo(), db, x_channel, y_channel, z_channel, data_channel, output_voxel_filename.encode(), origin_x, origin_y, origin_z, cell_count_x, cell_count_y, cell_count_z, cell_size_x, cell_size_y, cell_size_z, pb_replace_zeroes_with_dummy) # EM Tau Calculation @classmethod def em_tau_automatic(cls, hVVobs, hVVtime, hVVnoise, lWindow, dTau0, dA0, dItrTol, lItrMax, plItr, pdTau, pdA, pdMisfit, iError, hVVcalcVV): """ Automatic fitting EM Tau :param hVVobs: Observed EM [READONLY] :param hVVtime: Time [READONLY] :param hVVnoise: Noise [READONLY] :param lWindow: Time window [READONLY] :param dTau0: Starting Tau [READONLY] :param dA0: Starting coeff. A [READONLY] :param dItrTol: Iterarion tolerance [READONLY] :param lItrMax: Maximum iteration [READONLY] :param plItr: Number of iterations :param pdTau: Calculated Tau :param pdA: Calculated coeff. A :param pdMisfit: Calculated misfit :param iError: Error message code 0 (No error), 1 (Insufficient points above noise threshold) 2 (No convergence in 30 svdcmp iterations) :param hVVcalcVV: Calculated EM :type hVVobs: GXVV :type hVVtime: GXVV :type hVVnoise: GXVV :type lWindow: int :type dTau0: float :type dA0: float :type dItrTol: float :type lItrMax: int :type plItr: int_ref :type pdTau: float_ref :type pdA: float_ref :type pdMisfit: float_ref :type iError: int_ref :type hVVcalcVV: GXVV :returns: 0 - OK 1 - if error :rtype: int .. 
versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val, plItr.value, pdTau.value, pdA.value, pdMisfit.value, iError.value = gxapi_cy.WrapDU._em_tau_automatic(GXContext._get_tls_geo(), hVVobs, hVVtime, hVVnoise, lWindow, dTau0, dA0, dItrTol, lItrMax, plItr.value, pdTau.value, pdA.value, pdMisfit.value, iError.value, hVVcalcVV) return ret_val @classmethod def em_tau_calc(cls, hVVobs, hVVtime, dTau0, dA0, dItrTol, lItrMax, plItr, pdTau, pdA, pdMisfit, iError, hVVcalcVV): """ Fitting f(t) = A * e^(-t/Tau) = e^s0 * e^(-s1*t), where s0=lnA, s1=1/Tau :param hVVobs: Observed EM [READONLY] :param hVVtime: Time [READONLY] :param dTau0: Starting Tau [READONLY] :param dA0: Starting coeff. A [READONLY] :param dItrTol: Iterarion tolerance [READONLY] :param lItrMax: Maximum iteration [READONLY] :param plItr: Number of iterations :param pdTau: Calculated Tau :param pdA: Calculated coeff. A :param pdMisfit: Calculated misfit :param iError: Error message code 0 (No error), 1 (Insufficient points above noise threshold) 2 (No convergence in 30 svdcmp iterations) :param hVVcalcVV: Calculated EM :type hVVobs: GXVV :type hVVtime: GXVV :type dTau0: float :type dA0: float :type dItrTol: float :type lItrMax: int :type plItr: int_ref :type pdTau: float_ref :type pdA: float_ref :type pdMisfit: float_ref :type iError: int_ref :type hVVcalcVV: GXVV :returns: 0 - OK 1 - if error :rtype: int .. 
versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val, plItr.value, pdTau.value, pdA.value, pdMisfit.value, iError.value = gxapi_cy.WrapDU._em_tau_calc(GXContext._get_tls_geo(), hVVobs, hVVtime, dTau0, dA0, dItrTol, lItrMax, plItr.value, pdTau.value, pdA.value, pdMisfit.value, iError.value, hVVcalcVV) return ret_val @classmethod def em_tau_late_time(cls, hVVobs, hVVtime, hVVnoise, lWindow, TauProcess, dMaxTau, dTau0, dA0, dItrTol, lItrMax, plItr, pdTau, pdA, pdMisfit, iError, hVVcalcVV): """ Automatic fitting EM Tau :param hVVobs: Observed EM [READONLY] :param hVVtime: Time [READONLY] :param hVVnoise: Noise [READONLY] :param lWindow: Time window [READONLY] :param TauProcess: 1-TauProcess: 2-MoveWindow [READONLY] :param dMaxTau: Max Tau [READONLY] :param dTau0: Starting Tau [READONLY] :param dA0: Starting coeff. A [READONLY] :param dItrTol: Iterarion tolerance [READONLY] :param lItrMax: Maximum iteration [READONLY] :param plItr: Number of iterations :param pdTau: Calculated Tau :param pdA: Calculated coeff. A :param pdMisfit: Calculated misfit :param iError: Error message code 0 (No error), 1 (Insufficient points above noise threshold) 2 (No convergence in 30 svdcmp iterations) :param hVVcalcVV: Calculated EM :type hVVobs: GXVV :type hVVtime: GXVV :type hVVnoise: GXVV :type lWindow: int :type TauProcess: int :type dMaxTau: float :type dTau0: float :type dA0: float :type dItrTol: float :type lItrMax: int :type plItr: int_ref :type pdTau: float_ref :type pdA: float_ref :type pdMisfit: float_ref :type iError: int_ref :type hVVcalcVV: GXVV :returns: 0 - OK 1 - if error :rtype: int .. 
versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val, plItr.value, pdTau.value, pdA.value, pdMisfit.value, iError.value = gxapi_cy.WrapDU._em_tau_late_time(GXContext._get_tls_geo(), hVVobs, hVVtime, hVVnoise, lWindow, TauProcess, dMaxTau, dTau0, dA0, dItrTol, lItrMax, plItr.value, pdTau.value, pdA.value, pdMisfit.value, iError.value, hVVcalcVV) return ret_val @classmethod def em_tau_manual(cls, hVVobs, hVVtime, hVVnoise, dMinTime, dMaxTime, dTau0, dA0, dItrTol, lItrMax, plItr, pdTau, pdA, pdMisfit, iError, hVVcalcVV): """ Automatic fitting EM Tau :param hVVobs: Observed EM [READONLY] :param hVVtime: Time [READONLY] :param hVVnoise: Noise [READONLY] :param dMinTime: Minimum time [READONLY] :param dMaxTime: Maximum time [READONLY] :param dTau0: Starting Tau [READONLY] :param dA0: Starting coeff. A [READONLY] :param dItrTol: Iterarion tolerance [READONLY] :param lItrMax: Maximum iteration [READONLY] :param plItr: Number of iterations :param pdTau: Calculated Tau :param pdA: Calculated coeff. A :param pdMisfit: Calculated misfit :param iError: Error message code 0 (No error), 1 (Insufficient points above noise threshold) 2 (No convergence in 30 svdcmp iterations) :param hVVcalcVV: Calculated EM :type hVVobs: GXVV :type hVVtime: GXVV :type hVVnoise: GXVV :type dMinTime: float :type dMaxTime: float :type dTau0: float :type dA0: float :type dItrTol: float :type lItrMax: int :type plItr: int_ref :type pdTau: float_ref :type pdA: float_ref :type pdMisfit: float_ref :type iError: int_ref :type hVVcalcVV: GXVV :returns: 0 - OK 1 - if error :rtype: int .. 
versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val, plItr.value, pdTau.value, pdA.value, pdMisfit.value, iError.value = gxapi_cy.WrapDU._em_tau_manual(GXContext._get_tls_geo(), hVVobs, hVVtime, hVVnoise, dMinTime, dMaxTime, dTau0, dA0, dItrTol, lItrMax, plItr.value, pdTau.value, pdA.value, pdMisfit.value, iError.value, hVVcalcVV) return ret_val @classmethod def em_tau_trend_window(cls, hVVobs, hVVtime, nWindows, hVVtau, hVVfitError): """ Automatic fitting EM Tau :param hVVobs: Observed EM [READONLY] :param hVVtime: Time [READONLY] :param nWindows: Get linear trends of nWindows consecutive points [READONLY] :param hVVtau: Calculated tau values - same number of items as hVVobs :param hVVfitError: rms error of fit :type hVVobs: GXVV :type hVVtime: GXVV :type nWindows: int :type hVVtau: GXVV :type hVVfitError: GXVV .. versionadded:: 9.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapDU._em_tau_trend_window(GXContext._get_tls_geo(), hVVobs, hVVtime, nWindows, hVVtau, hVVfitError) @classmethod def footprint_coverage_static(cls, dat, area, radius, segments, thinning_threshold, covered_area, total_area, result): """ Compute the footprint of a survey :param dat: Input DAT for the dataset :param area: The input area including exclusion regions :param radius: The radius of the instrument :param segments: Number of segments to define a circle (default 30) :param thinning_threshold: The maxium change in circumference allowed to reduce polygon complexity :param covered_area: Computed Covered Area :param total_area: Computed Total Area :param result: Computed coverted polygon and its exclusion regions :type dat: GXDAT :type area: GXPLY :type radius: float :type segments: int :type thinning_threshold: float :type covered_area: float_ref :type total_area: 
float_ref :type result: GXMPLY .. versionadded:: 9.5 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ covered_area.value, total_area.value = gxapi_cy.WrapDU._footprint_coverage_static(GXContext._get_tls_geo(), dat, area, radius, segments, thinning_threshold, covered_area.value, total_area.value, result) @classmethod def footprint_coverage_dynamic(cls, dat, area, channel_name, segments, thinning_threshold, covered_area, total_area, result): """ Compute the footprint of a survey :param dat: Input DAT for the dataset :param area: The input area including exclusion regions :param channel_name: Channel that defines the radius :param segments: Number of segments to define a circle (default 30) :param thinning_threshold: The maxium change in circumference allowed to reduce polygon complexity :param covered_area: Computed Covered Area :param total_area: Computed Total Area :param result: Computed coverted polygon and its exclusion regions :type dat: GXDAT :type area: GXPLY :type channel_name: str :type segments: int :type thinning_threshold: float :type covered_area: float_ref :type total_area: float_ref :type result: GXMPLY .. versionadded:: 9.5 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ covered_area.value, total_area.value = gxapi_cy.WrapDU._footprint_coverage_dynamic(GXContext._get_tls_geo(), dat, area, channel_name.encode(), segments, thinning_threshold, covered_area.value, total_area.value, result) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXTEST.rst .. _GXTEST: GXTEST class ================================== .. 
autoclass:: geosoft.gxapi.GXTEST :members: <file_sep>/examples/tutorial/Grids and Images/grid_statistics_numpy.py import numpy as np import geosoft.gxpy.gx as gx import geosoft.gxpy.grid as gxgrid # create context gxc = gx.GXpy() # open the grid with gxgrid.Grid.open('elevation_surfer.grd(SRF;VER=V7)') as grid: # get the data in a numpy array data_values = grid.xyzv()[:, :, 3] # print statistical properties print('minimum: ', np.nanmin(data_values)) print('maximum: ', np.nanmax(data_values)) print('mean: ', np.nanmean(data_values)) print('standard deviation:', np.nanstd(data_values)) <file_sep>/geosoft/gxapi/GXKML.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXKML(gxapi_cy.WrapKML): """ GXKML class. `GXKML <geosoft.gxapi.GXKML>` functions provide an interface KML (Keyhole markup language) files. **Note:** None. """ def __init__(self, handle=0): super(GXKML, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXKML <geosoft.gxapi.GXKML>` :returns: A null `GXKML <geosoft.gxapi.GXKML>` :rtype: GXKML """ return GXKML() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def import_3d_polygon(cls, mview, name, vv_vx, vv_vy, vv_vz, color, extruded): """ Imports a KML 3D polygon into a provided view. :param mview: `GXMVIEW <geosoft.gxapi.GXMVIEW>` object - the (3d) view to import the polygon into. :param name: The name of the resulting polygon group. 
:param vv_vx: X Vertex Components - VV of GS_REAL :param vv_vy: Y Vertex Components - VV of GS_REAL :param vv_vz: Z Vertex Components - VV of GS_REAL :param color: The colour of the resulting surface - COL_ANY. :param extruded: Extrude the polygon to the base - BOOL. :type mview: GXMVIEW :type name: str :type vv_vx: GXVV :type vv_vy: GXVV :type vv_vz: GXVV :type color: int :type extruded: bool :rtype: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Requirements -- The MVIEW must be 3D and valid (see `GXMVIEW.is_view_3d <geosoft.gxapi.GXMVIEW.is_view_3d>`);- The group name must not be null/empty;- The three VV for vector components must contain vertices, and be of equal length. """ ret_val = gxapi_cy.WrapKML._import_3d_polygon(GXContext._get_tls_geo(), mview, name.encode(), vv_vx, vv_vy, vv_vz, color, extruded) return ret_val @classmethod def import_3d_line_path(cls, mview, name, vv_vx, vv_vy, vv_vz, color, extruded): """ Imports a KML 3D LinePath into a provided view. :param mview: `GXMVIEW <geosoft.gxapi.GXMVIEW>` object - the (3d) view to import the LinePath into. :param name: The name of the resulting LinePath group. :param vv_vx: X Vertex Components - VV of GS_REAL :param vv_vy: Y Vertex Components - VV of GS_REAL :param vv_vz: Z Vertex Components - VV of GS_REAL :param color: The colour of the resulting surface - COL_ANY. :param extruded: Extrude the LinePath to the base - BOOL. :type mview: GXMVIEW :type name: str :type vv_vx: GXVV :type vv_vy: GXVV :type vv_vz: GXVV :type color: int :type extruded: bool :rtype: int .. 
versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Requirements -- The MVIEW must be 3D and valid (see `GXMVIEW.is_view_3d <geosoft.gxapi.GXMVIEW.is_view_3d>`);- The group name must not be null/empty;- The three VV for vector components must contain vertices, and be of equal length. """ ret_val = gxapi_cy.WrapKML._import_3d_line_path(GXContext._get_tls_geo(), mview, name.encode(), vv_vx, vv_vy, vv_vz, color, extruded) return ret_val ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/_test_localhost_microservice.py import unittest import os import numpy as np from requests import get as http_get, post as http_post, exceptions as http_exceptions import json import geosoft import geosoft.gxpy.gx as gx import geosoft.gxpy.utility as gxu import geosoft.gxpy.system as gsys import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.dap_client as gxdap import geosoft.gxpy.geometry as gxgeo import geosoft.gxpy.grid as gxgrd from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() @classmethod def tearDownClass(cls): cls.tearDownGXPYTest() def test_post(self): self.start() doit = {'doit': 'read a Flask tutorial', 'stuff': 'some stuff'} response = http_post('http://localhost:5000/dothis', data=json.dumps(doit), headers={'Content-Type': 'application/json', 'Accept': 'application/json'}) if (response.ok): data = json.loads(response.content.decode('utf-8')) print(data) else: response.raise_for_status() def test_makiserver_get(self): self.start() message = {'doit': 'read a book'} pass ############################################################################################### if __name__ == 
'__main__': gxc = gx.GXpy() print(gxc.gid) unittest.main() <file_sep>/geosoft/gxapi/GXACQUIRE.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXACQUIRE(gxapi_cy.WrapACQUIRE): """ GXACQUIRE class. This class is used to import acQuire data. It uses the public acQuire API. """ def __init__(self, handle=0): super(GXACQUIRE, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXACQUIRE <geosoft.gxapi.GXACQUIRE>` :returns: A null `GXACQUIRE <geosoft.gxapi.GXACQUIRE>` :rtype: GXACQUIRE """ return GXACQUIRE() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls): """ Create an acQuire object :returns: acQuire Object :rtype: GXACQUIRE .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapACQUIRE._create(GXContext._get_tls_geo()) return GXACQUIRE(ret_val) def delete_empty_chan(self, db): """ Delete empty channels :param db: Database :type db: GXDB .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._delete_empty_chan(db) def import_hole(self, proj, dir, para, geo_vv, delete_existing, convert): """ Import Drillhole data acQuire database into a GDB :param proj: Project name :param dir: Project directory :param para: Parameter File :param geo_vv: List of geology name database :param delete_existing: 0: Write to existing databases (overwrite holes), 1: Delete existing databases. :param convert: Convert Negatives (0,1) :type proj: str :type dir: str :type para: str :type geo_vv: GXVV :type delete_existing: int :type convert: int :returns: 0 - Ok 1 - Error (Will not stop GX) :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Point data and polygon data are saved into Dnnn lines in GDB, nnn representing incremental number starting from 0 """ ret_val = self._import_hole(proj.encode(), dir.encode(), para.encode(), geo_vv, delete_existing, convert) return ret_val def import_point(self, db, para, convert): """ Import Point Sample data acQuire database into a GDB :param db: Geosoft GDB :param para: Parameter File :param convert: Convert Negatives (0,1) :type db: GXDB :type para: str :type convert: int :returns: 0 - Ok 1 - Error (Will not stop GX) :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Data existing in the receiving GDB file will be over-written. Point data and polygon data are saved into Dnnn lines in GDB, nnn representing incremental number starting from 0 """ ret_val = self._import_point(db, para.encode(), convert) return ret_val def selection_tool(self, selection_file, mode): """ Run the acQuire Selection Tool. 
:param selection_file: Selection File Name :param mode: :ref:`ACQUIRE_SEL` :type selection_file: str :type mode: int :returns: 0 - Ok 1 - if user cancels :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The selection file will be loaded (if present) and then the user can make selections then the selections are saved back in the selection file. """ ret_val = self._selection_tool(selection_file.encode(), mode) return ret_val def selection_tool_force_grid_selection(self, selection_file, mode): """ Run the acQuire Selection Tool, but force selection of destination grid. :param selection_file: Selection File Name :param mode: :ref:`ACQUIRE_SEL` :type selection_file: str :type mode: int :returns: 0 - Ok 1 - if user cancels :rtype: int .. versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The selection file will be loaded (if present) and then the user can make selections then the selections are saved back in the selection file. """ ret_val = self._selection_tool_force_grid_selection(selection_file.encode(), mode) return ret_val def get_selection_info(self, selection_file, mode, destination_grid): """ Get some information from existing selection file. :param selection_file: Selection File Name :param mode: :ref:`ACQUIRE_SEL` :param destination_grid: 0 - Destination grid was not selected 1 - Destination grid was selected :type selection_file: str :type mode: int_ref :type destination_grid: int_ref .. 
versionadded:: 9.6 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ mode.value, destination_grid.value = self._get_selection_info(selection_file.encode(), mode.value, destination_grid.value) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/test_agg.py import unittest import os import geosoft.gxpy.system as gsys import geosoft.gxpy.agg as gxagg import geosoft.gxpy.grid as gxgrid import geosoft.gxpy.utility as gxu import geosoft.gxpy.map as gxmap import geosoft.gxpy.geometry as gxgm import geosoft from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest(res_stack=4) cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'testgrids.zip'), folder=cls._gx.temp_folder()) cls.g1f = os.path.join(cls.folder, 'test_grid_1.grd') cls.g2f = os.path.join(cls.folder, 'test_grid_2.grd') cls.g3f = os.path.join(cls.folder, 'test_agg_utm.grd') cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'section_grids.zip'), folder=cls._gx.temp_folder()) cls.g4f = os.path.join(cls.folder, 'X_Slices.grd') def test_version(self): self.start() self.assertEqual(gxagg.__version__, geosoft.__version__) def test_agg(self): self.start() with gxagg.Aggregate_image.new() as agg: self.assertEqual(str(agg), '') self.assertEqual(agg.layer_count, 0) with gxagg.Aggregate_image.new(self.g3f) as agg: self.assertEqual(str(agg), 'test_agg_utm') self.assertEqual(agg.layer_count, 1) with gxagg.Aggregate_image.new(self.g3f, shade=True, color_map='cycle') as agg: self.assertEqual(str(agg), 'test_agg_utm') self.assertEqual(agg.layer_count, 2) agg.add_layer(self.g2f) self.assertEqual(str(agg), 
'test_agg_utm, test_grid_2') self.assertEqual(agg.layer_count, 3) agg.add_layer(self.g1f, shade=True, color_map='hotcycle') self.assertEqual(str(agg), 'test_agg_utm, test_grid_2, test_grid_1') self.assertEqual(agg.layer_count, 5) self.assertEqual(len(agg.layer_file_names), 5) cmap = agg.layer_color_map() self.assertEqual(cmap.length, 64) cmap = agg.layer_color_map(4) self.assertEqual(cmap.length, 25) self.assertRaises(gxagg.AggregateException, agg.layer_color_map, 5) cmap = agg.layer_color_map(layer=agg.layer_file_names[3]) self.assertEqual(cmap.length, 63) cmap = agg.layer_color_map(agg.layer_file_names[1]) self.assertEqual(cmap.length, 25) def test_open(self): self.start() with gxagg.Aggregate_image.new(self.g3f) as agg: self.assertEqual(str(agg), 'test_agg_utm') self.assertEqual(agg.layer_count, 1) with gxagg.Aggregate_image.open(agg.gxagg) as open_agg: self.assertEqual(agg.name, open_agg.name) self.assertEqual(open_agg.layer_count, 1) def test_settings(self): self.start() with gxagg.Aggregate_image.new(self.g3f, shade=True) as agg: self.assertEqual(agg.layer_count, 2) self.assertEqual(agg.brightness, 0.0) agg.brightness = -0.5 self.assertEqual(agg.brightness, -0.5) def test_figure_1(self): self.start() gxgrid.Grid.open(self.g3f, mode=gxgrid.FILE_READWRITE).unit_of_measure = 'nT' with gxagg.Aggregate_image.new(self.g3f, shade=True, color_map='elevation.tbl', contour=20) as agg: self.crc_map(agg.figure_map(title='image test').file_name) def test_figure_2(self): self.start() gxgrid.Grid.open(self.g3f, mode=gxgrid.FILE_READWRITE).unit_of_measure = 'nT' with gxagg.Aggregate_image.new(self.g3f, shade=True, color_map='elevation.tbl', contour=20) as agg: self.crc_map(agg.figure_map(title='Image with LL Annotations\nsub-title', legend_label='nT', features='all').file_name) def test_image_file(self): self.start() image_file = '' try: with gxagg.Aggregate_image.new(self.g3f, shade=True, color_map='elevation.tbl', contour=20) as agg: image_file = 
agg.image_file(image_type=gxmap.RASTER_FORMAT_PNG, pix_width=None) with gxgrid.Grid.open(image_file + '(IMG,t=png)') as g: nx, ny, x0, y0, dx, dy, rot = agg.spatial_properties self.assertEqual(g.coordinate_system, agg.coordinate_system) self.assertEqual(g.nx, nx) self.assertEqual(g.ny, ny) self.assertEqual(g.x0, x0) self.assertEqual(g.y0, y0) self.assertAlmostEqual(g.dx, dx) self.assertAlmostEqual(g.dy, dy) self.assertAlmostEqual(g.rot, rot) finally: gxu.delete_file(image_file) try: with gxagg.Aggregate_image.new(self.g3f, shade=True, color_map='elevation.tbl', contour=20) as agg: display_area = gxgm.Point2(((336000, 6160000, 0.0),(338000, 6161500, 0.0))) image_file = agg.image_file(image_type=gxmap.RASTER_FORMAT_PNG, pix_width=800, display_area=display_area) with gxgrid.Grid.open(image_file + '(IMG,t=png)') as g: self.assertEqual(g.coordinate_system, agg.coordinate_system) self.assertEqual(g.nx, 800) self.assertEqual(g.ny, 600) self.assertEqual(g.extent_xy, display_area.extent_xy) self.assertEqual(g.extent_minimum_xy, display_area.extent_minimum_xy) self.assertEqual(g.extent_maximum_xy, display_area.extent_maximum_xy) finally: gxu.delete_file(image_file) try: with gxagg.Aggregate_image.new(self.g3f, shade=True, color_map='elevation.tbl', contour=20) as agg: display_area = gxgm.Point2(agg.extent, coordinate_system='AGD66 / AMG zone 53') image_file = agg.image_file(image_type=gxmap.RASTER_FORMAT_PNG, pix_width=800, display_area=display_area) with gxgrid.Grid.open(image_file + '(IMG,t=png)') as g: self.assertEqual(g.coordinate_system, display_area.coordinate_system) self.assertEqual(g.nx, 800) self.assertEqual(g.ny, 602) finally: gxu.delete_file(image_file) def test_slice_agg(self): self.start() with gxagg.Aggregate_image.new(self.g4f) as agg: image_file = agg.image_file(image_type=gxmap.RASTER_FORMAT_PNG, pix_width=None) with gxgrid.Grid.open(image_file + '(IMG;T=png)') as img: self.assertEqual(img.nx, 57) self.assertEqual(img.ny, 13) 
self.assertTrue(img.coordinate_system.is_oriented) self.assertEqual(img.dimension, (28499.968724331295, 42.222175888717175, 6500.0)) if __name__ == '__main__': unittest.main() <file_sep>/docs/GXTIN.rst .. _GXTIN: GXTIN class ================================== .. autoclass:: geosoft.gxapi.GXTIN :members: <file_sep>/geosoft/gxpy/segy_reader.py """ Read SEG-Y files into geosoft databases, grids and voxels. :Classes: :class:`SegyReader` Reads SEG-Y files and creates Geosoft grids or voxels .. versionadded:: 9.9.1 """ import geosoft import geosoft.gxapi as gxapi from . import coordinate_system as coordinate_system from . import vv as gxvv __version__ = geosoft.__version__ from enum import Enum import json class Endianess(Enum): LITTLE_ENDIAN = True BIG_ENDIAN = False class DataType(Enum): IBM_FLOAT4 = 'IBM_float4' IEEE_FLOAT4 = 'IEEE_float4' INT32 = 'int32' INT16 = 'int16' INT8 = 'int8' @classmethod def from_str(cls, str): for member in list(cls): if member.value == str: return member raise ValueError("Not a valid DataType name") class InlineCrosslineSanity(Enum): OK = 0 MAYBE_SWAPPED = 1 ONLY_ONE_LINE = 2 UNKNOWN_PROBLEM = 3 class ZType(Enum): DEPTH = "DEPTH" TIME = "TIME" class FieldMapping(Enum): Inline = "INLINE" Crossline = "CROSSLINE" X = "X" Y = "Y" class TraceConfigField: """ TODO: Not really finished. After the SEG-Y is scanned, `SegyReader.field_configuration.fields` will contain a list of all the trace header fields that contain nonzero values. The EXPORT_TO_GDB field is used to indicate if the data from this field should be written to the GDB on export. The MAPPING field is used to indicate which fields should be interpreted as inline, crossline, x and y. For 3D SEG-Y files, exactly one field should be set to each of these values. For 2D files, only X and Y apply. The rest of the fields are read-only. I've defined the field names for all of them in __INIT__ but some of them are still missing accessors. 
""" def __init__(self, ltb, index): self.index = index self.ltb = ltb self._export_to_gdb_field = ltb.find_field("EXPORT_TO_GDB") self._offset_field = ltb.find_field("OFFSET") self._datatype_field = ltb.find_field("FORMAT") self._scale_by_field_field = ltb.find_field("SCALE_BY") self._count_field = ltb.find_field("COUNT") self._min_field = ltb.find_field("MIN") self._max_field = ltb.find_field("MAX") self._scaled_min_field = ltb.find_field("SCALED_MIN") self._scaled_max_field = ltb.find_field("SCALED_MAX") self._stride_field = ltb.find_field("STRIDE") self._mapping_field = ltb.find_field("MAPPING") @property def name(self): ref = gxapi.str_ref() self.ltb.get_english_string(self.index, 0, ref) return ref.value @property def export_to_gdb(self): ref = gxapi.int_ref() self.ltb.get_int(self.index, self._export_to_gdb_field, ref) return bool(ref.value) @export_to_gdb.setter def export_to_gdb(self, value): self.ltb.set_int(self.index, self._export_to_gdb_field, value) @property def min(self): ref = gxapi.int_ref() self.ltb.get_int(self.index, self._max_field, ref) return ref.value @property def max(self): ref = gxapi.int_ref() self.ltb.get_int(self.index, self._min_field, ref) return ref.value @property def mapping(self): ref = gxapi.str_ref() self.ltb.get_english_string(self.index, self._mapping_field, ref) for member in list(FieldMapping): if member.value == ref.value: return member @mapping.setter def mapping(self, mapping): self.ltb.set_string( self.index, self._mapping_field, mapping.value) class TraceFieldConfiguration: def __init__(self, ltb): self._ltb = ltb self.fields = [] for i in range(ltb.records()): self.fields.append(TraceConfigField(self._ltb, i)) class SegyReader: """ TODO: The GXSEGYREADER GetTiePoint and SetTiePoint functions are important and don't have wrappers yet. Also, none of the filtering functionality is implemented. Reads a SEG-Y file and can generate Geosoft database, grid or voxel files from it. 
For many SEG-Y files, the following is all that's needed to use this class:: .. code:: >>> import geosoft.gxpy.segy_reader as gxsegy >>> segy = gxsegy.SegyReader('c:/data/example.segy') >>> segy.export_file(gdb='c:/data/output.gdb') Unfortunately, some SEG-Y files will require some configuration before they can be read properly. TODO: more documentation once the API is finalized. """ def __init__(self, filename, is3d=None, endianess=None): """ Initialize the SegyReader object and do a quick initial scan of the SEG-Y file. :param filename: The name of the SEG-Y file to read :param is3d True is the SEG-Y file is 3D, False if the file is 2D. If this parameter is not specified, the reader will attempt to automatically determine if the file is 2D or 3D. :param endianess: Endianess.LITTLE_ENDIAN or Endianess.BIG_ENDIAN, specifying the endianess (byte order) of the SEG-Y file. Defaults to big- endian which is correct for most SEG-Y files. The new SegyReader object will read the first part of the SEG-Y file and attempt to determine the correct configuration. Most of the methods for this class are for querying and modifying that configuration. 
""" self._gx_segy_reader = gxapi.GXSEGYREADER.open_file(filename) if endianess is not None: self._gx_segy_reader.set_endianess(endianess) if is3d is not None: self._gx_segy_reader.set_is_3d(is3d) self._trace_data_type_names = [] for i in range(self._gx_segy_reader.get_num_trace_data_types()): name_ref = gxapi.str_ref() self._gx_segy_reader.get_trace_data_type_name(i, name_ref) self._trace_data_type_names.append(name_ref.value) unit_list_ref = gxapi.str_ref() self._gx_segy_reader.get_possible_z_units(unit_list_ref) self._z_units_list = unit_list_ref.value.splitlines(keepends=False) self._scan_completed = False def scan_file(self): self._gx_segy_reader.scan_file() self._scan_completed = True if self._gx_segy_reader.get_is_3d(): return self._check_sane_inline_crossline() @property def georeferencing(self): """A :class:`geosoft.gxpy.coordinate_system.Coordinate_system` object containing the coordinate system for the SEG-Y file and the generated output files. """ return coordinate_system.Coordinate_system( self._gx_segy_reader.get_georeferencing()) @georeferencing.setter def georeferencing(self, georef_object): self._gx_segy_reader.set_georeferencing(georef_object.gxipj) def export_files(self, **kwargs): """ Export the data in the SEG-Y file to the specified Geosoft files. What files are output depend on the keyword arguments:: :param gdb: The filename of the Geosoft database file to export to. If this parameter is not specified, then no database is output. :param voxel: The filename of the Geosoft voxel file to export to. Only valid for 3D SEG-Y files. If this option is not specified, then no voxel file is output. :param slice_prefix: The base path and filename for slice grids. If this argument is specified, then at least one of `inline_slices`, `crossline_slices` or z_slices` must also be specified. :param inline_slices: An array-like containing the inline coordinates for slice grids. 
            If this argument is specified then, for each coordinate in the
            list a geosoft grid file is generated from the slice of the
            SEG-Y volume with that inline coordinate. The filename will
            consist of `slice_prefix` followed by "_IL???.grd" where ??? is
            the inline coordinate. Only valid for 3D SEG-Y files.
        :param crossline_slices: An array-like containing the crossline
            coordinates for slice grids. If this argument is specified
            then, for each coordinate in the list a geosoft grid file is
            generated from the slice of the SEG-Y volume with that
            crossline coordinate. The filename will consist of
            `slice_prefix` followed by "_XL???.grd" where ??? is the
            crossline coordinate. Only valid for 3D SEG-Y files.
        :param z_slices: An array-like containing the trace index for slice
            grids. If this argument is specified then, for each coordinate
            in the list a geosoft grid file is generated by taking the
            element at that index in the trace data for each trace. The
            filename will consist of `slice_prefix` followed by "_Z???.grd"
            where ??? is the z coordinate. (The parameter values are array
            indices (0 for the top of the volume, 1 for the next slice
            down, etc.) but the number in the file name is the z-coordinate
            in physical units (e.g. feet, metres, etc.))
            Only valid for 3D SEG-Y files.
""" def validate_kwargs(kwargs): if 'gdb' in kwargs.keys(): return True if 'voxel' in kwargs.keys(): return True if 'slice_prefix' in kwargs.keys(): if 'inline_slices' in kwargs.keys(): return True if 'crossline_slices' in kwargs.keys(): return True if 'z_slices' in kwargs.keys(): return True return False if not validate_kwargs(kwargs): raise ValueError('No output files configured') if not self._scan_completed: self.scan_file() for key, value in kwargs.items(): if key == 'gdb': self._gx_segy_reader.set_gdb_output_filename(value) elif key == 'voxel': self._gx_segy_reader.set_voxel_output_filename(value) elif key == 'slice_prefix': self._gx_segy_reader.set_slice_output_prefix(value) elif key == 'inline_slices': indices_vv = gxvv.GXvv(value).gxvv self._gx_segy_reader.set_inline_slice_indices(indices_vv) elif key == 'crossline_slices': indices_vv = gxvv.GXvv(value).gxvv self._gx_segy_reader.set_crossline_slice_indices(indices_vv) elif key == 'z_slices': indices_vv = gxvv.GXvv(value).gxvv self._gx_segy_reader.set_z_slice_indices(indices_vv) else: raise TypeError('Unexpected keyword argument: {}'.format(key)) self._gx_segy_reader.export_files() def _check_sane_inline_crossline(self): is_sane_ref = gxapi.bool_ref() possibly_swapped_ref = gxapi.bool_ref() only_one_line_ref = gxapi.bool_ref() self._gx_segy_reader.check_sane_inline_crossline( is_sane_ref, possibly_swapped_ref, only_one_line_ref) if is_sane_ref.value: return InlineCrosslineSanity.OK elif possibly_swapped_ref.value: return InlineCrosslineSanity.MAYBE_SWAPPED elif only_one_line_ref.value: return InlineCrosslineSanity.ONLY_ONE_LINE else: return InlineCrosslineSanity.UNKNOWN_PROBLEM @property def text_header(self): """ The text header from the SEG-Y file as a string. This is human-readable text and often usefull for configuring the reader. 
""" result = gxapi.str_ref() self._gx_segy_reader.get_text_header(result) return result.value @property def binary_header(self): """ The contents of the SEG-Y binary header, as a dictionary. Usefull for diagnostic purposes. """ ltb = self._gx_segy_reader.get_binary_header() result = dict() for record_num in range(ltb.records()): field_name = gxapi.str_ref() ltb.get_english_string(record_num, 0, field_name) result[field_name.value] = ltb.get_int(record_num, 1) return result def get_trace_header(self, trace_number): ref = gxapi.str_ref() self._gx_segy_reader.get_trace_header_as_json(trace_number, ref) return json.loads(ref.value) def get_trace_data(self, trace_number): vv = self._gx_segy_reader.get_trace_data(trace_number) # It seems like ther should be an easier and more efficient way to # create a gxpy.vv from a gxapi.vv, but if there is I can't find it. return gxvv.GXvv(vv.get_data_array(0, vv.length(), gxapi.GS_DOUBLE)) @property def trace_length(self): """The number of elements in each trace.""" return self._gx_segy_reader.get_trace_length() @trace_length.setter def trace_length(self, value): self._gx_segy_reader.set_trace_length_configuration('none', value) def get_trace_length_from_file_header(self, offset): """ Specify that `trace_length` should be set to the value from the specified byte offset in the binary file header. """ self._gx_segy_reader.set_trace_length_configuration( 'file_header', offset) def get_trace_length_from_trace_header(self, offset): """ Specify that `trace_length` should be set to the value from the specified byte offset in the binary trace header. Even when this setting is used, the SEG-Y reader still requires that all traces be the same length. """ self._gx_segy_reader.set_trace_length_configuration( 'trace_header', offset) @property def trace_data_type(self): """ The data type of the trace data in the SEG-Y file. Specified using one of the constants defined in the `DataType` Enum. 
""" ref = gxapi.str_ref() self._gx_segy_reader.get_trace_data_type(ref) return DataType.from_str(ref.value) @trace_data_type.setter def trace_data_type(self, data_type): if not isinstance(data_type, DataType): raise ValueError("Expected DataType instance.") self._gx_segy_reader.set_trace_data_type(data_type.value) @property def trace_sample_interval(self): """ The interval between successive values in the trace data. E.g. microseconds or millimetres. """ ref = gxapi.float_ref() self._gx_segy_reader.get_sample_interval(ref) return ref.value @trace_sample_interval.setter def trace_sample_interval(self, value): self._gx_segy_reader.set_sample_interval_configuration( 'none', 0, value) @property def z_units(self): """The units used for `trace_sample_interval`""" ref = gxapi.str_ref() self._gx_segy_reader.get_z_units(ref) return ref.value @z_units.setter def z_units(self, value): if value not in self._z_units_list: raise ValueError("Unsupported Z units") self._gx_segy_reader.set_z_units(value) @property def z_offset(self): """The Z-coordinate of the start (top) of each trace""" ref = gxapi.float_ref() self._gx_segy_reader.get_z_offset(ref) return ref.value @z_offset.setter def z_offset(self, value): self._gx_segy_reader.set_z_offset_configuration('none', 0, value) @property def z_offset_units(self): """The units used for `z_offset`""" ref = gxapi.str_ref() self._gx_segy_reader.get_z_offset_units(ref) return ref.value @z_offset_units.setter def z_offset_units(self, value): if value not in self._z_units_list: raise ValueError("Unsupported Z units") self._gx_segy_reader.set_z_offset_units(value) @property def xy_units(self): """The horizontal units or the SEG_Y""" ref = gxapi.str_ref() self._gx_segy_reader.get_xy_units(ref) return ref.value @property def is_depth_or_time(self): """The "type" of z-units: either ZType.DEPTH or ZType.TIME.""" ref = gxapi.str_ref() self._gx_segy_reader.get_z_type(ref) return ZType[ref.value] @property def trace_range(self): """The depth of the 
top and bottom of the traces.""" top = self.z_offset bottom_ref = gxapi.float_ref() self._gx_segy_reader.get_last_sample_at(bottom_ref) return (top, bottom_ref.value) @property def field_configuration(self): if not self._scan_completed: self.scan_file() return TraceFieldConfiguration(self._gx_segy_reader.get_field_configuration()) @field_configuration.setter def field_configuration(self, field_configuration): self._gx_segy_reader.set_field_configuration(field_configuration._ltb) def __enter__(self): return self def __exit__(self, _type, _value, _traceback): self.__del__() def __del__(self): self._gx_segy_reader = None <file_sep>/geosoft/gxpy/geometry_utility.py """ Geometry utilities. :Constants: :SPLINE_LINEAR: `geosoft.gxapi.VVU_SPL_LINEAR` :SPLINE_CUBIC: `geosoft.gxapi.VVU_SPL_CUBIC` :SPLINE_AKIMA: `geosoft.gxapi.VVU_SPL_AKIMA` :SPLINE_NEAREST: `geosoft.gxapi.VVU_SPL_NEAREST` .. seealso:: `geosoft.gxpy.geometry` .. note:: Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_geometry_utility.py>`_ """ import numpy as np import geosoft import geosoft.gxapi as gxapi from . import vv as gxvv from . import geometry as gxgeo __version__ = geosoft.__version__ SPLINE_LINEAR = gxapi.VVU_SPL_LINEAR SPLINE_CUBIC = gxapi.VVU_SPL_CUBIC SPLINE_AKIMA = gxapi.VVU_SPL_AKIMA SPLINE_NEAREST = gxapi.VVU_SPL_NEAREST def _t(s): return geosoft.gxpy.system.translate(s) class GeometryUtilityException(geosoft.GXRuntimeError): """ Exceptions from `geosoft.gxpy.geometry_utility`. .. versionadded:: 9.4 """ pass def resample(pp, interval, spline=SPLINE_CUBIC, closed=None): """ Return points resampled at a constant separation along the trace of points. :param pp: `geosoft.gxpy.geometry.PPoint` instance, or a 2D numpy array. 
:param interval: constant sampling interval :param spline: spline method, one of: ============== ======================================================== SPLINE_LINEAR points will be along linear line segments between points SPLINE_CUBIC use a minimum-curvature smooth spline SPLINE_AKIMA us an Akima spline, which will not over-shoot data SPLINE_NEAREST assign the nearest value ============== ======================================================== :param closed: `True` to close the line. Smooth splines will appear continuous at the join if closed. If not specified and the first and last points are the same, `True` is assumed. :return: `geosoft.gxpy.geometry.PPoint` instance, or a 2D numpy array, matching the type passed. .. versionadded:: 9.4 """ if interval <= 0: raise GeometryUtilityException(_t('Interval must be > 0')) if isinstance(pp, gxgeo.PPoint): xyz = pp.xyz else: if not isinstance(pp, np.ndarray): pp = np.array(pp, dtype=np.float64) if pp.ndim == 1: pp = pp.reshape(len(pp), 1) if pp.shape[1] >= 3: xyz = pp[:, :3] else: xyz = np.zeros((len(pp), 3), dtype=np.float64) xyz[:, :pp.shape[1]] = pp if len(xyz) < 2: return pp.copy() # closed? 
already_closed = tuple(xyz[0]) == tuple(xyz[-1]) last_point = -1 if len(xyz) == 2: if already_closed: return pp.copy() if closed is None: closed = False if spline in (SPLINE_AKIMA, SPLINE_CUBIC): spline = SPLINE_LINEAR if closed: _xyz = np.zeros(len(xyz) + 1) _xyz[:-1] = xyz _xyz[-1] = xyz[0] xyz = _xyz else: if closed is None: closed = already_closed if closed: # add points to ensure continuously smooth on ends if already_closed: closed_xyz = np.empty((len(xyz) + 4, 3)) closed_xyz[-2:] = xyz[1:3] closed_xyz[0:2] = xyz[-3:-1] else: closed_xyz = np.empty((len(xyz) + 5, 3)) closed_xyz[-3:] = xyz[:3] closed_xyz[0:2] = xyz[-2:] closed_xyz[2: 2 + len(xyz)] = xyz xyz = closed_xyz last_point = -3 # get vvs vvx, vvy, vvz = gxvv.vvset_from_np(xyz) # calculate distance vvd = gxvv.GXvv(dtype=np.float64) gxapi.GXVVU.distance_3d(vvx.gxvv, vvy.gxvv, vvz.gxvv, 0., vvd.gxvv) # make up a sampling vector if closed: start = vvd[2][0] d = vvd[last_point][0] - start else: start = 0. d = vvd[-1][0] nd = int(d / interval) + 1 if closed: nd += 1 nps = np.arange(nd, dtype=np.float64) * interval + start vvs = gxvv.GXvv(nps) # spline locations vvxd = gxvv.GXvv(dtype=np.float64) gxapi.GXVVU.spline2(vvd.gxvv, vvx.gxvv, vvs.gxvv, vvxd.gxvv, spline) vvyd = gxvv.GXvv(dtype=np.float64) gxapi.GXVVU.spline2(vvd.gxvv, vvy.gxvv, vvs.gxvv, vvyd.gxvv, spline) vvzd = gxvv.GXvv(dtype=np.float64) gxapi.GXVVU.spline2(vvd.gxvv, vvz.gxvv, vvs.gxvv, vvzd.gxvv, spline) xyz = gxvv.np_from_vvset((vvxd, vvyd, vvzd)) if closed: xyz[-1] = xyz[0] if tuple(xyz[-1]) == tuple(xyz[-2]): xyz = xyz[:-1] if isinstance(pp, gxgeo.PPoint): return gxgeo.PPoint(xyz, coordinate_system=pp.coordinate_system) else: return xyz[:, :pp.shape[1]] <file_sep>/docs/GXDBWRITE.rst .. _GXDBWRITE: GXDBWRITE class ================================== .. 
autoclass:: geosoft.gxapi.GXDBWRITE :members: <file_sep>/geosoft/gxapi/GXMPLY.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMPLY(gxapi_cy.WrapMPLY): """ GXMPLY class. The `GXMPLY <geosoft.gxapi.GXMPLY>` object contains the definitions for one or more PPLY. """ def __init__(self, handle=0): super(GXMPLY, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMPLY <geosoft.gxapi.GXMPLY>` :returns: A null `GXMPLY <geosoft.gxapi.GXMPLY>` :rtype: GXMPLY """ return GXMPLY() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls): """ Creates a Multi Polygon Object. :returns: `GXMPLY <geosoft.gxapi.GXMPLY>` Handle :rtype: GXMPLY .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMPLY._create(GXContext._get_tls_geo()) return GXMPLY(ret_val) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXMESHUTIL.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMESHUTIL(gxapi_cy.WrapMESHUTIL): """ GXMESHUTIL class. Mesh utility methods. """ def __init__(self, handle=0): super(GXMESHUTIL, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMESHUTIL <geosoft.gxapi.GXMESHUTIL>` :returns: A null `GXMESHUTIL <geosoft.gxapi.GXMESHUTIL>` :rtype: GXMESHUTIL """ return GXMESHUTIL() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def import_grid_to_surface(cls, grid_file_name, geosurface_filename, surface_name): """ Imports a Grid to a Surface :param grid_file_name: Grid File Name :param geosurface_filename: Surface File Name :param surface_name: Surface Item Name within the file :type grid_file_name: str :type geosurface_filename: str :type surface_name: str .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._import_grid_to_surface(GXContext._get_tls_geo(), grid_file_name.encode(), geosurface_filename.encode(), surface_name.encode()) @classmethod def clip_surface_with_grid(cls, inputSurfaceFile, inputSurface, gridSurfaceFileName, gridSurfaceName, outputSurfaceFile, outputSurfaceNameAbove, outputSurfaceNameBelow, surface_clip_mode): """ Clip a Surface with a Grid Surface (grid converted to surface) :param inputSurfaceFile: Input Geosurface file :param inputSurface: Input Surface name within Geosurface file :param gridSurfaceFileName: Grid Surface file name :param gridSurfaceName: Grid surface name within file :param outputSurfaceFile: Output Surface file :param outputSurfaceNameAbove: Name of Surface Item above grid - required for mode=CLIP_ABOVE and CLIP_BOTH :param outputSurfaceNameBelow: Name of Surface Item below grid - required for mode=CLIP_BELOW and CLIP_BOTH :param surface_clip_mode: :ref:`SURFACE_CLIP_MODE` :type inputSurfaceFile: str :type inputSurface: str :type gridSurfaceFileName: str :type gridSurfaceName: str :type outputSurfaceFile: str :type outputSurfaceNameAbove: str :type outputSurfaceNameBelow: str :type surface_clip_mode: int :returns: Surface clip status :rtype: int .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMESHUTIL._clip_surface_with_grid(GXContext._get_tls_geo(), inputSurfaceFile.encode(), inputSurface.encode(), gridSurfaceFileName.encode(), gridSurfaceName.encode(), outputSurfaceFile.encode(), outputSurfaceNameAbove.encode(), outputSurfaceNameBelow.encode(), surface_clip_mode) return ret_val @classmethod def clip_surface_with_extents(cls, inputSurfaceFile, inputSurface, outputSurfaceFile, outputSurfaceName, min_x, max_x, min_y, max_y, min_z, max_z): """ Clip a Surface with X,Y,Z extents :param inputSurfaceFile: Input Geosurface file :param inputSurface: Input Surface name within Geosurface file :param outputSurfaceFile: Output Surface file :param outputSurfaceName: Output Surface name :param min_x: Min value of X :param max_x: Max value of X :param min_y: Min value of Y :param max_y: Max value of Y :param min_z: Min value of Z :param max_z: Max value of Z :type inputSurfaceFile: str :type inputSurface: str :type outputSurfaceFile: str :type outputSurfaceName: str :type min_x: float :type max_x: float :type min_y: float :type max_y: float :type min_z: float :type max_z: float :returns: Surface clip status :rtype: int .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMESHUTIL._clip_surface_with_extents(GXContext._get_tls_geo(), inputSurfaceFile.encode(), inputSurface.encode(), outputSurfaceFile.encode(), outputSurfaceName.encode(), min_x, max_x, min_y, max_y, min_z, max_z) return ret_val @classmethod def clip_surface_with_polygon2d(cls, inputSurfaceFile, inputSurface, polygonFile, outputSurfaceFile, outputSurfaceName, maskInside): """ Clip a Surface a specified Polygon file :param inputSurfaceFile: Input Geosurface file :param inputSurface: Input Surface name within Geosurface file :param polygonFile: Polygon File :param outputSurfaceFile: Output Surface file :param outputSurfaceName: Output Surface name :param maskInside: Set true if the values inside polygon are to be masked :type inputSurfaceFile: str :type inputSurface: str :type polygonFile: str :type outputSurfaceFile: str :type outputSurfaceName: str :type maskInside: bool :returns: Surface clip status :rtype: int .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMESHUTIL._clip_surface_with_polygon2d(GXContext._get_tls_geo(), inputSurfaceFile.encode(), inputSurface.encode(), polygonFile.encode(), outputSurfaceFile.encode(), outputSurfaceName.encode(), maskInside) return ret_val @classmethod def compute_surface_union(cls, primarySurfaceFile, primarySurface, secondarySurfaceFile, secondarySurface, outputSurfaceFile, outputSurface): """ Compute union of two surfaces :param primarySurfaceFile: Primary Geosurface file :param primarySurface: Primary Surface Name within Geosurface File :param secondarySurfaceFile: Secondary Geosurface file :param secondarySurface: Secondary Surface Name within Geosurface File :param outputSurfaceFile: Output surface file :param outputSurface: Output surface name :type primarySurfaceFile: str :type primarySurface: str :type secondarySurfaceFile: str :type secondarySurface: str :type outputSurfaceFile: str :type outputSurface: str .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._compute_surface_union(GXContext._get_tls_geo(), primarySurfaceFile.encode(), primarySurface.encode(), secondarySurfaceFile.encode(), secondarySurface.encode(), outputSurfaceFile.encode(), outputSurface.encode()) @classmethod def compute_surface_clip(cls, primarySurfaceFile, primarySurface, secondarySurfaceFile, secondarySurface, outputSurfaceFile, outputSurface): """ Clip a surface with another surface, and output the clipped surfaces :param primarySurfaceFile: Primary Geosurface file :param primarySurface: Primary Surface Name within Geosurface File :param secondarySurfaceFile: Secondary Geosurface file :param secondarySurface: Secondary Surface Name within Geosurface File :param outputSurfaceFile: Output surface file :param outputSurface: Output surface name :type primarySurfaceFile: str :type primarySurface: str :type secondarySurfaceFile: str :type secondarySurface: str :type outputSurfaceFile: str :type outputSurface: str .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._compute_surface_clip(GXContext._get_tls_geo(), primarySurfaceFile.encode(), primarySurface.encode(), secondarySurfaceFile.encode(), secondarySurface.encode(), outputSurfaceFile.encode(), outputSurface.encode()) @classmethod def compute_surface_intersection(cls, primarySurfaceFile, primarySurface, secondarySurfaceFile, secondarySurface, outputSurfaceFile, outputSurface): """ Computes and outputs the intersection of two closed surfaces :param primarySurfaceFile: Primary Geosurface file :param primarySurface: Primary Surface Name within Geosurface File :param secondarySurfaceFile: Secondary Geosurface file :param secondarySurface: Secondary Surface Name within Geosurface File :param outputSurfaceFile: Output surface file :param outputSurface: Output surface name :type primarySurfaceFile: str :type primarySurface: str :type secondarySurfaceFile: str :type secondarySurface: str :type outputSurfaceFile: str :type outputSurface: str .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._compute_surface_intersection(GXContext._get_tls_geo(), primarySurfaceFile.encode(), primarySurface.encode(), secondarySurfaceFile.encode(), secondarySurface.encode(), outputSurfaceFile.encode(), outputSurface.encode()) @classmethod def compute_surface_simplification(cls, inputSurfaceFile, inputSurface, outputSurfaceFile, outputSurface): """ Simplifies a surface by reducing the number of edges by half :param inputSurfaceFile: Input Geosurface file :param inputSurface: Input Surface Name within Geosurface File :param outputSurfaceFile: Output Geosurface file :param outputSurface: Output Surface Name within Geosurface File :type inputSurfaceFile: str :type inputSurface: str :type outputSurfaceFile: str :type outputSurface: str .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._compute_surface_simplification(GXContext._get_tls_geo(), inputSurfaceFile.encode(), inputSurface.encode(), outputSurfaceFile.encode(), outputSurface.encode()) @classmethod def compute_surface_subdivision(cls, inputSurfaceFile, inputSurface, outputSurfaceFile, outputSurface): """ Smooths a surface by applying a loop subdivision algorithm :param inputSurfaceFile: Input Geosurface file :param inputSurface: Input Surface Name within Geosurface File :param outputSurfaceFile: Output Geosurface file :param outputSurface: Output Surface Name within Geosurface File :type inputSurfaceFile: str :type inputSurface: str :type outputSurfaceFile: str :type outputSurface: str .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._compute_surface_subdivision(GXContext._get_tls_geo(), inputSurfaceFile.encode(), inputSurface.encode(), outputSurfaceFile.encode(), outputSurface.encode()) @classmethod def does_surface_intersect(cls, primarySurfaceFile, primarySurface, secondarySurfaceFile, secondarySurface): """ Checks if the two surfaces intersect at all :param primarySurfaceFile: Primary Geosurface file :param primarySurface: Primary Surface Name within Geosurface File :param secondarySurfaceFile: Secondary Geosurface file :param secondarySurface: Secondary Surface Name within Geosurface File :type primarySurfaceFile: str :type primarySurface: str :type secondarySurfaceFile: str :type secondarySurface: str :returns: Returns 1 if intersects, 0 if surfaces do not intersect :rtype: int .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMESHUTIL._does_surface_intersect(GXContext._get_tls_geo(), primarySurfaceFile.encode(), primarySurface.encode(), secondarySurfaceFile.encode(), secondarySurface.encode()) return ret_val @classmethod def does_surface_self_intersect(cls, surfaceFile, surfaceName): """ Checks if a surface self-intersects :param surfaceFile: Geosurface file :param surfaceName: Primary Surface Name within Geosurface File :type surfaceFile: str :type surfaceName: str :returns: Returns 1 if surface self intersects, 0 if surface has no self-intersections :rtype: int .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMESHUTIL._does_surface_self_intersect(GXContext._get_tls_geo(), surfaceFile.encode(), surfaceName.encode()) return ret_val @classmethod def extract_isosurface_from_voxel(cls, voxelFile, surfaceFile, surfaceName, contourMin, contourMax, close): """ Extracts isosurface from a voxel, and saves the voxel to a Geosurface file :param voxelFile: Voxel file :param surfaceFile: Geosurface file :param surfaceName: Surface name within geosurface file :param contourMin: Minimum/higher value :param contourMax: Maximum/lower value :param close: Closed option - create a closed surface? :type voxelFile: str :type surfaceFile: str :type surfaceName: str :type contourMin: float :type contourMax: float :type close: bool .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._extract_isosurface_from_voxel(GXContext._get_tls_geo(), voxelFile.encode(), surfaceFile.encode(), surfaceName.encode(), contourMin, contourMax, close) @classmethod def reproject_geosurface_file(cls, input_surface_filename, output_surface_filename, ipj): """ Repoject surface with the coordinate system of the `GXIPJ <geosoft.gxapi.GXIPJ>`. :param input_surface_filename: Input Geosurface file name :param output_surface_filename: Output Geosurface file name :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` in which to place the new Geosurface coordinate system :type input_surface_filename: str :type output_surface_filename: str :type ipj: GXIPJ .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._reproject_geosurface_file(GXContext._get_tls_geo(), input_surface_filename.encode(), output_surface_filename.encode(), ipj) @classmethod def project_geosurface_onto_grid(cls, surface_filename, surface_name, input_geometry_grid, projection_method, output_grid): """ Repoject surface with the coordinate system of the `GXIPJ <geosoft.gxapi.GXIPJ>`. :param surface_filename: Input Geosurface file name :param surface_name: Geosurface surface name :param input_geometry_grid: Input Geometry grid :param projection_method: :ref:`SURFACE_PROJECTION_METHOD` :param output_grid: Output grid :type surface_filename: str :type surface_name: str :type input_geometry_grid: str :type projection_method: int :type output_grid: str .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._project_geosurface_onto_grid(GXContext._get_tls_geo(), surface_filename.encode(), surface_name.encode(), input_geometry_grid.encode(), projection_method, output_grid.encode()) @classmethod def copy_mesh_to_geo_surface_file(cls, input_surface_filename, surface_name, output_surface_filename): """ Copy a mesh from one geosurface file to another :param input_surface_filename: Input Geosurface file name :param surface_name: Geosurface surface name :param output_surface_filename: Output Geosurface file name :type input_surface_filename: str :type surface_name: str :type output_surface_filename: str .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._copy_mesh_to_geo_surface_file(GXContext._get_tls_geo(), input_surface_filename.encode(), surface_name.encode(), output_surface_filename.encode()) @classmethod def apply_transformation(cls, inputSurfaceFile, inputSurfaceName, outputSurfaceFile, outputSurfaceName, transformationMethod, x, y, z): """ Applies a transformation to a surface, see :ref:`SURFACE_TRANSFORMATION_METHOD`for available operations. The existing mesh will be preserved, and a new mesh will be created with the target name in the target file. Reprojection willbe handled automatically in the case that the coordinate systems differ. :param inputSurfaceFile: Input surface file name :param inputSurfaceName: Input surface layer (mesh) name :param outputSurfaceFile: Output surface file name :param outputSurfaceName: Output surface layer (mesh) name :param transformationMethod: :ref:`SURFACE_TRANSFORMATION_METHOD` :param x: X transformation component :param y: Y transformation component :param z: Z transformation component :type inputSurfaceFile: str :type inputSurfaceName: str :type outputSurfaceFile: str :type outputSurfaceName: str :type transformationMethod: int :type x: float :type y: float :type z: float .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapMESHUTIL._apply_transformation(GXContext._get_tls_geo(), inputSurfaceFile.encode(), inputSurfaceName.encode(), outputSurfaceFile.encode(), outputSurfaceName.encode(), transformationMethod, x, y, z) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXPRAGA3.rst .. _GXPRAGA3: GXPRAGA3 class ================================== .. autoclass:: geosoft.gxapi.GXPRAGA3 :members: <file_sep>/geosoft/gxpy/tests/test_grid.py import unittest import os import numpy as np import geosoft import geosoft.gxapi as gxa import geosoft.gxpy.gx as gx import geosoft.gxpy.system as gsys import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.map as gxmap import geosoft.gxpy.gdb as gxgdb from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'testgrids.zip'), folder=cls._gx.temp_folder()) cls.g1f = os.path.join(cls.folder, 'test_grid_1.grd') cls.g2f = os.path.join(cls.folder, 'test_grid_2.grd') cls.gcf = os.path.join(cls.folder, 'test_bool1_color.grd') cls.mag = os.path.join(cls.folder, 'mag.grd') cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'section_grids.zip'), folder=cls._gx.temp_folder()) cls.section = os.path.join(cls.folder, 'section.grd') cls.swing = os.path.join(cls.folder, 'swing_section.grd') cls.crooked = os.path.join(cls.folder, 'crooked_section.grd') def test_gridProperties(self): self.start() with gxgrd.Grid.open(self.g1f) as g1: properties = g1.properties() 
self.assertEqual(properties.get('dx'),0.01) self.assertEqual(properties.get('dy'),0.01) self.assertEqual(properties.get('x0'),7.0) self.assertEqual(properties.get('y0'),44.0) self.assertEqual(properties.get('rot'),0.0) self.assertEqual(properties.get('is_color'),False) self.assertEqual(properties.get('nx'),101) self.assertEqual(properties.get('ny'),101) self.assertEqual(str(properties.get('coordinate_system')),'WGS 84') self.assertEqual(g1.dx, 0.01) self.assertEqual(g1.dy, 0.01) self.assertEqual(g1.x0, 7.0) self.assertEqual(g1.y0, 44.0) self.assertEqual(g1.rot, 0.0) self.assertEqual(g1.is_color, False) self.assertEqual(g1.nx, 101) self.assertEqual(g1.ny, 101) self.assertEqual(str(g1.coordinate_system), 'WGS 84') with gxgrd.Grid.open(self.gcf) as gc: properties = gc.properties() self.assertEqual(properties.get('is_color'),True) self.assertEqual(gc.is_color, True) def test_statistics(self): self.start() with gxgrd.Grid.open(self.g1f) as g1: stats = g1.statistics() self.assertAlmostEqual(stats['mean'], 997.2176063303659) self.assertEqual(stats['num_data'] + stats['num_dummy'], g1.nx * g1.ny) def test_copy(self): self.start() #create a grids outGrid = os.path.join(self.folder, 'test_copy') with gxgrd.Grid.open(self.g1f) as g: mean = g.statistics()['mean'] with gxgrd.Grid.copy(g, outGrid) as grd: grd.delete_files() properties = grd.properties() self.assertEqual(properties.get('dx'),0.01) self.assertEqual(properties.get('dy'),0.01) self.assertEqual(properties.get('x0'),7.0) self.assertEqual(properties.get('y0'),44.0) self.assertEqual(properties.get('rot'),0.0) self.assertEqual(properties.get('nx'),101) self.assertEqual(properties.get('ny'),101) self.assertEqual(str(properties.get('coordinate_system')),'WGS 84') self.assertAlmostEqual(grd.statistics()['mean'], mean) # temporary grid copy with gxgrd.Grid.open(self.g1f) as g: mean = g.statistics()['mean'] with gxgrd.Grid.copy(g) as grd: self.assertAlmostEqual(grd.statistics()['mean'], mean) def test_set_properties(self): 
self.start() with gxgrd.Grid.open(self.g1f) as g1: properties = g1.properties() properties['x0'] = 45.0 properties['y0'] = -15.0 properties['dx'] = 1.5 properties['dy'] = 2.5 properties['rot'] = 33.333333 properties['coordinate_system'] = gxcs.Coordinate_system('NAD27 / UTM zone 18N') self.assertRaises( gxgrd.GridException, g1.set_properties, properties) outGrid = os.path.join(self.folder, 'test_set_properties.grd(GRD;TYPE=SHORT;COMP=SPEED)') with gxgrd.Grid.open(self.g1f) as g: with gxgrd.Grid.copy(g, outGrid) as grd: grd.dx = 1.5 grd.dy = 2.5 grd.x0 = 45.0 grd.y0 = -15.0 grd.rot = 33.333333 grd.coordinate_system = gxcs.Coordinate_system('NAD27 / UTM zone 18N') with gxgrd.Grid.open(outGrid) as grd: properties = grd.properties() self.assertEqual(properties.get('dx'),1.5) self.assertEqual(properties.get('dy'),2.5) self.assertEqual(properties.get('x0'),45.0) self.assertEqual(properties.get('y0'),-15.0) self.assertEqual(properties.get('rot'),33.333333) self.assertEqual(properties.get('nx'),101) self.assertEqual(properties.get('ny'),101) self.assertEqual(str(properties.get('coordinate_system')),'NAD27 / UTM zone 18N') self.assertEqual(properties.get('dtype'),np.int16) outGrid = os.path.join(self.folder, 'test_set_properties.grd(GRD;TYPE=SHORT;COMP=SPEED)') with gxgrd.Grid.open(self.g1f) as g: with gxgrd.Grid.copy(g, outGrid, overwrite=True) as grd: grd.set_properties(properties) with gxgrd.Grid.open(outGrid) as grd: properties = grd.properties() self.assertEqual(properties.get('dx'),1.5) self.assertEqual(properties.get('dy'),2.5) self.assertEqual(properties.get('x0'),45.0) self.assertEqual(properties.get('y0'),-15.0) self.assertEqual(properties.get('rot'),33.333333) self.assertEqual(properties.get('nx'),101) self.assertEqual(properties.get('ny'),101) self.assertEqual(str(properties.get('coordinate_system')),'NAD27 / UTM zone 18N') self.assertEqual(properties.get('dtype'),np.int16) def test_in_memory(self): self.start() with gxgrd.Grid.new(in_memory=True, 
properties={'dtype': np.int16, 'nx': 100, 'ny': 50, 'x0':4, 'y0':8, 'dx': 0.1, 'dy':0.2, 'rot': 5, 'coordinate_system': gxcs.Coordinate_system('NAD27 / UTM zone 18N')}) as grd: properties = grd.properties() self.assertEqual(properties.get('dx'),0.1) self.assertEqual(properties.get('dy'),0.2) self.assertEqual(properties.get('x0'),4.0) self.assertEqual(properties.get('y0'),8.0) self.assertEqual(properties.get('rot'),5.0) self.assertEqual(properties.get('nx'),100) self.assertEqual(properties.get('ny'),50) self.assertEqual(properties.get('gridtype'), 'MEMORY') self.assertEqual(str(properties.get('coordinate_system')),'NAD27 / UTM zone 18N') self.assertEqual(properties.get('dtype'),np.int16) m = grd.metadata self.assertFalse(bool(m)) stats = grd.statistics() self.assertTrue(stats['mean'] is None) def test_from_pg(self): self.start() with gxgrd.Grid.open(self.mag) as grd: magpg = grd.gxpg() with gxgrd.Grid.from_data_array(magpg, properties=grd.properties()) as newpg: self.assertEqual(newpg.nx, grd.nx) def test_temp(self): self.start() with gxgrd.Grid.new(properties={'dtype': np.int16, 'nx': 100, 'ny': 50, 'x0':4, 'y0':8, 'dx': 0.1, 'dy':0.2, 'rot': 5, 'coordinate_system': gxcs.Coordinate_system('NAD27 / UTM zone 18N')}) as grd: properties = grd.properties() self.assertEqual(properties.get('dx'),0.1) self.assertEqual(properties.get('dy'),0.2) self.assertEqual(properties.get('x0'),4.0) self.assertEqual(properties.get('y0'),8.0) self.assertEqual(properties.get('rot'),5.0) self.assertEqual(properties.get('nx'),100) self.assertEqual(properties.get('ny'),50) self.assertEqual(str(properties.get('coordinate_system')),'NAD27 / UTM zone 18N') self.assertEqual(properties.get('dtype'),np.int16) m = grd.metadata self.assertFalse(bool(m)) def test_delete_grid(self): self.start() self.assertRaises(gxgrd.GridException, gxgrd.Grid.new, self.g1f) with gxgrd.Grid.open(self.g1f) as g: with gxgrd.Grid.copy(g, os.path.join(self.folder,'testDelete.grd'), overwrite=True) as g2: filen = 
g2.file_name g2.delete_files() self.assertFalse(os.path.isfile(filen)) self.assertFalse(os.path.isfile(filen+'.gi')) self.assertFalse(os.path.isfile(filen+'.xml')) def test_hgd(self): self.start() with gxgrd.Grid.open(self.g1f) as g: ofile = gxgrd.Grid.decorate_name(os.path.join(self.folder, 'test.hgd'), 'HGD') with gxgrd.Grid.copy(g, ofile) as g2: g2.delete_files() properties = g2.properties() self.assertEqual(properties.get('decoration'),'HGD') self.assertEqual(properties.get('gridtype'),'HGD') self.assertAlmostEqual(properties.get('dx'),0.01) self.assertAlmostEqual(properties.get('dy'),0.01) self.assertAlmostEqual(properties.get('x0'),7.0) self.assertAlmostEqual(properties.get('y0'),44.0) self.assertEqual(properties.get('rot'),0.0) self.assertEqual(properties.get('nx'),101) self.assertEqual(properties.get('ny'),101) self.assertEqual(str(properties.get('coordinate_system')),'WGS 84') def test_name_parts(self): self.start() namep = gxgrd.Grid.name_parts("f:/someFolder/name.grd(GRD;TYPE=SHORT)") self.assertEqual(namep,('f:\\someFolder', 'name.grd', 'name', '.grd', 'GRD;TYPE=SHORT')) namep = gxgrd.Grid.name_parts(".\\name.grd(GRD;TYPE=SHORT)") self.assertEqual(namep[0],os.getcwd()) self.assertEqual(namep[1:],('name.grd', 'name', '.grd', 'GRD;TYPE=SHORT')) namep = gxgrd.Grid.name_parts(".\\name.grd") self.assertEqual(namep[0],os.getcwd()) self.assertEqual(namep[1:],('name.grd', 'name', '.grd', '')) ref = 'billybob(decs;more)' name = gxgrd.Grid.decorate_name('billybob','(decs;more)') self.assertEqual(name,ref) name = gxgrd.Grid.decorate_name('billybob','decs;more') self.assertEqual(name,ref) name = gxgrd.Grid.decorate_name('billybob','(decs;more') self.assertEqual(name,ref) name = gxgrd.Grid.decorate_name('billybob','decs;more)') self.assertEqual(name,ref) name = gxgrd.Grid.decorate_name(ref) self.assertEqual(name,ref) def test_index_window(self): self.start() with gxgrd.Grid.open(self.g1f) as g: p = g.properties() window = 
os.path.join(self.folder,'testwindow.grd(GRD)') with gxgrd.Grid.index_window(g, window, 4, 2, 96, 5, overwrite=True) as gw: pw = gw.properties() self.assertAlmostEqual(gw.x0, g.x0+(4*g.dx)) self.assertAlmostEqual(gw.y0, g.y0+(2*g.dy)) self.assertEqual(gw.nx, 96) self.assertEqual(gw.ny, 5) with gxgrd.Grid.index_window(g, x0=4, y0=2, nx=96, ny=5, overwrite=True) as gw: nx = gw.nx gw.delete_files() pw = gw.properties() self.assertAlmostEqual(gw.x0, g.x0+(4*g.dx)) self.assertAlmostEqual(gw.y0, g.y0+(2*g.dy)) self.assertEqual(gw.nx, 96) self.assertEqual(gw.ny, 5) self.assertEqual(gw.name, 'test_grid_1_(4,2)(96,5)') with gxgrd.Grid.index_window(g, window, nx=20, ny=100, overwrite=True) as gw: gw.delete_files() pw = gw.properties() self.assertAlmostEqual(pw.get('x0'),p.get('x0')) self.assertAlmostEqual(pw.get('y0'),p.get('y0')) self.assertEqual(pw.get('nx'),20) self.assertEqual(pw.get('ny'),100) with gxgrd.Grid.index_window(g, window, x0=29, y0=100, overwrite=True) as gw: gw.delete_files() pw = gw.properties() dx = p.get('dx') self.assertAlmostEqual(pw.get('x0'),p.get('x0')+(29*dx)) dy = p.get('dy') self.assertAlmostEqual(pw.get('y0'),p.get('y0')+(100*dy)) self.assertEqual(pw.get('nx'),72) self.assertEqual(pw.get('ny'),1) self.assertRaises(gxgrd.GridException, gxgrd.Grid.index_window, g, window, x0=2900, y0=3600, ny=2) self.assertRaises(gxgrd.GridException, gxgrd.Grid.index_window, g, window, -1) self.assertRaises(gxgrd.GridException, gxgrd.Grid.index_window, g, window, y0=-1) with gxgrd.Grid.open(self.g1f) as g: window = os.path.join(self.folder, 'testwindow.grd(GRD)') with gxgrd.Grid.index_window(g, window, 4, 2, 96, 5, overwrite=True) as gw: gw.delete_files() ex = gw.extent_2d() self.assertAlmostEqual(ex[0], 7.035) self.assertAlmostEqual(ex[1], 44.015) self.assertAlmostEqual(ex[2], 7.995) self.assertAlmostEqual(ex[3], 44.065) g.rot = 10.0 window = os.path.join(self.folder, 'testwindow.grd(GRD)') with gxgrd.Grid.index_window(g, window, 4, 2, 96, 5, overwrite=True) as 
gw: gw.delete_files() ex = gw.extent_2d() self.assertAlmostEqual(ex[0], 6.9513424466727765) self.assertAlmostEqual(ex[1], 43.81649172360945) self.assertAlmostEqual(ex[2], 7.905440298447842) self.assertAlmostEqual(ex[3], 44.032434361820314) def test_from_array(self): self.start() file_name = os.path.join(self.folder, "test_array.grd") data = np.arange(24).reshape((8,3)) with gxgrd.Grid.from_data_array(data, file_name) as grd: grd.delete_files() properties = grd.properties() self.assertEqual(properties.get('dx'),1.0) self.assertEqual(properties.get('dy'),1.0) self.assertEqual(properties.get('x0'),0.0) self.assertEqual(properties.get('y0'),0.0) self.assertEqual(properties.get('rot'),0.0) self.assertEqual(properties.get('nx'),3) self.assertEqual(properties.get('ny'),8) self.assertEqual(str(properties.get('coordinate_system')),'*unknown') with gxgrd.Grid.from_data_array(list(data), file_name) as grd: grd.delete_files() properties = grd.properties() self.assertEqual(properties.get('dx'),1.0) self.assertEqual(properties.get('dy'),1.0) self.assertEqual(properties.get('x0'),0.0) self.assertEqual(properties.get('y0'),0.0) self.assertEqual(properties.get('rot'),0.0) self.assertEqual(properties.get('nx'),3) self.assertEqual(properties.get('ny'),8) self.assertEqual(str(properties.get('coordinate_system')),'*unknown') with gxgrd.Grid.from_data_array(data, file_name, properties={'x0':575268, 'dx':2.0, 'dy':1.5, 'rot':15, 'coordinate_system':'WGS 84'}) as grd: grd.delete_files() properties = grd.properties() self.assertEqual(properties.get('dx'),2.0) self.assertEqual(properties.get('dy'),1.5) self.assertEqual(properties.get('x0'),575268.0) self.assertEqual(properties.get('y0'),0.0) self.assertEqual(properties.get('rot'),15.0) self.assertEqual(properties.get('nx'),3) self.assertEqual(properties.get('ny'),8) self.assertEqual(str(properties.get('coordinate_system')),'WGS 84') def test_array_locations(self): self.start() with gxgrd.Grid.new(properties={'x0':100, 'y0':-25.25, 'dx': 5, 
'nx':101, 'ny':501}) as g: a = g.xyzv() self.assertEqual(len(a.shape),3) self.assertEqual(a.shape[0], g.ny) self.assertEqual(a.shape[1], g.nx) self.assertEqual(a.shape[2], 4) self.assertEqual(a[0,0,0], 100.0) self.assertEqual(a[0,0,1], -25.25) self.assertEqual(a[0,0,2], 0.0) self.assertTrue(np.isnan(a[0, 0, 3])) self.assertEqual(a[0,1,0]-a[0,0,0], g.dx) self.assertEqual(a[1,0,1]-a[0,0,1], g.dy) self.assertEqual(a[0,0,2]-a[1,1,2], 0.) props = {'x0':100, 'y0':-25.25, 'dx': 5, 'nx':101, 'ny':501, 'rot':10} a = gxgrd.array_locations(props) self.assertEqual((tuple(a[0, 0, :])), (100.0, -25.25, 0.0)) self.assertEqual((tuple(a[0, 10, :])), (149.24038765061039, -33.932408883346518, 0.0)) self.assertEqual((tuple(a[10, 0, :])), (108.68240888334651, 23.990387650610401, 0.0)) self.assertEqual((tuple(a[10, 10, :])), (157.92279653395693, 15.307978767263883, 0.0)) cs = gxcs.Coordinate_system({'type': 'local', 'lon_lat': (-96,43), 'azimuth': 10}).gxf props = {'x0':0, 'y0':0, 'dx': 5, 'nx':101, 'ny':501, 'coordinate_system':cs} a = gxgrd.array_locations(props) self.assertEqual((tuple(a[0, 0, :])), (0.0, 0.0, 0.0)) self.assertEqual((tuple(a[0, 10, :])), (49.240387650610401, -8.6824088833465165, 0.0)) self.assertEqual((tuple(a[10, 0, :])), (8.6824088833465165, 49.240387650610401, 0.0)) self.assertEqual((tuple(a[10, 10, :])), (57.92279653395692, 40.557978767263883, 0.0)) def test_hanging_resource(self): self.start() g1 = gxgrd.Grid.open(self.g1f) g2 = gxgrd.Grid.open(self.g2f) gx_tls_globals = gx._get_tls_globals() rs = len(gx_tls_globals._res_heap) self.assertTrue(rs >= 2) g1.close() self.assertEqual(len(gx_tls_globals._res_heap), rs-1) g2.close() def test_extent(self): self.start() with gxgrd.Grid.open(self.g1f) as g: with gxgrd.Grid.copy(g, os.path.join(self.folder, 'test_extent.grd(GRD)')) as grd: grd.delete_files() grd.x0 = grd.y0 = 0.0 grd.dx = grd.dy = 0.1 grd.rot = 0.0 ex = grd.extent_2d() self.assertAlmostEqual(ex[0], -0.05) self.assertAlmostEqual(ex[1], -0.05) 
self.assertAlmostEqual(ex[2], 10.05) self.assertAlmostEqual(ex[3], 10.05) grd.rot = 30.0 ex = grd.extent_2d() self.assertAlmostEqual(ex[0], -0.06830127018922194) self.assertAlmostEqual(ex[1], -5.068301270189221) self.assertAlmostEqual(ex[2], 13.72855530803361) self.assertAlmostEqual(ex[3], 8.72855530803361) cs = grd.coordinate_system cs_name = cs.cs_name(gxcs.NAME_HCS_VCS) + ' <0,0,0,0,0,30>' grd.coordinate_system = gxcs.Coordinate_system(cs_name) ex = grd.extent_2d() self.assertAlmostEqual(ex[0], -0.06830127018922194) self.assertAlmostEqual(ex[1], -5.068301270189221) self.assertAlmostEqual(ex[2], 13.72855530803361) self.assertAlmostEqual(ex[3], 8.72855530803361) grd.rot = 0 ex = grd.extent_2d() self.assertAlmostEqual(ex[0], -0.05) self.assertAlmostEqual(ex[1], -0.05) self.assertAlmostEqual(ex[2], 10.05) self.assertAlmostEqual(ex[3], 10.05) ex = grd.extent_3d() self.assertAlmostEqual(ex[0], -0.06830127018922) self.assertAlmostEqual(ex[1], -5.068301270189221) self.assertAlmostEqual(ex[2], 0.0) self.assertAlmostEqual(ex[3], 13.72855530803361) self.assertAlmostEqual(ex[4], 8.72855530803361) self.assertAlmostEqual(ex[5], 0) cs_name = cs.cs_name(gxcs.NAME_HCS_VCS) + ' <0,0,0,90,0,30>' grd.coordinate_system = gxcs.Coordinate_system(cs_name) ex = grd.extent_3d() self.assertAlmostEqual(ex[0], -0.04330127018922194) self.assertAlmostEqual(ex[1], -5.025) self.assertAlmostEqual(ex[2], -10.05) self.assertAlmostEqual(ex[3], 8.70355530803360) self.assertAlmostEqual(ex[4], 0.025) self.assertAlmostEqual(ex[5], 0.05) grd.rot = 30.0 ex = grd.extent_2d() self.assertAlmostEqual(ex[0], -0.0683012701892219) self.assertAlmostEqual(ex[1], -5.068301270189221) self.assertAlmostEqual(ex[2], 13.72855530803361) self.assertAlmostEqual(ex[3], 8.72855530803361) ex = grd.extent_3d() self.assertAlmostEqual(ex[0], -0.05915063509461113) self.assertAlmostEqual(ex[1], -6.864277654016804) self.assertAlmostEqual(ex[2], -8.72855530803361) self.assertAlmostEqual(ex[3], 11.889277654016805) 
self.assertAlmostEqual(ex[4], 0.03415063509461143) self.assertAlmostEqual(ex[5], 5.068301270189221) def test_read(self): self.start() with gxgrd.Grid.open(self.g1f) as g: for row in range(g.ny): vv = g.read_row(row) self.assertEqual(vv.length, g.nx) for col in range(g.nx): vv = g.read_column(col) self.assertEqual(vv.length, g.ny) def test_write(self): self.start() with gxgrd.Grid.open(self.g1f) as g: with gxgrd.Grid.new(properties=g.properties()) as gwr: for row in range(g.ny): gwr.write_row(g.read_row(row), row) with gxgrd.Grid.new(properties=g.properties()) as gwc: for col in range(g.nx): gwc.write_column(g.read_column(col)) def test_getitem(self): self.start() with gxgrd.Grid.open(self.g1f) as g: self.assertTrue(isinstance(g[0], tuple)) self.assertTrue(isinstance(g[0][3], float)) self.assertEqual(g[0][3], 771.0) self.assertEqual(g[(0,0)][3], 771.0) self.assertEqual(g[(g.nx * g.ny) - 1][3], 243.0) self.assertEqual(g[(g.nx - 1, g.ny - 1)][3], 243.0) def test_copy_from_file_name(self): self.start() with gxgrd.Grid.copy(self.g1f, dtype=int) as g: self.assertTrue(isinstance(g[0][3], int)) self.assertEqual(g[0][3], 771) self.assertEqual(g[45], (7.45, 44.0, 0.0, 1699)) def test_open_int(self): self.start() with gxgrd.Grid.open(self.g1f, dtype=int) as g: vv = g.read_row(0) self.assertEqual(g[0][3], 771) def test_value(self): self.start() with gxgrd.Grid.open(self.g1f) as g: self.assertEqual(g.get_value(7.043, 44.625), 1912.4500000000035) self.assertEqual(g.get_value(0, 0), None) with gxgrd.Grid.open(self.g1f, dtype=int) as g: self.assertEqual(g.get_value(7.043, 44.625), 1912.4500000000035) self.assertEqual(g.get_value(0,0), None) def test_metadata(self): self.start() with gxgrd.Grid.open(self.g1f, mode=gxgrd.FILE_READWRITE) as g: m = g.metadata gm = m['geosoft'] self.assertTrue('dataset' in gm) self.assertTrue('georeference' in gm['dataset']) newstuff = {'maki':{'a':1, 'b':(4, 5, 6), 'units': 'nT'}} g.metadata = newstuff with gxgrd.Grid.open(self.g1f) as g: m = 
g.metadata maki = m['maki'] self.assertEqual(maki['b'], ['4', '5', '6']) self.assertEqual(maki['units'], 'nT') def test_units_of_measure(self): self.start() with gxgrd.Grid.open(self.g1f, mode=gxgrd.FILE_READWRITE) as g: uom = g.unit_of_measure self.assertEqual(uom, 'm') g.unit_of_measure = 'metres' with gxgrd.Grid.open(self.g1f) as g: uom = g.unit_of_measure self.assertEqual(uom, 'metres') def test_iterator(self): self.start() with gxgrd.Grid.open(self.g2f) as g0: self.assertEqual((8.0, 44.0, 0.0, 763.0), g0[0, 0]) self.assertEqual((9.0, 45.0, 0.0, 88.0), g0[100, 100]) with gxgrd.Grid.index_window(g0, nx=75, ny=60, overwrite=True) as g: g.delete_files() self.assertEqual((8.0099999999999998, 44.009999999999998, 0.0, 384.0), g[1, 1]) self.assertEqual((8.7400000000000002, 44.590000000000003, 0.0, 530.0), g[74, 59]) data = g.xyzv()[:, :, 3] i = 0 sum = 0.0 dummies = 0 for x, y, z, v in g: i += 1 if v is None: dummies += 1 else: sum += v self.assertEqual(i, g.nx * g.ny) self.assertEqual(sum, np.nansum(data)) self.assertEqual(dummies, np.count_nonzero(np.isnan(data))) def test_xyz(self): self.start() with gxgrd.Grid.open(self.g1f) as g: self.assertEqual(g.xyz(0), (g.x0, g.y0, 0.0)) self.assertEqual(g.xyz(1), (g.x0 + g.dx, g.y0, 0.0)) self.assertEqual(g.xyz(g.nx), (g.x0, g.y0 + g.dy, 0.0)) self.assertEqual(g.xyz((0,1)), (g.x0, g.y0 + g.dy, 0.0)) with gxgrd.Grid.copy(g) as gm: cs_name = gxcs.name_from_hcs_orient_vcs(gm.coordinate_system.hcs, '0, 0, 1000, 0, -90, 25', '') gm.coordinate_system = cs_name self.assertEqual(gm.xyz(0), (18.595203516590775, 39.8775426296126, 1007.0)) self.assertEqual(gm.xyz((g.nx-1, g.ny-1)), (19.017821778331474, 40.783850416649244, 1008.0)) gm.rot = 2.0 self.assertEqual(gm.xyz(0), (18.595203516590775, 39.8775426296126, 1007.0)) self.assertEqual(gm.xyz((g.nx - 1, g.ny - 1)), (19.00281516607315, 40.75166863280787, 1008.0342903237216)) def test_figure_map(self): self.start() map_file = gxgrd.figure_map(self.g1f, map_file='figure_map.map', 
title='image_test', features='all').file_name self.crc_map(map_file) def test_np(self): self.start() with gxgrd.Grid.open(self.g1f) as g1: data = g1.np() self.assertEqual(data.dtype, np.dtype(np.float32)) self.assertEqual(data.shape, (101, 101)) self.assertEqual(data[0, 0], 771.0) self.assertEqual(data[100, 100], 243.0) self.assertEqual(10081870.0, np.nansum(data)) self.assertEqual(91, np.count_nonzero(np.isnan(data))) with gxgrd.Grid.open(self.g2f) as g2: data = g2.np() self.assertEqual(data.dtype, np.dtype(np.float32)) self.assertEqual(data.shape, (101, 101)) self.assertEqual(data[0, 0], 763.0) self.assertEqual(data[100, 100], 88.0) self.assertEqual(2696851.0, np.nansum(data)) self.assertEqual(0, np.count_nonzero(np.isnan(data))) with gxgrd.Grid.open(self.gcf) as gc: data = gc.np() self.assertEqual(data.dtype, np.dtype(np.uint8)) self.assertEqual(data.shape, (153, 254, 4)) col_1 = data[0, 0] self.assertEqual(col_1[0], 0) self.assertEqual(col_1[1], 0) self.assertEqual(col_1[2], 0) self.assertEqual(col_1[3], 0) col_2 = data[100, 100] self.assertEqual(col_2[0], 208) self.assertEqual(col_2[1], 144) self.assertEqual(col_2[2], 102) self.assertEqual(col_2[3], 255) def test_image_file(self): self.start() image_file = gxgrd.image_file(self.g1f) with gxgrd.Grid.open(self.g1f) as g: with gxgrd.Grid.open(image_file + '(IMG,t=png)') as gi: self.assertEqual(g.coordinate_system, gi.coordinate_system) self.assertEqual(g.nx, gi.nx) self.assertEqual(g.ny, gi.ny) self.assertAlmostEqual(g.x0, gi.x0) self.assertAlmostEqual(g.y0, gi.y0) self.assertAlmostEqual(g.dx, gi.dx) self.assertAlmostEqual(g.dy, gi.dy) self.assertAlmostEqual(g.rot, gi.rot) with gxgrd.Grid.open(self.g1f) as g: with gxgrd.Grid.open(g.image_file() + '(IMG,t=png)') as gi: self.assertEqual(g.coordinate_system, gi.coordinate_system) self.assertEqual(g.nx, gi.nx) self.assertEqual(g.ny, gi.ny) self.assertAlmostEqual(g.x0, gi.x0) self.assertAlmostEqual(g.y0, gi.y0) self.assertAlmostEqual(g.dx, gi.dx) 
self.assertAlmostEqual(g.dy, gi.dy) self.assertAlmostEqual(g.rot, gi.rot) with gxgrd.Grid.open(self.g1f) as g: g.rot = 30 with gxgrd.Grid.open(g.image_file(pix_width=800) + '(IMG,t=png)') as gi: self.assertEqual(g.coordinate_system, gi.coordinate_system) self.assertEqual(800, gi.nx) self.assertEqual(800, gi.ny) self.assertAlmostEqual(6.995631249999999, gi.x0, 4) self.assertAlmostEqual(43.99563125, gi.y0, 4) def test_default_color_map(self): self.start() with gxgrd.Grid.open(self.g1f) as g1: cm = g1.get_default_color_map() self.assertEqual(gxa.ITR_ZONE_EQUALAREA, cm.model_type) self.assertEqual(39, cm.length) v, rgb = cm.color_map_rgb[0] self.assertAlmostEqual(231.6705835, v) self.assertEqual(0, rgb[0]) self.assertEqual(0, rgb[1]) self.assertEqual(255, rgb[2]) limits = cm.data_limits self.assertAlmostEqual(157, limits[0]) self.assertAlmostEqual(3187, limits[1]) def test_default_color_map_none_set(self): self.start() with gxgrd.Grid.open(self.mag) as g1: cm = g1.get_default_color_map() self.assertEqual(gxa.ITR_ZONE_EQUALAREA, cm.model_type) self.assertEqual(39, cm.length) v, rgb = cm.color_map_rgb[0] self.assertAlmostEqual(4748.27257047, v) self.assertEqual(0, rgb[0]) self.assertEqual(0, rgb[1]) self.assertEqual(255, rgb[2]) limits = cm.data_limits self.assertAlmostEqual(3796.711425781, limits[0]) self.assertAlmostEqual(6295.0, limits[1]) def test_uom(self): self.start() uom = gxgrd.Grid.open(self.mag).unit_of_measure with gxgrd.Grid.open(self.mag) as g: g.unit_of_measure = 'maki' self.assertEqual(g.unit_of_measure, 'maki') self.assertEqual(gxgrd.Grid.open(self.mag).unit_of_measure, uom) with gxgrd.Grid.open(self.mag, mode=gxgrd.FILE_READWRITE) as g: g.unit_of_measure = 'maki' self.assertEqual(g.unit_of_measure, 'maki') self.assertEqual(gxgrd.Grid.open(self.mag).unit_of_measure, 'maki') def test_reproject(self): self.start() gxgrd.Grid.open(self.mag, mode=gxgrd.FILE_READWRITE).coordinate_system = "NAD27 / UTM zone 32N" with gxgrd.Grid.open(self.mag) as g: 
self.assertEqual(str(g.coordinate_system), "NAD27 / UTM zone 32N") with gxgrd.Grid.open(self.mag, coordinate_system="NAD83 / UTM zone 32N") as g: self.assertEqual(str(g.coordinate_system), "NAD83 / UTM zone 32N") with gxgrd.Grid.open(self.mag, coordinate_system="WGS 84") as g: self.assertEqual(str(g.coordinate_system), "WGS 84") with gxgrd.Grid.open(self.mag, coordinate_system='', cell_size=75) as g: self.assertEqual(g.dx, 75.) self.assertEqual(g.dy, 75.) self.assertEqual(g.nx, 125) self.assertEqual(g.ny, 191) with gxgrd.Grid.open(self.mag, cell_size=75, expand=25) as g: self.assertEqual(g.dx, 75.) self.assertEqual(g.dy, 75.) self.assertEqual(g.nx, 183) self.assertEqual(g.ny, 280) def test_section(self): self.start() with gxgrd.Grid.open(self.section) as sect: self.assertTrue(sect.coordinate_system.is_oriented) self.assertEqual(sect.extent_xyz, (515694.9128668542, 7142239.234535628, 1425.0, 516233.9140090464, 7142637.2015803885, 1835.0)) self.assertEqual(sect.extent_2d(), (-5.0, 1425.0, 665.0, 1835.0)) def test_swing(self): self.start() with gxgrd.Grid.open(self.swing) as swing: self.assertTrue(swing.coordinate_system.is_oriented) self.assertEqual(swing.extent_xyz, (716313.064376335, 1716142.3054918314, -0.6066017177982133, 717108.3819305873, 1716809.6889240067, 360.01785668734107)) self.assertEqual(swing.extent_2d(), (-347.1403049983618, -15.0, 363.006674942662, 495.0)) def test_crooked(self): self.start() with gxgrd.Grid.open(self.crooked) as crooked: self.assertTrue(crooked.coordinate_system.is_oriented) self.assertTrue(crooked.is_crooked_path) self.assertEqual(crooked.extent_xyz, (632840.885099, 4633310.4612, 1203.0, 634556.6023, 4635124.0248, 1217.0)) self.assertEqual(crooked.extent_2d(), (-1.0, 1203.0, 4071.0, 1217.0)) def test_minimum_curvature(self): self.start() def feed_data(n): if n >= len(nxyv): return None return nxyv[n] def gdb_from_callback(callback): _gdb = gxgdb.Geosoft_gdb.new() channels = ('x', 'y', 'v') il = 0 xyz_list = callback(il) while 
xyz_list is not None: _gdb.write_line('L{}'.format(il), xyz_list, channels=channels) il += 1 xyz_list = callback(il) _gdb.xyz_channels = channels[:2] return _gdb xyv = [(45., 10., 100), (60., 25., 77.), (50., 8., 80.)] with gxgrd.Grid.minimum_curvature(xyv) as grd: self.assertEqual((grd.nx, grd.ny), (9, 9)) self.assertAlmostEqual(grd.statistics()['sd'], 8.708599, 5) # a callback, used for very large data, or to feed data efficiently from some other source. nxyv = np.array([[(45., 10., 100), (60., 25., 77.), (50., 8., 81.), (55., 11., 66.)], [(20., 15., 108), (25., 5., 77.), (33., 9., np.nan), (28., 2., 22.)], [(35., 18., 110), (40., 31., 77.), (13.1, 3.88, 83.), (44., 4., 7.)]]) with gxgrd.Grid.minimum_curvature(feed_data, cs=1.) as grd: self.assertEqual((grd.nx, grd.ny), (48, 30)) self.assertAlmostEqual(grd.statistics()['sd'], 30.104400923062535, 5) with gxgrd.Grid.minimum_curvature(feed_data, cs=0.25, bkd=20) as grd: self.assertEqual((grd.nx, grd.ny), (189, 117)) self.assertAlmostEqual(grd.statistics()['sd'], 22.320659139902336, 5) with gdb_from_callback(feed_data) as gdb: gxgdb.Channel(gdb, 'v').unit_of_measure = 'maki' with gxgrd.Grid.minimum_curvature((gdb, 'v'), cs=0.25, bkd=20) as grd: self.assertEqual((grd.nx, grd.ny), (189, 117)) self.assertAlmostEqual(grd.statistics()['sd'], 22.320659139902336, 5) self.assertEqual(grd.unit_of_measure, 'maki') # TODO: update this test once BASE-1265 is addressed, expected for 9.5 with gxgrd.Grid.minimum_curvature(feed_data, cs=0.25, bkd=500, edgclp=5) as grd: self.assertEqual((grd.nx, grd.ny), (199, 127)) self.assertAlmostEqual(grd.statistics()['sd'], 23.4893997876449, 5) def test_mask(self): self.start() with gxgrd.Grid.open(self.g1f) as g: with gxgrd.Grid.copy(g) as g1: data = g1.np() data[:, 50] = np.nan mask = gxgrd.Grid.from_data_array(data) g1.mask(mask) data = g1.np() self.assertEqual(np.nansum(data[:, 50]), 0) ############################################################################################### if 
__name__ == '__main__': unittest.main() <file_sep>/geosoft/gxapi/__init__.py ### extends 'init_empty.py' ### block Header # NOTICE: The code generator will not replace the code in this block import geosoft class ref_value: def __init__(self, value=None): self._value = None @property def value(self): return self._value @value.setter def value(self, value): self._value = value class bool_ref(ref_value): def __init__(self, value=False): self._value = value class int_ref(ref_value): def __init__(self, value=0): self._value = value class float_ref(ref_value): def __init__(self, value=0.0): self._value = value class str_ref(ref_value): def __init__(self, value=""): self._value = value class GXCancel(SystemExit): """ A subclass of `SystemExit <https://docs.python.org/3/library/exceptions.html#SystemExit>`_ which is raised when a script should cleanly exit due to a cancellation condition. Generally not caught since it will have the same effect as :exc:`SystemExit` for both standalone and Oasis montaj extension scripts. Raised from within API by :func:`geosoft.gxapi.GXSYS.cancel()` .. versionadded:: 9.1 """ pass class GXExit(SystemExit): """ A subclass of `SystemExit <https://docs.python.org/3/library/exceptions.html#SystemExit>`_ which is raised when a script should cleanly exit due to a completion condition. Generally not caught since it will have the same effect as :exc:`SystemExit` for both standalone and Oasis montaj extension scripts. Raised from within API by :func:`geosoft.gxapi.GXSYS.exit()` .. versionadded:: 9.1 """ pass class GXAPIError(RuntimeError): """ A subclass of `RuntimeError <https://docs.python.org/3/library/exceptions.html#RuntimeError>`_ which is raised whenever the GX Python API encounters initialization issues or other API violations. It generally indicates a bug in Python code. .. 
versionadded:: 9.1 """ pass class GXError(geosoft.GXRuntimeError): """ A subclass of `RuntimeError <https://docs.python.org/3/library/exceptions.html#RuntimeError>`_ which is raised whenever a GX Python API call encounters an error. Often the message string of these errors are informative to the user (e.g. File 'x' is locked in another application) but there could be cases where this is not the case. In most cases an attribute, :attr:`number`, is also available on the exception object that matches the number in the :code:`geosoft.ger` file. These numbers instead of the string (which could change or even be translated) should be used to identify and handle very specific exceptions. .. versionadded:: 9.1 """ def __init__(self, message, module, error_number): super(geosoft.GXRuntimeError, self).__init__(message) self.module = module self.error_number = error_number ### endblock Header ### block Constants # NOTICE: Do not edit anything here, it is generated code import struct # # GX3DC Constants # # # GX3DN Constants # # # GX3DV Constants # # # GEO3DV_OPEN constants # # Open Modes #: Geo3dv mview read GEO3DV_MVIEW_READ = 0 #: Geo3dv mview writeold GEO3DV_MVIEW_WRITEOLD = 2 # # GXAGG Constants # # # AGG_LAYER_ZONE constants # # Aggregate Layer Zone defines #: If a color table with no color transform is passed #: it will be used with the default zoning #: method of the data, which is usually #: `AGG_LAYER_ZONE_EQUALAREA <geosoft.gxapi.AGG_LAYER_ZONE_EQUALAREA>`. AGG_LAYER_ZONE_DEFAULT = 0 #: Linear Distribution AGG_LAYER_ZONE_LINEAR = 1 #: Normal Distribution AGG_LAYER_ZONE_NORMAL = 2 #: Equal Area Distribution AGG_LAYER_ZONE_EQUALAREA = 3 #: If `AGG_LAYER_ZONE_SHADE <geosoft.gxapi.AGG_LAYER_ZONE_SHADE>` is specified, a shaded relief #: layer is created from the specified grid. A new grid #: file will also be created to hold the shaded relief #: image data. This file will have the same name as the #: original grid but with "_s" added to the root name. 
#: It will always be located in the workspace directory #: regardless of the location of the original source image. #: If the file already exists, it will used as it is. #: Shading is always at inclination = declination = 45 deg. #: with default scaling. If different shading is desired, #: use the `layer_shade_img <geosoft.gxapi.GXAGG.layer_shade_img>` method. AGG_LAYER_ZONE_SHADE = 4 #: Log Linear Distribution AGG_LAYER_ZONE_LOGLINEAR = 5 #: The last `GXITR <geosoft.gxapi.GXITR>` used to display this #: data will be used if it exists. If it #: does not exist, the behaviour is the same #: as `AGG_LAYER_ZONE_DEFAULT <geosoft.gxapi.AGG_LAYER_ZONE_DEFAULT>`. AGG_LAYER_ZONE_LAST = 6 # # AGG_MODEL constants # # Aggregation color model defines #: Hue Saturation Value AGG_MODEL_HSV = 1 #: Red Green Blue AGG_MODEL_RGB = 2 #: Cyan Magenta Yellow AGG_MODEL_CMY = 3 # # AGG_RENDER constants # # Aggregation rendering modes #: Add all the colors together AGG_RENDER_ADD = 0 #: Adds and divides by the number of non-dummy colors AGG_RENDER_BLEND = 1 #: Adds and divides by the number of colors AGG_RENDER_BLEND_ALL = 2 #: Multiplies current colors by the input's colors over 255 (input works as the percentage of color to preserve) AGG_RENDER_FADE = 3 # # GXBF Constants # # # BF_BYTEORDER constants # # Byte order for read/write #: Least significant byte first (Intel, Windows) BF_BYTEORDER_LSB = 256 #: Most significant byte first (Mororola, Sun) BF_BYTEORDER_MSB = 512 # # BF_CLOSE constants # # Close Flags #: Bf keep BF_KEEP = 0 #: Bf delete BF_DELETE = 1 # # BF_ENCODE constants # # The way a string is encoded #: String is stored as ANSI code page BF_ENCODE_ANSI = 0 #: String is stored as :ref:`UTF8` BF_ENCODE_UTF8 = 1 # # BF_OPEN_MODE constants # # Open Status #: Read only BF_READ = 0 #: Erases existing file BF_READWRITE_NEW = 1 #: File must pre-exist BF_READWRITE_OLD = 2 #: Open and append onto pre-existing file (cannot be read from) BF_READWRITE_APP = 4 # # BF_SEEK constants # # Seek 
Location #: Start BF_SEEK_START = 0 #: Current BF_SEEK_CURRENT = 1 #: Eof BF_SEEK_EOF = 2 # # GXCSYMB Constants # # # CSYMB_COLOR constants # # Color Symbol filling defines #: Draw Edges only CSYMB_COLOR_EDGE = 0 #: Fill Symbols CSYMB_COLOR_FILL = 1 # # GXDAT Constants # # # DAT_FILE constants # # Type of grid #: Grid DAT_FILE_GRID = 1 #: Image DAT_FILE_IMAGE = 2 # # DAT_FILE_FORM constants # # Type of form #: Open DAT_FILE_FORM_OPEN = 0 #: Save DAT_FILE_FORM_SAVE = 1 # # DAT_XGD constants # # `GXDAT <geosoft.gxapi.GXDAT>` Open modes #: Read DAT_XGD_READ = 0 #: New DAT_XGD_NEW = 1 #: Write DAT_XGD_WRITE = 2 # # GXDATALINKD Constants # # # GXDATAMINE Constants # # # GIS_DMTYPE constants # # Datamine file types #: String GIS_DMTYPE_STRING = 2 #: Wireframe tr GIS_DMTYPE_WIREFRAME_TR = 8 #: Dtm GIS_DMTYPE_DTM = 16 #: Blockmodel GIS_DMTYPE_BLOCKMODEL = 32 #: Wireframe pt GIS_DMTYPE_WIREFRAME_PT = 64 #: Pointdata GIS_DMTYPE_POINTDATA = 1024 # # GXDB Constants # # # DB_ACTIVITY_BLOB constants # # Activity Blob #: Db activity blob DB_ACTIVITY_BLOB = "OE.DB_ACTIVITY_LOG" # # DB_CATEGORY_BLOB constants # # Blob Categories #: Normal DB_CATEGORY_BLOB_NORMAL = 0 # # DB_CATEGORY_CHAN constants # # Channel Categories # For STRING type channels, use negative integers # to specify channel width. For example, use -10 # to define a string channel with up to 10 characters. # Use the GS_SIMPLE_TYPE() macro to convert to INT,REAL or string. 
#: Byte DB_CATEGORY_CHAN_BYTE = 0 #: Ushort DB_CATEGORY_CHAN_USHORT = 1 #: Short DB_CATEGORY_CHAN_SHORT = 2 #: Long DB_CATEGORY_CHAN_LONG = 3 #: Float DB_CATEGORY_CHAN_FLOAT = 4 #: Double DB_CATEGORY_CHAN_DOUBLE = 5 #: Ubyte DB_CATEGORY_CHAN_UBYTE = 6 #: Ulong DB_CATEGORY_CHAN_ULONG = 7 #: Long64 DB_CATEGORY_CHAN_LONG64 = 8 #: Ulong64 DB_CATEGORY_CHAN_ULONG64 = 9 # # DB_CATEGORY_LINE constants # # Line Categories #: Flight DB_CATEGORY_LINE_FLIGHT = 100 #: Group DB_CATEGORY_LINE_GROUP = 200 #: Same as `DB_CATEGORY_LINE_FLIGHT <geosoft.gxapi.DB_CATEGORY_LINE_FLIGHT>` DB_CATEGORY_LINE_NORMAL = 100 # # DB_CATEGORY_USER constants # # User Categories #: Normal DB_CATEGORY_USER_NORMAL = 0 # # DB_CHAN_FORMAT constants # # Channel formats #: Normal DB_CHAN_FORMAT_NORMAL = 0 #: Exp DB_CHAN_FORMAT_EXP = 1 #: Time DB_CHAN_FORMAT_TIME = 2 #: Date DB_CHAN_FORMAT_DATE = 3 #: Geogr DB_CHAN_FORMAT_GEOGR = 4 #: Sigdig DB_CHAN_FORMAT_SIGDIG = 5 #: Hex DB_CHAN_FORMAT_HEX = 6 # # DB_CHAN_PROTECTION constants # # Channel Read-only Protection Status #: Db chan unprotected DB_CHAN_UNPROTECTED = 0 #: Db chan protected DB_CHAN_PROTECTED = 1 # # DB_CHAN_SYMBOL constants # # Channel symbol for special channels #: Db chan x DB_CHAN_X = 0 #: Db chan y DB_CHAN_Y = 1 #: Db chan z DB_CHAN_Z = 2 # # DB_COMP constants # # Supported compression levels #: None DB_COMP_NONE = 0 #: Speed DB_COMP_SPEED = 1 #: Size DB_COMP_SIZE = 2 # # DB_COORDPAIR constants # # Used to indicate the matching coordinate pair of a channel. 
#: None DB_COORDPAIR_NONE = 0 #: X DB_COORDPAIR_X = 1 #: Y DB_COORDPAIR_Y = 2 # # DB_GROUP_CLASS_SIZE constants # # Class name max size #: Db group class size DB_GROUP_CLASS_SIZE = 16 # # DB_INFO constants # # Integer Database Information #: Maximum Number of Blobs in the Database DB_INFO_BLOBS_MAX = 0 #: Maximum number of lines in the database DB_INFO_LINES_MAX = 1 #: Maximum Number of Channels in the Database DB_INFO_CHANS_MAX = 2 #: Maximum number of Users DB_INFO_USERS_MAX = 3 #: Number of Blobs currently used DB_INFO_BLOBS_USED = 4 #: Number of Lines currently used DB_INFO_LINES_USED = 5 #: Number of Channels currently used DB_INFO_CHANS_USED = 6 #: Number of Users in the database DB_INFO_USERS_USED = 7 #: Size of the smallest database block in bytes DB_INFO_PAGE_SIZE = 8 #: Number of Blocks in Entire Database DB_INFO_DATA_SIZE = 9 #: Number of Lost Blocks in the Database DB_INFO_LOST_SIZE = 10 #: Number of Free Blocks in the Database DB_INFO_FREE_SIZE = 11 #: Compression Level in use DB_INFO_COMP_LEVEL = 16 #: Number of pages given to blobs DB_INFO_BLOB_SIZE = 19 #: Entire Size of File (in kbytes) DB_INFO_FILE_SIZE = 17 #: Size of Index (in kbytes) DB_INFO_INDEX_SIZE = 18 #: Naximum number of bytes in a block DB_INFO_MAX_BLOCK_SIZE = 20 #: Will changes to this database be lost when this database is closed? 
DB_INFO_CHANGESLOST = 21 # # DB_LINE_LABEL_FORMAT constants # # Line Label Formats #: Line DB_LINE_LABEL_FORMAT_LINE = 1 #: Version DB_LINE_LABEL_FORMAT_VERSION = 2 #: Type DB_LINE_LABEL_FORMAT_TYPE = 4 #: Flight DB_LINE_LABEL_FORMAT_FLIGHT = 8 #: Full DB_LINE_LABEL_FORMAT_FULL = 15 #: Date DB_LINE_LABEL_FORMAT_DATE = 16 #: Link DB_LINE_LABEL_FORMAT_LINK = 7 # # DB_LINE_SELECT constants # # Select modes #: Include DB_LINE_SELECT_INCLUDE = 0 #: Exclude DB_LINE_SELECT_EXCLUDE = 1 # # DB_LINE_TYPE constants # # Line types #: Normal DB_LINE_TYPE_NORMAL = 0 #: Base DB_LINE_TYPE_BASE = 1 #: Tie DB_LINE_TYPE_TIE = 2 #: Test DB_LINE_TYPE_TEST = 3 #: Trend DB_LINE_TYPE_TREND = 4 #: Special DB_LINE_TYPE_SPECIAL = 5 #: Random DB_LINE_TYPE_RANDOM = 6 # # DB_LOCK constants # # Lock Modes #: Used only by GetSymbLock_DB DB_LOCK_NONE = -1 #: Readonly DB_LOCK_READONLY = 0 #: Readwrite DB_LOCK_READWRITE = 1 # # DB_NAME constants # # Get Database file names #: File DB_NAME_FILE = 0 # # DB_OWN constants # # Symbol Ownership #: Shared DB_OWN_SHARED = 0 #: User DB_OWN_USER = 1 # # DB_SYMB_TYPE constants # # Symbol types #: Db symb blob DB_SYMB_BLOB = 0 #: Db symb line DB_SYMB_LINE = 1 #: Db symb chan DB_SYMB_CHAN = 2 #: Db symb user DB_SYMB_USER = 3 # # DB_SYMB_NAME_SIZE constants # # Size of Symbol Names #: Same `STR_DB_SYMBOL <geosoft.gxapi.STR_DB_SYMBOL>` DB_SYMB_NAME_SIZE = 64 # # DB_WAIT constants # # Wait Times #: None DB_WAIT_NONE = 0 #: Infinity DB_WAIT_INFINITY = -1 # # DB_ARRAY_BASETYPE constants # # Array channel base coordinate type #: None DB_ARRAY_BASETYPE_NONE = 0 #: Time windows DB_ARRAY_BASETYPE_TIME_WINDOWS = 1 #: Times DB_ARRAY_BASETYPE_TIMES = 2 #: Frequencies DB_ARRAY_BASETYPE_FREQUENCIES = 3 #: Elevations DB_ARRAY_BASETYPE_ELEVATIONS = 4 #: Depths DB_ARRAY_BASETYPE_DEPTHS = 5 #: Velocities DB_ARRAY_BASETYPE_VELOCITIES = 6 #: Discrete time windows DB_ARRAY_BASETYPE_DISCRETE_TIME_WINDOWS = 7 # # NULLSYMB constants # # Database Null #: Nullsymb NULLSYMB = -1 # # 
GXDBREAD Constants # # # GXDBWRITE Constants # # # GXDSEL Constants # # # DSEL_PICTURE_QUALITY constants # # Line Label Formats #: Default DSEL_PICTURE_QUALITY_DEFAULT = 0 #: Lossless DSEL_PICTURE_QUALITY_LOSSLESS = 1 #: Semilossy DSEL_PICTURE_QUALITY_SEMILOSSY = 2 #: Lossy DSEL_PICTURE_QUALITY_LOSSY = 3 #: Native DSEL_PICTURE_QUALITY_NATIVE = 4 #: Ecw DSEL_PICTURE_QUALITY_ECW = 5 #: Jpg DSEL_PICTURE_QUALITY_JPG = 6 #: Png DSEL_PICTURE_QUALITY_PNG = 7 #: Bmp DSEL_PICTURE_QUALITY_BMP = 8 #: Tif DSEL_PICTURE_QUALITY_TIF = 9 # # GXE3DV Constants # # # GXEXT Constants # # # GXGEO Constants # # # GXGEOSOFT Constants # # # CRC_INIT_VALUE constants # # Initial value for starting a CRC #: 0xFFFFFFFF CRC_INIT_VALUE = 4294967295 # # DATE_FORMAT constants # # Old Date formats #: Standard Date (YYYY/MM/DD, YY/MM/DD, YYYYMMDD or YYMMDD, space or / delimited) DATE_FORMAT_YYYYMMDD = 1 #: Date (DD/MM/YYYY or DD/MM/YY century 20 if YY>50, DISC compliant) DATE_FORMAT_DDMMYYYY = 2 #: Date (MM/DD/YYYY or MM/DD/YY century 19) DATE_FORMAT_MMDDYYYY = 3 # # GEO_DUMMY constants # # Special numbers indicating NULLL #: Integer Dummy (-2147483647) iDUMMY = -2147483647 #: Floating Point Dummy (-1.0E32) rDUMMY = -1.0E32 # # GEO_FULL_LIMITS constants # # Data ranges of all Geosoft types #: (signed char ) 127 GS_S1MX = 127 #: (signed char ) -126 GS_S1MN = -126 #: (signed char ) -127 GS_S1DM = -127 #: (unsigned char ) 254U GS_U1MX = 254 #: (unsigned char ) 0U GS_U1MN = 0 #: (unsigned char ) 255U GS_U1DM = 255 #: (short ) 32767 GS_S2MX = 32767 #: (short ) -32766 GS_S2MN = -32766 #: (short ) -32767 GS_S2DM = -32767 #: (unsigned short) 65534U GS_U2MX = 65534 #: (unsigned short) 0U GS_U2MN = 0 #: (unsigned short) 65535U GS_U2DM = 65535 #: 2147483647L GS_S4MX = 2147483647 #: -2147483646L GS_S4MN = -2147483646 #: -2147483647L GS_S4DM = -2147483647 #: (unsigned long ) 0xFFFFFFFE GS_U4MX = struct.unpack('>I', bytes.fromhex('FFFFFFFE'))[0] #: (unsigned long ) 0x00000000 GS_U4MN = struct.unpack('>I', 
bytes.fromhex('00000000'))[0] #: (unsigned long ) 0xFFFFFFFF GS_U4DM = struct.unpack('>I', bytes.fromhex('FFFFFFFF'))[0] #: (__GS_INT64 ) 0x7FFFFFFFFFFFFFFF GS_S8MX = struct.unpack('>q', bytes.fromhex('7FFFFFFFFFFFFFFF'))[0] #: (__GS_INT64 ) 0x8000000000000001 GS_S8MN = struct.unpack('>q', bytes.fromhex('8000000000000001'))[0] #: (__GS_INT64 ) 0x8000000000000000 GS_S8DM = struct.unpack('>q', bytes.fromhex('8000000000000000'))[0] #: (__GS_UINT64 ) 0xFFFFFFFFFFFFFFFE GS_U8MX = struct.unpack('>Q', bytes.fromhex('FFFFFFFFFFFFFFFE'))[0] #: (__GS_UINT64 ) 0x0000000000000000 GS_U8MN = struct.unpack('>Q', bytes.fromhex('0000000000000000'))[0] #: (__GS_UINT64 ) 0xFFFFFFFFFFFFFFFF GS_U8DM = struct.unpack('>Q', bytes.fromhex('FFFFFFFFFFFFFFFF'))[0] #: (float ) 1.0E32 (In C these must be declared as external constants:) GS_R4MX = 1.0E32 #: (float ) -0.9E32 const float r4min=(float)-0.9E32, GS_R4MN = -0.9E32 #: (float ) -1.0E32 r4max=(float)1.0E32, #: r4dum=(float)-1.0E32; GS_R4DM = -1.0E32 #: (double ) 1.0E32 GS_R8MX = 1.0E32 #: (double ) -0.9E32 GS_R8MN = -0.9E+32 #: (double ) -1.0E32 GS_R8DM = -1.0E+32 #: (float ) 1.0E-32 GS_R4EPSILON = 1.0E-32 #: (double ) 1.0E-32 GS_R8EPSILON = 1.0E-32 # # GEO_LIMITS constants # # Data ranges of numbers #: Smallest Integer (-2147483646) iMIN = -2147483646 #: Largest Integer (2147483647) iMAX = 2147483647 #: Smallest Floating Point (-0.9E32) rMIN = -0.9E32 #: Largest Floating Point (1.0E32) rMAX = 1.0E32 # # GEO_STRING_SIZE constants # # Default string sized for different uses # GX's must use these unless there is a # very good reason not to. The path strings # here are generally larger than what is possible # in the OS, but it is defined as such for Unicode # conversion reasons. 
#: Default Size for almost everything (128 characters) STR_DEFAULT = 128 #: Default Size for a short string (64 characters) STR_DEFAULT_SHORT = 64 #: Default Size for a long string (1024 characters) STR_DEFAULT_LONG = 1024 #: Default Size for an error string (2048 characters) STR_ERROR = 2048 #: Default Size for a long string (16384 characters) STR_VERY_LONG = 16384 #: Name of a View (2080) STR_VIEW = 2080 #: Name of a Group (1040) STR_GROUP = 1040 #: Combined View/Group Name (2080) STR_VIEW_GROUP = 2080 #: Name of a file (1040) STR_FILE = 1040 #: Name of multiple files (16384) STR_MULTI_FILE = 16384 #: Name of database symbol (64) STR_DB_SYMBOL = 64 #: Size of strings for GXF projection info (160). STR_GXF = 160 #: Maximum path length (1040) STR_MAX_PATH = 1040 #: Multi-file path (16384) STR_MULTI_PATH = 16384 #: Same as `STR_FILE <geosoft.gxapi.STR_FILE>` GS_MAX_PATH = 1040 #: Same as `STR_MULTI_FILE <geosoft.gxapi.STR_MULTI_FILE>` GS_MULTI_PATH = 16384 # # GEO_BOOL constants # # Boolean values #: False GS_FALSE = 0 #: True GS_TRUE = 1 # # GEO_VAR constants # # Variable types. # Use -X for strings of X length #: Integer (long) GS_INT = 0 #: Floating Point (double) GS_REAL = 1 # # GS_FORMATS constants # # Special use data types. String are indicated by a # negative maximum string length (including NULL). 
#: Standard numbers (-134.534)
FORMAT_DECIMAL = 0
#: Decimals imply number of significant digits
FORMAT_SIG_DIG = 5
#: Exponential notation (-1.345e45)
FORMAT_EXP = 1
#: Standard Time (HH:MM:SS.SSSS)
FORMAT_TIME_COLON = 2
#: Time (HH.MMSSSSSSS)
FORMAT_TIME_HMS = 8
#: Time (HHMMSS)
FORMAT_TIME_HHMMSS = 9
#: Standard Date (YYYY/MM/DD, YY/MM/DD, YYYYMMDD or YYMMDD, space or / delimited)
FORMAT_DATE_YYYYMMDD = 3
#: Date (DD/MM/YYYY or DD/MM/YY century 20 if YY>50, DISC compliant)
FORMAT_DATE_DDMMYYYY = 6
#: Date (MM/DD/YYYY or MM/DD/YY century 19)
FORMAT_DATE_MMDDYYYY = 7
#: Standard Geographical (DEG.MM.SS.SSS)
FORMAT_GEOGRAPHIC = 4
#: GeoGraph (DEG:MM:SS.SSS)
FORMAT_GEOGRAPHIC_1 = 10
#: GeoGraph (DEG.MMSSSSS)
FORMAT_GEOGRAPHIC_2 = 11
#: GeoGraph (DEGMMmmmm or DEGMM.mmmm or DEG.MM.mmmm) (mmmm: decimal minute)
FORMAT_GEOGRAPHIC_3 = 12

#
# GS_TYPES constants
#
# Special use data types. String are indicated by a
# negative maximum string length (including NULL).

#: Signed Byte
GS_BYTE = 0
#: Unsigned Short
GS_USHORT = 1
#: Signed Short
GS_SHORT = 2
#: Signed Long
GS_LONG = 3
#: 32-Bit floating point
GS_FLOAT = 4
#: 64-Bit floating point
GS_DOUBLE = 5
#: Unsigned byte
GS_UBYTE = 6
#: Unsigned Long
GS_ULONG = 7
#: 64-Bit signed long
GS_LONG64 = 8
#: 64-Bit unsigned long
GS_ULONG64 = 9
#: 3 x 32-Bit floating point
GS_FLOAT3D = 10
#: 3 x 64-Bit floating point
GS_DOUBLE3D = 11
#: 2 x 32-Bit floating point
GS_FLOAT2D = 12
#: 2 x 64-Bit floating point
GS_DOUBLE2D = 13
#: Maximum supported type (`GS_DOUBLE2D <geosoft.gxapi.GS_DOUBLE2D>`)
GS_MAXTYPE = 13
#: Default. Can be used only when a method specifically allows a default type.
GS_TYPE_DEFAULT = -32767

#
# SYS_CRYPT_KEY constants
#
# Special Encryption Keys

#: Using the current license key
SYS_CRYPT_LICENSE_KEY = "{***LICENSE_KEY***}"
#: Use the current computer ID
SYS_CRYPT_COMPUTER_ID = "{***COMPUTER_ID***}"
#: Use the non-changing computer ID
SYS_CRYPT_GLOBAL_ID = "{***GLOBAL_COMPUTER_ID***}"

#
# TIME_FORMAT constants
#
# Old Time formats

#: Standard Time (HH:MM:SS.SSSS)
TIME_FORMAT_COLON = 1
#: Time (HH.MMSSSSSSS)
TIME_FORMAT_HMS = 2

#
# GXGEOSTRING Constants
#

#
# GEOSTRING_OPEN constants
#
# Open Modes

#: Read
GEOSTRING_OPEN_READ = 0
#: Readwrite
GEOSTRING_OPEN_READWRITE = 1

#
# SECTION_ORIENTATION constants
#
# Section orientation types

#: Unknown
SECTION_ORIENTATION_UNKNOWN = 0
#: Plan
SECTION_ORIENTATION_PLAN = 1
#: Section
SECTION_ORIENTATION_SECTION = 2
#: Crooked
SECTION_ORIENTATION_CROOKED = 2
#: Gmsys
SECTION_ORIENTATION_GMSYS = 2

#
# GXGIS Constants
#

#
# GIS_MAP2D constants
#
# View type to create

#: Plan view
GIS_MAP2D_PLAN = 0
#: Section view, East-West
GIS_MAP2D_EWSECTION = 1
#: Section view, North-South
GIS_MAP2D_NSSECTION = 2

#
# GIS_TYPE constants
#
# Type of file

#: Mapinfo Files
GIS_TYPE_MAPINFO = 1
#: ArcView files
GIS_TYPE_ARCVIEW = 2
#: Microstation DGN files
GIS_TYPE_DGN = 3
#: Surpac `GXSTR <geosoft.gxapi.GXSTR>` and DTM files
GIS_TYPE_SURPAC = 4
#: Datamine DM files
GIS_TYPE_DATAMINE = 5
#: GEMCOM files
GIS_TYPE_GEMCOM = 6
#: MICROMINE files
GIS_TYPE_MICROMINE = 7
#: MINESIGHT files
GIS_TYPE_MINESIGHT = 8

#
# GXGRID3D Constants
#

#
# GRID3D_TYPE constants
#
# Type of Voxset

#: DOUBLE
GRID3D_DOUBLE = 0
#: VECTOR
GRID3D_VECTOR = 1
#: THEMATIC
GRID3D_THEMATIC = 2

#
# GXHGD Constants
#

#
# GXHXYZ Constants
#

#
# GXIGRF Constants
#

#
# GXIMG Constants
#

#
# IMG_DISPLAY_PROPERTY constants
#
# Image display property

#: Shading inclination
IMG_SHADING_INCLINATION = 0
#: Shading declination
IMG_SHADING_DECLINATION = 1
#: Shading scale
IMG_SHADING_SCALE = 2
#: Shading contrast (0-1)
IMG_SHADING_CONTRAST = 3
#: Shading brightness (0-1)
IMG_SHADING_BRIGHTNESS = 4
#: Shading wet look (0 or 1)?
IMG_SHADING_WETLOOK = 5
#: Zone colours reversed from originals (0 or 1)?
IMG_COLOURS_REVERSED = 6
#: Is smoothing enabled (0 or 1)?
IMG_SMOOTHING_ENABLED = 7
#: Is shading enabled (0 or 1)?
IMG_SHADING_ENABLED = 8

#
# IMG_FAULT constants
#
# Fault type

#: Polyline
IMG_FAULT_POLYLINE = 0
#: Polygon
IMG_FAULT_POLYGON = 1

#
# IMG_FILE constants
#
# Image open modes

#: Reading only
IMG_FILE_READONLY = 0
#: Reading and writting
IMG_FILE_READWRITE = 2
#: Allows you to open read-only grids to change the
#: projection or location information. If you can write
#: to the original grid (dat), the changed projection
#: or location information will be passed on to the grid,
#: otherwise changes will only occur in the .gi file.
IMG_FILE_READORWRITE = 3

#
# IMG_QUERY constants
#
# Information to Query

#: Iwrite
IMG_QUERY_iWRITE = 0
#: Ipg
IMG_QUERY_iPG = 1
#: Iwritepg
IMG_QUERY_iWRITEPG = 2
#: The element type used to open the `GXIMG <geosoft.gxapi.GXIMG>`.
IMG_QUERY_iIMGTYPE = 3
#: DATTYPE is the native element type of the `GXDAT <geosoft.gxapi.GXDAT>`.
#: Types are: 0 - byte
#:            1 - unsigned 16-bit short
#:            2 - 16-bit short
#:            3 - 32-bit long
#:            4 - 32-bit float
#:            5 - 64-bit double
IMG_QUERY_iDATTYPE = 4
#: Render modes are: 0 - interpolate
#:                   1 - pixelate
#:                   2 - color
IMG_QUERY_iRENDER = 5
#: Ikx
IMG_QUERY_iKX = 6
#: Inx
IMG_QUERY_iNX = 7
#: Iny
IMG_QUERY_iNY = 8
#: Inv
IMG_QUERY_iNV = 9
#: Ine
IMG_QUERY_iNE = 10
#: Rxo
IMG_QUERY_rXO = 11
#: Ryo
IMG_QUERY_rYO = 12
#: Rdx
IMG_QUERY_rDX = 13
#: Rdy
IMG_QUERY_rDY = 14
#: Rrot
IMG_QUERY_rROT = 15
#: Rbase
IMG_QUERY_rBASE = 16
#: Rmult
IMG_QUERY_rMULT = 17
#: Rcompression ratio
IMG_QUERY_rCOMPRESSION_RATIO = 18

#
# IMG_RELOCATE constants
#
# Relocation Style

#: Will fit the image to fill the specified area
IMG_RELOCATE_FIT = 0
#: Will maintain aspect ratio
IMG_RELOCATE_ASPECT = 1

#
# GXIMU Constants
#

#
# IMU_BOOL_OLAP constants
#
# Overlapping area option

#: Overlap values are averaged
IMU_BOOL_OLAP_AVE = 0
#: Overlap values use grid 1 value
IMU_BOOL_OLAP_1 = 1
#: Overlap values use grid 2 value
IMU_BOOL_OLAP_2 = 2
#: Overlap values use (grid 1 value - grid 2 value)
IMU_BOOL_OLAP_MINUS = 4

#
# IMU_BOOL_OPT constants
#
# Boolean logic option

#: Valid areas are only where grids overlap
IMU_BOOL_OPT_AND = 0
#: Valid areas are where either grid is a valid value
IMU_BOOL_OPT_OR = 1
#: Overlap areas are dummied
IMU_BOOL_OPT_XOR = 2

#
# IMU_BOOL_SIZING constants
#
# Sizing option

#: Output grid is sized to overlapping region
IMU_BOOL_SIZING_MIN = 0
#: Output grid is sized to grid 1
IMU_BOOL_SIZING_0 = 1
#: Output grid is sized to grid 2
IMU_BOOL_SIZING_1 = 2
#: Output grid is sized to maximum combined area of both grids
IMU_BOOL_SIZING_MAX = 3

#
# IMU_DOUBLE_CRC_BITS constants
#
# Bits to use in double CRC's

#: Exact CRC
IMU_DOUBLE_CRC_BITS_EXACT = 0
#: Default inaccuracy in double (10 Bits)
IMU_DOUBLE_CRC_BITS_DEFAULT = 10
#: Maximum number of inaccuracy bits (51 Bits)
IMU_DOUBLE_CRC_BITS_MAX = 51

#
# IMU_EXPAND_SHAPE constants
#
# Shape of output grid

#: Rectangle
IMU_EXPAND_SHAPE_RECTANGLE = 0
#: Square
IMU_EXPAND_SHAPE_SQUARE = 1

#
# IMU_FILL_ROLLOPT constants
#
# Defines for Grid Filling Method Options

#: Linear
IMU_FILL_ROLLOPT_LINEAR = 1
#: Square
IMU_FILL_ROLLOPT_SQUARE = 2

#
# IMU_FILT_DUMMY constants
#
# Settings for placing dummy values in grid if any of filter
# values are dummy

#: No
IMU_FILT_DUMMY_NO = 0
#: Yes
IMU_FILT_DUMMY_YES = 1

#
# IMU_FILT_FILE constants
#
# Flags which indicate if a file is to be used to read the
# filter values

#: No
IMU_FILT_FILE_NO = 0
#: Yes
IMU_FILT_FILE_YES = 1

#
# IMU_FILT_HZDRV constants
#
# Flags which indicate which type of horizontal derivative
# is being applied (X direction, Y direction, none at all)

#: No
IMU_FILT_HZDRV_NO = 0
#: X
IMU_FILT_HZDRV_X = 1
#: Y
IMU_FILT_HZDRV_Y = 2

#
# IMU_FLOAT_CRC_BITS constants
#
# Bits to use in float CRC's

#: Exact CRC
IMU_FLOAT_CRC_BITS_EXACT = 0
#: Default inaccuracy in floats (7 Bits)
IMU_FLOAT_CRC_BITS_DEFAULT = 7
#: Maximum number of inaccuracy bits (22 Bits)
IMU_FLOAT_CRC_BITS_MAX = 22

#
# IMU_MASK constants
#
# Defined options for masking grids

#: Inside
IMU_MASK_INSIDE = 0
#: Outside
IMU_MASK_OUTSIDE = 1

#
# IMU_STAT_FORCED constants
#
# Defined options for forcing recalculating the grid values

#: No
IMU_STAT_FORCED_NO = 0
#: Yes
IMU_STAT_FORCED_YES = 1

#
# IMU_TRANS constants
#
# Transpose Options available for `grid_trns <geosoft.gxapi.GXIMU.grid_trns>`
# implies original grid lines:

#: Can be ANY orientation
IMU_TRANS_DEFAULT = 0
#: MUST be parallel to Y-Axis
IMU_TRANS_Y = 1
#: MUST be parallel to X-Axis
IMU_TRANS_X = -1

#
# IMU_TREND constants
#
# Points in grid to use

#: All
IMU_TREND_ALL = 0
#: Edge
IMU_TREND_EDGE = 1

#
# IMU_WIND_COORD constants
#
# Output grid coordinate units

#: Imu wind grid
IMU_WIND_GRID = 0
#: Imu wind ground
IMU_WIND_GROUND = 1

#
# IMU_WIND_DUMMIES constants
#
# Option for handling out-of-range Z values

#: Imu wind dummy
IMU_WIND_DUMMY = 0
#: Imu wind clip
IMU_WIND_CLIP = 1

#
# IMU_XYZ_INDEX constants
#
# Flags whether to use grid index numbers as
# station numbers.

#: No
IMU_XYZ_INDEX_NO = 0
#: Yes
IMU_XYZ_INDEX_YES = 1

#
# IMU_XYZ_LABEL constants
#
# XYZ Label Flags

#: No
IMU_XYZ_LABEL_NO = 1
#: Yes
IMU_XYZ_LABEL_YES = 0

#
# GXIPJ Constants
#

#
# IPJ_3D_FLAG constants
#
# 3D Flags

#: Standard
IPJ_3D_FLAG_NONE = 0
#: Invert angle rotation during matrix creation
IPJ_3D_FLAG_INVERTANGLES = 1
#: Invert the Z plane to make up down.
IPJ_3D_FLAG_INVERTZ = 2
#: Apply rotations in a specific order, determined by pdParm[7]
IPJ_3D_FLAG_ORDER_ROTATION = 4

#
# IPJ_3D_ROTATE constants
#
# 3D Rotation Mode

#: Default
IPJ_3D_ROTATE_DEFAULT = 0
#: Xyz
IPJ_3D_ROTATE_XYZ = 1
#: Xzy
IPJ_3D_ROTATE_XZY = 2
#: Yxz
IPJ_3D_ROTATE_YXZ = 3
#: Yzx
IPJ_3D_ROTATE_YZX = 4
#: Zxy
IPJ_3D_ROTATE_ZXY = 5
#: Zyx
IPJ_3D_ROTATE_ZYX = 6

#
# IPJ_CSP constants
#
# Projection Setting

#: Scale
IPJ_CSP_SCALE = 0
#: Falseeast
IPJ_CSP_FALSEEAST = 1
#: Falsenorth
IPJ_CSP_FALSENORTH = 2
#: Latorigin
IPJ_CSP_LATORIGIN = 3
#: Lonorigin
IPJ_CSP_LONORIGIN = 4
#: Parallel 1
IPJ_CSP_PARALLEL_1 = 5
#: Parallel 2
IPJ_CSP_PARALLEL_2 = 6
#: Azimuth
IPJ_CSP_AZIMUTH = 7
#: Angle
IPJ_CSP_ANGLE = 8
#: Pointlat 1
IPJ_CSP_POINTLAT_1 = 9
#: Pointlon 1
IPJ_CSP_POINTLON_1 = 10
#: Pointlat 2
IPJ_CSP_POINTLAT_2 = 11
#: Pointlon 2
IPJ_CSP_POINTLON_2 = 12

#
# IPJ_NAME constants
#
# Project Name

#: Projected coordinate system name
IPJ_NAME_PCS = 0
#: Projection name
IPJ_NAME_PROJECTION = 1
#: Projection method name
IPJ_NAME_METHOD = 2
#: Datum name
IPJ_NAME_DATUM = 3
#: Ellipsoid name
IPJ_NAME_ELLIPSOID = 4
#: Local datum name
IPJ_NAME_LDATUM = 5
#: Unit abbreviation
IPJ_NAME_UNIT_ABBR = 6
#: Full unit name
IPJ_NAME_UNIT_FULL = 7
#: Projection type description
IPJ_NAME_TYPE = 8
#: Datum transform table name
IPJ_NAME_LLDATUM = 9
#: Projection method parameters in GXF order
IPJ_NAME_METHOD_PARMS = 10
#: Projection method parameters labels
IPJ_NAME_METHOD_LABEL = 11
#: Datum parameters (major axis, flattening, prime meridian)
IPJ_NAME_DATUM_PARMS = 12
#: local datum parameters (dX,dY,dZ,rX,rY,rZ,scale)
#: See GXF revision 3 for parameter list order and
#: specifications.
IPJ_NAME_LDATUM_PARMS = 13
#: Geoid name if known
IPJ_NAME_GEOID = 14
#: Local datum description
IPJ_NAME_LDATUMDESCRIPTION = 15
#: Projection method parameters in GXF order (Native units for eastings/northings)
IPJ_NAME_METHOD_PARMS_NATIVE = 16
#: Orientation parameters
IPJ_NAME_ORIENTATION_PARMS = 17

#
# IPJ_ORIENT constants
#
# Projection Orientation

#: no special orientation - plan view. All views in maps
#: created before v5.1.3 will return this value.
IPJ_ORIENT_DEFAULT = 0
#: A plan view with a reference elevation and
#: optional rotation.
IPJ_ORIENT_PLAN = 1
#: Has an azimuth and swing.
#: The section view projects all plotted objects
#: HORIZONTALLY onto the viewing plan in order to
#: preserve elevations, even if the section has a swing.
IPJ_ORIENT_SECTION = 2
#: Same as `IPJ_ORIENT_SECTION <geosoft.gxapi.IPJ_ORIENT_SECTION>`, but the projection is perpendicular
#: to the section, not horizonatl, so elevatins are not preserved
#: on swung sections.
IPJ_ORIENT_SECTION_NORMAL = 5
#: This simple section has no azimuth or swing defined;
#: only the depth is of importance, and it is output as
#: the Y parameter, increasing downward. Used (for instance)
#: for strip logs in Wholeplot.
IPJ_ORIENT_DEPTH_SECTION = 3
#: A 3D rotation/scaling/translation orientation
IPJ_ORIENT_3D = 4
#: A 3D matrix orientation
IPJ_ORIENT_3D_MATRIX = 7
#: This is a vertical section that follows a
#: curving path, like a river or survey traverse.
#: The horizontal section location is the distance along
#: the path, while the vertical axis gives the elevation.
IPJ_ORIENT_SECTION_CROOKED = 6

#
# IPJ_PARM_LST constants
#
# Projection List

#: Coordinatesystem
IPJ_PARM_LST_COORDINATESYSTEM = 0
#: Datum
IPJ_PARM_LST_DATUM = 1
#: Projection
IPJ_PARM_LST_PROJECTION = 2
#: Units
IPJ_PARM_LST_UNITS = 3
#: Localdatumdescription
IPJ_PARM_LST_LOCALDATUMDESCRIPTION = 4
#: Localdatumname
IPJ_PARM_LST_LOCALDATUMNAME = 5
#: Unitsdescription
IPJ_PARM_LST_UNITSDESCRIPTION = 6

#
# IPJ_TYPE constants
#
# `GXIPJ <geosoft.gxapi.GXIPJ>` Types

#: Read from a PRJ file:
#: string 1 - Source file name
#: string 2 and 3 are not used.
IPJ_TYPE_PRJ = 0
#: Projected coordinate system:
#: string 1 - POSC PCS name
#: string 2 - POSC Datum transform name
#: string 3 - not used.
IPJ_TYPE_PCS = 1
#: Geographic coordinate system:
#: string 1 - POSC Datum name
#: string 2 - POSC Datum transform name
#: string 3 - not used.
IPJ_TYPE_GCS = 2
#: Custom projection
#: string 1 - POSC Datum name
#: string 2 - POSC Datum transform name
#: string 3 - POSC Transform, "" if geographic
IPJ_TYPE_ANY = 3
#: Not used for `read <geosoft.gxapi.GXIPJ.read>`. This is used for
#: `source_type <geosoft.gxapi.GXIPJ.source_type>` to indicate no projection.
IPJ_TYPE_NONE = 4
#: Wrp
IPJ_TYPE_WRP = 5
#: tests the projection tables for internal consistency
#: and creates report files in the project directory.
#: string 1 - outout report file name
#: string 2 - ESRI coordinate strings file. This contains one
#:            ESRI coordinate string per line. Lines that
#:            start with '#' are skipped.
#: string 3 - not currently used
IPJ_TYPE_TEST = 6

#
# IPJ_UNIT constants
#
# Projection Unit Type

#: Abbreviation
IPJ_UNIT_ABBREVIATION = 0
#: Fullname
IPJ_UNIT_FULLNAME = 1

#
# IPJ_WARP constants
#
# Warp (Transformation) type

#: Matrix Warp
IPJ_WARP_MATRIX = -1
#: No warp
IPJ_WARP_NONE = 0
#: Translate only (needs 1 point)
IPJ_WARP_TRANS1 = 1
#: Translate, rotate, normal scale (needs 2 pts)
IPJ_WARP_TRANS2 = 2
#: Translate, rotate, scale X and Y (needs 3 pts or more, least-square fit)
IPJ_WARP_TRANS3 = 3
#: Quadrilateral warp (needs 4 points)
IPJ_WARP_QUAD = 4
#: Multipoint warp (needs at least 3 points)
IPJ_WARP_MULTIPOINT = 5
#: Convert from linear to log coords in X and/or Y
IPJ_WARP_LOG = 6
#: Multipoint warp in Y only (needs at least 3 points)
IPJ_WARP_MULTIPOINT_Y = 7

#
# GXITR Constants
#

#
# ITR_COLOR_MODEL constants
#
# `GXITR <geosoft.gxapi.GXITR>` Color Model defines

#: Hsv
ITR_COLOR_MODEL_HSV = 1
#: Rgb
ITR_COLOR_MODEL_RGB = 2
#: Cmy
ITR_COLOR_MODEL_CMY = 3

#
# ITR_POWER constants
#
# Power Zoning defines

#: Power of 10
ITR_POWER_10 = 0
#: Exponential
ITR_POWER_EXP = 1

#
# ITR_ZONE constants
#
# Zoning Methods

#: Default
ITR_ZONE_DEFAULT = 0
#: Linear
ITR_ZONE_LINEAR = 1
#: Normal
ITR_ZONE_NORMAL = 2
#: Equalarea
ITR_ZONE_EQUALAREA = 3
#: Shade
ITR_ZONE_SHADE = 4
#: Loglinear
ITR_ZONE_LOGLINEAR = 5

#
# ITR_ZONE_MODEL constants
#
# `GXITR <geosoft.gxapi.GXITR>` Zone Model defines

#: The `GXITR <geosoft.gxapi.GXITR>` has no numeric zones defined (e.g. from a TBL file)
ITR_ZONE_MODEL_NOZONE = -1
#: There is no specific zone model defined.
ITR_ZONE_MODEL_NONE = 0
#: The `GXITR <geosoft.gxapi.GXITR>` is set up with a linear transform.
ITR_ZONE_MODEL_LINEAR = 1
#: The `GXITR <geosoft.gxapi.GXITR>` is set up with a normal distribution transform.
ITR_ZONE_MODEL_NORMAL = 2
#: The `GXITR <geosoft.gxapi.GXITR>` is set up with an equal area transform.
ITR_ZONE_MODEL_EQUAL = 3
#: The `GXITR <geosoft.gxapi.GXITR>` is set up with a log-linear transform.
ITR_MODEL_LOGLIN = 4
#: The `GXITR <geosoft.gxapi.GXITR>` is set up with a log-linear transform.
ITR_ZONE_MODEL_LOGLIN = 4

#
# GXKML Constants
#

#
# KML_ALT constants
#
# KML Altitude values.

#: Clamptoground
KML_ALT_CLAMPTOGROUND = 0
#: Relativetoground
KML_ALT_RELATIVETOGROUND = 1
#: Absolute
KML_ALT_ABSOLUTE = 2

#
# GXLAYOUT Constants
#

#
# LAYOUT_CONSTR constants
#
# Layout constraint specifiers

#: Adjust rectangle's left side
LAYOUT_CONSTR_LEFT = 0
#: Adjust rectangle's right side
LAYOUT_CONSTR_RIGHT = 1
#: Adjust rectangle's top side
LAYOUT_CONSTR_TOP = 2
#: Adjust rectangle's bottom side
LAYOUT_CONSTR_BOTTOM = 3
#: Adjust rectangle's width
LAYOUT_CONSTR_WIDTH = 4
#: Adjust rectangle's height
LAYOUT_CONSTR_HEIGHT = 5
#: Center rectangle with respect to width
LAYOUT_CONSTR_HCENTER = 6
#: Center rectangle with respect to height
LAYOUT_CONSTR_VCENTER = 7
#: Move rectangle, with respect to left
LAYOUT_CONSTR_MOVEL = 8
#: Move rectangle, with respect to right
LAYOUT_CONSTR_MOVER = 9
#: Move rectangle, with respect to top
LAYOUT_CONSTR_MOVET = 10
#: Move rectangle, with respect to bottom
LAYOUT_CONSTR_MOVEB = 11

#
# GXLL2 Constants
#

#
# GXLPT Constants
#

#
# GXLST Constants
#

#
# LST_ITEM constants
#
# `GXLST <geosoft.gxapi.GXLST>` data access

#: Access the "Name" part of the `GXLST <geosoft.gxapi.GXLST>` item.
LST_ITEM_NAME = 0
#: Access the "Value" part of the `GXLST <geosoft.gxapi.GXLST>` item.
LST_ITEM_VALUE = 1

#
# GXLTB Constants
#

#
# LTB_CASE constants
#
# Case handling of `GXLTB <geosoft.gxapi.GXLTB>` strings

#: Ignore case
LTB_CASE_INSENSITIVE = 0
#: Case is used
LTB_CASE_SENSITIVE = 1

#
# LTB_CONLST constants
#
# Matching types

#: Exact
LTB_CONLST_EXACT = 0
#: Any
LTB_CONLST_ANY = 1

#
# LTB_DELIM constants
#
# Types of `GXLTB <geosoft.gxapi.GXLTB>` Delimiters

#: Spaces
LTB_DELIM_SPACE = 0
#: Commas
LTB_DELIM_COMMA = 1
#: Spaces and Commas
LTB_DELIM_SPACECOMMA = 2

#
# LTB_TYPE constants
#
# Types of `GXLTB <geosoft.gxapi.GXLTB>` Headers

#: Has a header
LTB_TYPE_HEADER = 0
#: Has no header
LTB_TYPE_NOHEADER = 1

#
# GXMAP Constants
#

#
# DUPMAP constants
#
# Duplicate Modes

#: Blank
DUPMAP_BLANK = 0
#: Copy all current contents
DUPMAP_COPY = 1
#: Copy all current contents and save text for pre-6.2 versions.
DUPMAP_COPY_PRE62 = 2

#
# MAP_EXPORT_BITS constants
#
# Color Types

#: 32 Bit Color (8-bit alpha)
MAP_EXPORT_BITS_32 = 32
#: 24 Bit Color
MAP_EXPORT_BITS_24 = 24
#: 8 Bit Gray Scale
MAP_EXPORT_BITS_GREY8 = 9
#: 8 Bit Color
MAP_EXPORT_BITS_8 = 8
#: 4 Bit Gray Scale
MAP_EXPORT_BITS_GREY4 = 5
#: 4 Bit Color
MAP_EXPORT_BITS_4 = 4
#: 1 Bit Gray Scale
MAP_EXPORT_BITS_GREY1 = 1
#: Default Resolution
MAP_EXPORT_BITS_DEFAULT = 0

#
# MAP_EXPORT_FORMAT constants
#
# Export Formats
# Format   Description                Type
# =======  =========================  ====

#: "PLT"  Geosoft Plot (``*.plt``)  Plot
MAP_EXPORT_FORMAT_PLT = "PLT"
#: "`GXSHP <geosoft.gxapi.GXSHP>`"  ArcView Shapfile (``*.shp``)  Plot
MAP_EXPORT_FORMAT_SHP = "SHP"
#: "DXF12"  AutoCad12 (``*.dxf``)  Plot
MAP_EXPORT_FORMAT_DXF12 = "DXF12"
#: "DXF13"  AutoCad13 (``*.dxf``)  Plot
MAP_EXPORT_FORMAT_DXF13 = "DXF13"
#: "GTIFF"  GeoTIFF (``*.tif``), Color Image
MAP_EXPORT_FORMAT_GTIFF = "GTIFF"
#: "CGTIFF"  GeoTIFF Compressed (``*.tif``), Color Image
MAP_EXPORT_FORMAT_CGTIFF = "CGTIFF"
#: "MTIFF"  MapInfo TIFF (``*.tif``)  Color Image
MAP_EXPORT_FORMAT_MTIFF = "MTIFF"
#: "ATIFF"  ArcView TIFF (``*.tif``)  Color Image
MAP_EXPORT_FORMAT_ATIFF = "ATIFF"
#: "`GXGEO <geosoft.gxapi.GXGEO>`"  Geosoft COLOR grid (``*.grd``)  Color Image
MAP_EXPORT_FORMAT_GEO = "GEO"
#: "ERM"  ER Mapper RGB (``*.ers``)  Color Image
MAP_EXPORT_FORMAT_ERM = "ERM"
#: "KMZ"  Keyhole Markup (``*.kmz``)  Zipped XML/Image files
MAP_EXPORT_FORMAT_KMZ = "KMZ"

#
# MAP_EXPORT_METHOD constants
#
# Dithering Methods

#: Standard Dither
MAP_EXPORT_METHOD_STANDARD = 0
#: Error Diffusion Dither
MAP_EXPORT_METHOD_DIFFUSE = 1
#: No Dither
MAP_EXPORT_METHOD_NONE = 2

#
# MAP_EXPORT_RASTER_FORMAT constants
#
# Export Raster Formats
# .
# Format   Description                 Type         B/W  B/W  COL  B/W  COL  COL
# =======  ==========================  ===========  ===  ===  ===  ===  ===  ===

#: "EMF"  Enhanced Metafile (``*.emf``)  Plot
MAP_EXPORT_RASTER_FORMAT_EMF = "EMF"
#: "BMP"  Bitmap (``*.bmp``)  Color Image  X X X X X X
MAP_EXPORT_RASTER_FORMAT_BMP = "BMP"
#: "JPEGL"  JPEG Low Quality (``*.jpg``)  Color Image  X
MAP_EXPORT_RASTER_FORMAT_JPEGL = "JPEGL"
#: "JPEG"  JPEG (``*.jpg``)  Color Image  X
MAP_EXPORT_RASTER_FORMAT_JPEG = "JPEG"
#: "JPEGH"  JPEG High Quality (``*.jpg``)  Color Image  X
MAP_EXPORT_RASTER_FORMAT_JPEGH = "JPEGH"
#: "GIF"  GIF (``*.gif``)  Color Image  X X X X X
MAP_EXPORT_RASTER_FORMAT_GIF = "GIF"
#: "PCX"  PCX (``*.pcx``)  Color Image  X X X X X X
MAP_EXPORT_RASTER_FORMAT_PCX = "PCX"
#: "PNG"  PNG (``*.png``)  Color Image  X X X X X X
MAP_EXPORT_RASTER_FORMAT_PNG = "PNG"
#: "EPS"  Encasulated PostScript (``*.eps``)  Color Image  X
MAP_EXPORT_RASTER_FORMAT_EPS = "EPS"
#: "TIFF"  TIFF (``*.tif``)  Color Image  X X X X X X
MAP_EXPORT_RASTER_FORMAT_TIFF = "TIFF"

#
# MAP_LIST_MODE constants
#
# Map List modes

#: All
MAP_LIST_MODE_ALL = 0
#: 3d
MAP_LIST_MODE_3D = 1
#: Not3d
MAP_LIST_MODE_NOT3D = 2

#
# MAP_OPEN constants
#
# Open Modes

#: Map writenew
MAP_WRITENEW = 1
#: Map writeold
MAP_WRITEOLD = 2

#
# GXMAPL Constants
#

#
# GXMAPTEMPLATE Constants
#

#
# MAPTEMPLATE_OPEN constants
#
# Open Modes

#: Create new empty map template (will overwrite existing files)
MAPTEMPLATE_WRITENEW = 0
#: Create from existing template on disk
MAPTEMPLATE_EXIST = 1

#
# GXMATH Constants
#

#
# GXMESH Constants
#

#
# ATTRIBUTE_DATA_TYPE constants
#
# Data Type of Attribute

#: DOUBLE
ATTRIBUTE_DOUBLE = 0
#: THEMATIC
ATTRIBUTE_THEMATIC = 1
#: VECTOR
ATTRIBUTE_VECTOR = 2

#
# ATTRIBUTE_TYPE constants
#
# Data Type of Attribute

#: Single Value Attribute
ATTRIBUTE_SINGLE = 0
#: Surface Sides Attribute
ATTRIBUTE_SURFACE_SIDES = 1
#: Vertices Attribute
ATTRIBUTE_VERTICES = 2
#: Faces Attribute
ATTRIBUTE_FACES = 3

#
# GXMESHUTIL Constants
#

#
# SURFACE_CLIP_MODE constants
#
# Surface Clip Mode

#: Output the surface item above clipper surface
SURFACE_CLIP_ABOVE = 0
#: Output the surface item below clipper surface
SURFACE_CLIP_BELOW = 1
#: Output the surface items both above & below the clipper surface
SURFACE_CLIP_BOTH = 2

#
# SURFACE_CLIP_STATUS constants
#
# Surface Clip Return Code

#: Surface clipping successful
SURFACE_CLIP_SUCCESS = 0
#: Surface clipping successful, but with empty output
SURFACE_CLIP_SUCCESS_EMPTY = 1
#: Surface clipping fail
SURFACE_CLIP_FAIL = 2

#
# SURFACE_PROJECTION_METHOD constants
#
# Surface projection method

#: Maximum value: first intersection from the top
SURFACE_PROJECTION_MAXIMUM = 0
#: Minimum value: first intersection from the bottom
SURFACE_PROJECTION_MINIMUM = 1
#: Average value: all intersections
SURFACE_PROJECTION_AVERAGE = 2

#
# SURFACE_TRANSFORMATION_METHOD constants
#
# Surface transformation method

#: Transforms the surface by shifting it, provided dX, dY, dZ
SURFACE_TRANSFORMATION_METHOD_SHIFT = 0
#: Transforms the surface by scaling it, provided sX, sY, sZ
SURFACE_TRANSFORMATION_METHOD_SCALE = 1

#
# GXMETA Constants
#

#
# H_META_INVALID_TOKEN constants
#
# `GXMETA <geosoft.gxapi.GXMETA>` Invalid Token

#: H meta invalid token
H_META_INVALID_TOKEN = -1

#
# META_CORE_ATTRIB constants
#
# `GXMETA <geosoft.gxapi.GXMETA>` Core Attributes

#: Description of this class
META_CORE_ATTRIB_Class_Description = -300
#: Application that created this class
META_CORE_ATTRIB_Class_Application = -301
#: URL that defines this class
META_CORE_ATTRIB_Class_ReferenceURL = -302
#: Type of Class
META_CORE_ATTRIB_Class_Type = -303
#: Description of this type
META_CORE_ATTRIB_Type_Description = -304
#: URL that defines this type
META_CORE_ATTRIB_Type_ReferenceURL = -305
#: Fixed size of this type (in bytes)
META_CORE_ATTRIB_Type_FixedSize = -306
#: Byte order for this type
META_CORE_ATTRIB_Type_ByteOrder = -307
#: Minimum Value for this type
META_CORE_ATTRIB_Type_MinValue = -308
#: Maximum Value for this type
META_CORE_ATTRIB_Type_MaxValue = -309
#: Maximum Size in bytes for this type
META_CORE_ATTRIB_Type_MaxSize = -310
#: Object class that manages this type
META_CORE_ATTRIB_Type_ObjectClass = -311
#: Object creating function
META_CORE_ATTRIB_Type_hCreatS_Func = -312
#: Object serializationg function
META_CORE_ATTRIB_Type_sSerial_Func = -313
#: Enumeration Value
META_CORE_ATTRIB_Type_Enum_Value = -314
#: Is this attribute visible to the user
META_CORE_ATTRIB_Attrib_Visible = -315
#: Is this atttribute editable by the user
META_CORE_ATTRIB_Attrib_Editable = -316
#: The flat name of this attribute
META_CORE_ATTRIB_Attrib_FlatName = -317

#
# META_CORE_CLASS constants
#
# Meta Core Class Objects

#: All Classes are subordinate to this class
META_CORE_CLASS_Base = -100
#: All Predefined symbols are in this class
META_CORE_CLASS_Predefined = -101
#: Attributes
META_CORE_CLASS_Attributes = -102
#: Classattributes
META_CORE_CLASS_ClassAttributes = -103
#: Typeattributes
META_CORE_CLASS_TypeAttributes = -104
#: Objectattributes
META_CORE_CLASS_ObjectAttributes = -105
#: Enumattributes
META_CORE_CLASS_EnumAttributes = -106
#: Attributeattributes
META_CORE_CLASS_AttributeAttributes = -107
#: Itemattributes
META_CORE_CLASS_ItemAttributes = -108
#: Types
META_CORE_CLASS_Types = -109
#: Enums
META_CORE_CLASS_Enums = -110
#: Enum bool
META_CORE_CLASS_Enum_Bool = -111
#: Enum classtype
META_CORE_CLASS_Enum_ClassType = -112

#
# META_CORE_TYPE constants
#
# `GXMETA <geosoft.gxapi.GXMETA>` Core Data Types

#: Data Bytes (Base type)
META_CORE_TYPE_Bytes = -200
#: Boolean
META_CORE_TYPE_Bool = -201
#: Signed character
META_CORE_TYPE_I1 = -202
#: Unsigned character
META_CORE_TYPE_U1 = -203
#: Signed short
META_CORE_TYPE_I2 = -204
#: Unsigned short
META_CORE_TYPE_U2 = -205
#: Signed long
META_CORE_TYPE_I4 = -206
#: Unsigned long
META_CORE_TYPE_U4 = -207
#: Singed long long (64 bit int)
META_CORE_TYPE_I8 = -208
#: Unsigned long long
META_CORE_TYPE_U8 = -209
#: Float (32bit)
META_CORE_TYPE_R4 = -210
#: Double (64bit)
META_CORE_TYPE_R8 = -211
#: String
META_CORE_TYPE_String = -212
#: Predefined Object (`GXITR <geosoft.gxapi.GXITR>`,`GXIPJ <geosoft.gxapi.GXIPJ>`)
META_CORE_TYPE_Object = -213
#: Enumeration
META_CORE_TYPE_Enum = -214
#: Classtype
META_CORE_TYPE_ClassType = -215

#
# GXMPLY Constants
#

#
# GXMULTIGRID3D Constants
#

#
# DIRECTION3D constants
#
# Direction in 3D

#: XYZ
DIRECTION3D_XYZ = 0
#: YXZ
DIRECTION3D_YXZ = 1
#: XZY
DIRECTION3D_XZY = 2
#: YZX
DIRECTION3D_YZX = 3
#: ZXY
DIRECTION3D_ZXY = 4
#: ZYX
DIRECTION3D_ZYX = 5

#
# GOCAD_ORIENTATION constants
#
# GOCAD Orientations

#: Normal
GOCAD_ORIENTATIONS_NORMAL = 0
#: Inverted (Z)
GOCAD_ORIENTATIONS_INVERTED = 1
#: Normal (ZFirst)
GOCAD_ORIENTATIONS_NORMAL_ZFIRST = 2
#: Inverted (Z) (ZFirst)
GOCAD_ORIENTATIONS_INVERTED_ZFIRST = 3

#
# VECTOR_IMPORT constants
#
# Vector grid3d import direction

#: X, Y and Z
VECTOR_IMPORT_XYZ = 0
#: U, V and W
VECTOR_IMPORT_UVW = 1
#: Amplitude, Inclination and Declination
VECTOR_IMPORT_AID = 2

#
# FILTER3D constants
#
# Voxel filter type

#: Specify a file containing the 27-point filter
FILTER3D_FILE = 0
#: Smoothing filter
FILTER3D_SMOOTHING = 1
#: Laplace filter
FILTER3D_LAPLACE = 2
#: X-Gradient filter
FILTER3D_X_GRADIENT = 3
#: Y-Gradient filter
FILTER3D_Y_GRADIENT = 4
#: Z-Gradient filter
FILTER3D_Z_GRADIENT = 5
#: Total-Gradient filter
FILTER3D_TOTAL_GRADIENT = 6

#
# MULTIGRID3D_DIRECTGRID_METHOD constants
#
# How to calculate the cell values for direct gridding.

#: Select the minimum value found in each cell
MULTIGRID3D_DIRECTGRID_MINIMUM = 0
#: Select the maximum value found in each cell
MULTIGRID3D_DIRECTGRID_MAXIMUM = 1
#: Select the mean of all values found in each cell
MULTIGRID3D_DIRECTGRID_MEAN = 2
#: The number of valid (non-dummy) items found in each cell - 0 if no items found
MULTIGRID3D_DIRECTGRID_ITEMS = 3
#: The number of valid (non-dummy) items found in each cell - DUMMY if no items found
MULTIGRID3D_DIRECTGRID_DUMMYITEMS = 4

#
# GXMULTIGRID3DUTIL Constants
#

#
# RBFKERNEL constants
#
# Math kernel to use in the RBF Computation

#: Distance
RBFKERNEL_DISTANCE = 0
#: Multiquadratic
RBFKERNEL_MULTIQUADRATIC = 1

#
# GXMVIEW Constants
#

#
# MAKER constants
#
# Maker defines

#: GX
MAKER_GX = 0

#
# MVIEW_CLIP constants
#
# Boolean clipping defines

#: Turn ON clipping
CLIP_ON = 1
#: Turn OFF clipping
CLIP_OFF = 0

#
# MVIEW_COLOR constants
#
# 24-bit color defines
# The `color <geosoft.gxapi.GXMVIEW.color>` function can be used to create a color int from a
# color string description.
# The iColorXXX_MVIEW macros can be used to create colors from component
# intensities.
#: Black
C_BLACK = 33554432
#: Red
C_RED = 33554687
#: Green
C_GREEN = 33619712
#: Blue
C_BLUE = 50266112
#: Cyan
C_CYAN = 50331903
#: Magenta
C_MAGENTA = 50396928
#: Yellow
C_YELLOW = 67043328
#: Grey
C_GREY = 41975936
#: Light Red
C_LT_RED = 54542336
#: Light Green
C_LT_GREEN = 54526016
#: Light Blue
C_LT_BLUE = 50348096
#: Light Cyan
C_LT_CYAN = 50331712
#: Light Magenta
C_LT_MAGENTA = 50348032
#: Light Yellow
C_LT_YELLOW = 54525952
#: Light Grey
C_LT_GREY = 54542400
#: Grey 10%
C_GREY10 = 51910680
#: Grey 25%
C_GREY25 = 54542400
#: Grey 50%
C_GREY50 = 41975936
#: White
C_WHITE = 50331648
#: Transparent or no-draw
C_TRANSPARENT = 0

#
# MVIEW_CYLINDER3D constants
#
# What parts of the cylinder are closed

#: Open
MVIEW_CYLINDER3D_OPEN = 0
#: Closestart
MVIEW_CYLINDER3D_CLOSESTART = 1
#: Closeend
MVIEW_CYLINDER3D_CLOSEEND = 2
#: Closeall
MVIEW_CYLINDER3D_CLOSEALL = 3

#
# MVIEW_DRAW constants
#
# Polygon drawing defines

#: Draw Polylines
MVIEW_DRAW_POLYLINE = 0
#: Draw Polygons
MVIEW_DRAW_POLYGON = 1

#
# MVIEW_DRAWOBJ3D_ENTITY constants
#
# What types of entities to draw

#: Draw 3D Points (no normals) [1 verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_POINTS = 0
#: Draw 3D Lines (no normals) [2 verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_LINES = 1
#: Draw 3D Line strip (no normals) [2+x verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_LINE_STRIPS = 2
#: Draw 3D Line loop (no normals, closes loop with first point) [2+x verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_LINE_LOOPS = 3
#: Draw 3D Triangles [3 verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_TRIANGLES = 4
#: Draw 3D Triangle strips [3+x verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_TRIANGLE_STRIPS = 5
#: Draw 3D Triangle fans [3+x verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_TRIANGLE_FANS = 6
#: Draw 3D Quads (Must be in the same plane) [4 verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_QUADS = 7
#: Draw 3D Quad Strips (Must be in the same plane) [4+2x verticies per object]
MVIEW_DRAWOBJ3D_ENTITY_QUADS_STRIPS = 8
#: Draw 3D Quad Polygones (Must be in the same plane, must be convex and cannot intersect itself)
MVIEW_DRAWOBJ3D_ENTITY_POLYGONS = 9

#
# MVIEW_DRAWOBJ3D_MODE constants
#
# What types of entities to draw

#: Draw flat shaded faces (one normal and color per object)
MVIEW_DRAWOBJ3D_MODE_FLAT = 0
#: Draw smooth shaded faces (one normal and color per vertex)
MVIEW_DRAWOBJ3D_MODE_SMOOTH = 1

#
# MVIEW_EXTENT constants
#
# Types of extents defines

#: All objects
MVIEW_EXTENT_ALL = 0
#: Clipping regions
MVIEW_EXTENT_CLIP = 1
#: Map extents
MVIEW_EXTENT_MAP = 2
#: Visible objects
MVIEW_EXTENT_VISIBLE = 3

#
# MVIEW_FIT constants
#
# Fit area defines

#: Fit it to the map area
MVIEW_FIT_MAP = 0
#: Fit it to the view area
MVIEW_FIT_VIEW = 1

#
# MVIEW_FONT_WEIGHT constants
#
# Font weight defines

#: Normal
MVIEW_FONT_WEIGHT_NORMAL = 0
#: Ultralight
MVIEW_FONT_WEIGHT_ULTRALIGHT = 1
#: Light
MVIEW_FONT_WEIGHT_LIGHT = 2
#: Medium
MVIEW_FONT_WEIGHT_MEDIUM = 3
#: Bold
MVIEW_FONT_WEIGHT_BOLD = 4
#: Xbold
MVIEW_FONT_WEIGHT_XBOLD = 5
#: Xxbold
MVIEW_FONT_WEIGHT_XXBOLD = 6

#
# MVIEW_GRID constants
#
# Grid Drawing defines

#: Dot
MVIEW_GRID_DOT = 0
#: Line
MVIEW_GRID_LINE = 1
#: Cross
MVIEW_GRID_CROSS = 2

#
# MVIEW_GROUP constants
#
# Open Group defines

#: New Group (destroy any existing group)
MVIEW_GROUP_NEW = 1
#: Append to an existing Group
MVIEW_GROUP_APPEND = 0

#
# MVIEW_GROUP_LIST constants
#
# What groups to list

#: All the groups.
MVIEW_GROUP_LIST_ALL = 0
#: Those groups marked using the various mark functions.
MVIEW_GROUP_LIST_MARKED = 1
#: Those groups checked as visible in the view/group manager.
MVIEW_GROUP_LIST_VISIBLE = 2

#
# MVIEW_HIDE constants
#
# Boolean hidding defines

#: Turn ON hidding
HIDE_ON = 1
#: Turn OFF hidding
HIDE_OFF = 0

#
# MVIEW_IS constants
#
# Defines for mview types

#: Agg
MVIEW_IS_AGG = 0
#: Movable
MVIEW_IS_MOVABLE = 3
#: Csymb
MVIEW_IS_CSYMB = 4
#: Linked
MVIEW_IS_LINKED = 5
#: Made
MVIEW_IS_MADE = 6
#: Hidden
MVIEW_IS_HIDDEN = 7
#: Clipped
MVIEW_IS_CLIPPED = 8
#: Meta
MVIEW_IS_META = 9
#: Voxd
MVIEW_IS_VOXD = 10
#: Shadow 2d interpretation
MVIEW_IS_SHADOW_2D_INTERPRETATION = 11
#: Vector3d
MVIEW_IS_VECTOR3D = 12
#: Gensurf
MVIEW_IS_GENSURF = 13
#: Voxsurf
MVIEW_IS_VOXSURF = 14

#
# MVIEW_LABEL_BOUND constants
#
# Label Binding Defines

#: Label Not Bound
MVIEW_LABEL_BOUND_NO = 0
#: Label Bound
MVIEW_LABEL_BOUND_YES = 1

#
# MVIEW_LABEL_JUST constants
#
# Label Justification Defines

#: Top
MVIEW_LABEL_JUST_TOP = 0
#: Bottom
MVIEW_LABEL_JUST_BOTTOM = 1
#: Left
MVIEW_LABEL_JUST_LEFT = 2
#: Right
MVIEW_LABEL_JUST_RIGHT = 3

#
# MVIEW_LABEL_ORIENT constants
#
# Label Orientation Defines

#: Horizontal
MVIEW_LABEL_ORIENT_HORIZONTAL = 0
#: Top right
MVIEW_LABEL_ORIENT_TOP_RIGHT = 1
#: Top left
MVIEW_LABEL_ORIENT_TOP_LEFT = 2

#
# MVIEW_NAME_LENGTH constants
#
# Maximum length for view and group names

#: Maximum Length (1040)
MVIEW_NAME_LENGTH = 1040

#
# MVIEW_OPEN constants
#
# Open `GXMVIEW <geosoft.gxapi.GXMVIEW>` define

#: Read Only - No changes
MVIEW_READ = 0
#: Create new `GXMVIEW <geosoft.gxapi.GXMVIEW>` - destroys any existing `GXMVIEW <geosoft.gxapi.GXMVIEW>`
MVIEW_WRITENEW = 1
#: Open existing `GXMVIEW <geosoft.gxapi.GXMVIEW>` for read/write (must exist)
MVIEW_WRITEOLD = 2

#
# MVIEW_PJ constants
#
# Projection modes

#: No reprojection is used and all locations and
#: attributes are assumed to be in the view coordinate
#: system.
MVIEW_PJ_OFF = 0
#: Only locations will be transformed to the view
#: coordinate system.
MVIEW_PJ_LOCATION = 1
#: Locations and attributes (sizes, thicknesses, angles)
#: will be transformed to the view coordinate system.
MVIEW_PJ_ALL = 2
#: Mode before the last `MVIEW_PJ_OFF <geosoft.gxapi.MVIEW_PJ_OFF>`.
MVIEW_PJ_ON = 3

#
# MVIEW_RELOCATE constants
#
# Relocation Defines

#: Will fit the image to fill the specified area
MVIEW_RELOCATE_FIT = 0
#: Will maintain aspect ratio
MVIEW_RELOCATE_ASPECT = 1
#: Will maintain aspect ratio and center in specified area
MVIEW_RELOCATE_ASPECT_CENTER = 2

#
# MVIEW_SMOOTH constants
#
# Interpolation method to use for drawing line and polygon edges

#: Nearest neighbour
MVIEW_SMOOTH_NEAREST = 0
#: Cubic Spline
MVIEW_SMOOTH_CUBIC = 1
#: Akima
MVIEW_SMOOTH_AKIMA = 2

#
# MVIEW_TILE constants
#
# Tiling defines

#: Rectangular
MVIEW_TILE_RECTANGULAR = 0
#: Diagonal
MVIEW_TILE_DIAGONAL = 1
#: Triangular
MVIEW_TILE_TRIANGULAR = 2
#: Random
MVIEW_TILE_RANDOM = 3

#
# MVIEW_UNIT constants
#
# Coordinate systems defines

#: View coordinates
MVIEW_UNIT_VIEW = 0
#: Plot hi-metric (mm*100) on the map.
MVIEW_UNIT_PLOT = 1
#: Plot mm on the map.
MVIEW_UNIT_MM = 2
#: View coordinates without a warp if there is one
MVIEW_UNIT_VIEW_UNWARPED = 3

#
# MVIEW_EXTENT_UNIT constants
#
# Types of units for extents (these map to the
# :ref:`MVIEW_UNIT` defines directly)

#: `MVIEW_UNIT_VIEW <geosoft.gxapi.MVIEW_UNIT_VIEW>`
MVIEW_EXTENT_UNIT_VIEW = 0
#: `MVIEW_UNIT_PLOT <geosoft.gxapi.MVIEW_UNIT_PLOT>`
MVIEW_EXTENT_UNIT_PLOT = 1
#: `MVIEW_UNIT_MM <geosoft.gxapi.MVIEW_UNIT_MM>`
MVIEW_EXTENT_UNIT_MM = 2
#: `MVIEW_UNIT_VIEW_UNWARPED <geosoft.gxapi.MVIEW_UNIT_VIEW_UNWARPED>`
MVIEW_EXTENT_UNIT_VIEW_UNWARPED = 3

#
# TEXT_REF constants
#
# Text reference locations

#: Bottom left
TEXT_REF_BOTTOM_LEFT = 0
#: Bottom center
TEXT_REF_BOTTOM_CENTER = 1
#: Bottom right
TEXT_REF_BOTTOM_RIGHT = 2
#: Middle left
TEXT_REF_MIDDLE_LEFT = 3
#: Middle center
TEXT_REF_MIDDLE_CENTER = 4
#: Middle right
TEXT_REF_MIDDLE_RIGHT = 5
#: Top left
TEXT_REF_TOP_LEFT = 6
#: Top center
TEXT_REF_TOP_CENTER = 7
#: Top right
TEXT_REF_TOP_RIGHT = 8

#
# MVIEW_3D_RENDER constants
#
# 3D Geometry rendering defines. These flags only affect mixed geometry groups and not the data
# specific groups (e.g. voxels, vector voxels surfaces etc.). Each of those groups
# has predefined optimum behaviour and any changes to these flags are ignored.

#: This flag is enabled if the backfaces of geometry should be rendered
MVIEW_3D_RENDER_BACKFACES = 1
#: If the exaggeration scales of the 3D view in X, Y and/or Z is set to anything other than 1.0
#: any geometric objects (spheres, cubes etc.) for 3D groups with the following flags
#: will render untransformed while only the centers of the objects are changed.
#: This ensures the objects appear in the correct place with respect to other data being rendered (and scaled).
MVIEW_3D_DONT_SCALE_GEOMETRY = 2 # # GXMVU Constants # # # EMLAY_GEOMETRY constants # # Type of Geometry #: 0 EMLAY_V_COPLANAR = 0 #: 1 EMLAY_H_COPLANAR = 1 #: 2 EMLAY_V_COAXIAL = 2 # # ARROW_ALIGNMENT constants # # Direction of alignment #: Horizontal ARROW_ALIGNMENT_HORIZONTAL = 0 #: Vertical ARROW_ALIGNMENT_VERTICAL = 1 # # BARCHART_LABEL constants # # Place to draw bar labels #: No label BARCHART_LABEL_NO = 0 #: Label below X axis BARCHART_LABEL_BELOWX = 1 #: Label above X axis BARCHART_LABEL_ABOVEX = 2 #: Label at positive end of bar BARCHART_LABEL_PEND = 3 #: Label at negative end of bar BARCHART_LABEL_NEND = 4 #: Label at alternative ends,1st label at positive end BARCHART_LABEL_ALTERNAT1 = 5 #: Label at alternative ends,1st label at negative end BARCHART_LABEL_ALTERNAT2 = 6 # # COLORBAR_LABEL constants # # Label text orientation #: (default) COLORBAR_LABEL_HORIZONTAL = 0 #: Gives text an orientation of -90 degrees COLORBAR_LABEL_VERTICAL = 1 # # COLORBAR_STYLE constants # # Label text orientation #: Don't draw COLORBAR_STYLE_NONE = 0 #: Post max/min values COLORBAR_STYLE_MAXMIN = 1 # # MVU_ORIENTATION constants # # Orientation (of whatever) #: Vertical MVU_ORIENTATION_VERTICAL = 0 #: Horizontal MVU_ORIENTATION_HORIZONTAL = 1 # # MVU_DIVISION_STYLE constants # # Orientation (of whatever) #: No division marks MVU_DIVISION_STYLE_NONE = 0 #: Division line MVU_DIVISION_STYLE_LINES = 1 #: Inside tics, both sides MVU_DIVISION_STYLE_TICS = 2 # # MVU_ARROW constants # # Type Arrow. These definitions are used as binary flags, and can be # used together by passing sums. #: Plot the head as a solid triangle, otherwise plot a "stick arrow" #: with three lines for the tail and two barbs. MVU_ARROW_SOLID = 1 #: If used, input the actual length of the barbs on the arrow, in #: view X-axis units, as measured along the tail. If not used, enter the ratio #: between the length of the barbs and full length of the arrow (e.g. 0.4). 
#: In the latter case, the longer the arrow, the bigger the arrow head. MVU_ARROW_FIXED = 2 # # MVU_FLIGHT_COMPASS constants # # Compass direction #: None MVU_FLIGHT_COMPASS_NONE = -1 #: East MVU_FLIGHT_COMPASS_EAST = 0 #: North MVU_FLIGHT_COMPASS_NORTH = 1 #: West MVU_FLIGHT_COMPASS_WEST = 2 #: South MVU_FLIGHT_COMPASS_SOUTH = 3 # # MVU_FLIGHT_DUMMIES constants # # Show Dummies #: Notincluded MVU_FLIGHT_DUMMIES_NOTINCLUDED = 0 #: Included MVU_FLIGHT_DUMMIES_INCLUDED = 1 # # MVU_FLIGHT_LOCATE constants # # Line label locations #: No label MVU_FLIGHT_LOCATE_NONE = 0 #: :: #: #: L100.2 -------------------------- L100.2 #: #: dOffA controls distance from label to line. #: dOffB controls vertical offset from center. MVU_FLIGHT_LOCATE_END = 1 #: :: #: #: L100.2 L100.2 #: ---------------------------------------- #: #: dOffA controls label distance above the line. #: dOffB controls offset in from line end. MVU_FLIGHT_LOCATE_ABOVE = 2 #: :: #: #: ---------------------------------------- #: L100.2 L100.2 #: #: dOffA controls label distance below the line. #: dOffB controls offset in from line end. 
MVU_FLIGHT_LOCATE_BELOW = 3 #: To add '>' to label to indicate direction, for example: #: `MVU_FLIGHT_LOCATE_END <geosoft.gxapi.MVU_FLIGHT_LOCATE_END>`+`MVU_FLIGHT_DIRECTION <geosoft.gxapi.MVU_FLIGHT_DIRECTION>` MVU_FLIGHT_DIRECTION = 8 # # MVU_VOX_SURFACE_METHOD constants # # TODO #: Marching cubes MVU_VOX_SURFACE_METHOD_MARCHING_CUBES = 0 # # MVU_VOX_SURFACE_OPTION constants # # TODO #: Open MVU_VOX_SURFACE_OPTION_OPEN = 0 #: Closed MVU_VOX_SURFACE_OPTION_CLOSED = 1 # # MVU_TEXTBOX constants # # Type of Box #: Left MVU_TEXTBOX_LEFT = 0 #: Center MVU_TEXTBOX_CENTER = 1 #: Right MVU_TEXTBOX_RIGHT = 2 # # MVU_VPOINT constants # # Head Acuteness #: Sharp MVU_VPOINT_SHARP = 0 #: Medium MVU_VPOINT_MEDIUM = 1 #: Blunt MVU_VPOINT_BLUNT = 2 # # MVU_VPOS constants # # Head Position #: Head MVU_VPOS_HEAD = 0 #: Middle MVU_VPOS_MIDDLE = 1 #: Tail MVU_VPOS_TAIL = 2 # # MVU_VSIZE constants # # Head Size #: Nohead MVU_VSIZE_NOHEAD = 0 #: Smallhead MVU_VSIZE_SMALLHEAD = 1 #: Mediumhead MVU_VSIZE_MEDIUMHEAD = 2 #: Largehead MVU_VSIZE_LARGEHEAD = 3 #: Notail MVU_VSIZE_NOTAIL = 4 # # MVU_VSTYLE constants # # Head Style #: Lines MVU_VSTYLE_LINES = 0 #: Barb MVU_VSTYLE_BARB = 1 #: Triangle MVU_VSTYLE_TRIANGLE = 2 # # GXMXD Constants # # # GXPAT Constants # # # GXPG Constants # # # PG_3D_DIR constants # # 3D Pager direction #: Xyz PG_3D_DIR_XYZ = 0 #: Yxz PG_3D_DIR_YXZ = 1 #: Xzy PG_3D_DIR_XZY = 2 #: Yzx PG_3D_DIR_YZX = 3 #: Zxy PG_3D_DIR_ZXY = 4 #: Zyx PG_3D_DIR_ZYX = 5 # # PG_BF_CONV constants # # Pager binary conversions #: The Data is in Raw form PG_BF_CONV_NONE = 0 #: The data needs to be byte swapped PG_BF_CONV_SWAP = 1 # # GXPJ Constants # # # PJ_ELEVATION constants # # Elevation correction method #: Elevation transform not supported. PJ_ELEVATION_NONE = 0 #: elevation transformation uses earth-centre shift #: and is not accurate. PJ_ELEVATION_GEOCENTRIC = 1 #: elevation transformation uses a geoid model #: and is as accurate as the geoid data. 
PJ_ELEVATION_GEOID = 2 # # PJ_RECT constants # # Conversion direction #: Xy2ll PJ_RECT_XY2LL = 0 #: Ll2xy PJ_RECT_LL2XY = 1 # # GXPLY Constants # # # PLY_CLIP constants # # Polygon clipping mode #: The polygons do not intersect PLY_CLIP_NO_INTERSECT = 0 #: The polygons do intersect PLY_CLIP_INTERSECT = 1 #: Polygon A is completely inside polygon B PLY_CLIP_A_IN_B = 2 #: Polygon B is completely inside polygon A PLY_CLIP_B_IN_A = 3 # # PLY_POINT_CLIP constants # # Polygon point clipping mode #: The point is inside the polygon PLY_POINT_CLIP_INSIDE = 0 #: The point is outside the polygon PLY_POINT_CLIP_OUTSIDE = 1 #: An error occurred PLY_POINT_CLIP_ERROR = 2 # # PLY_LINE_CLIP constants # # Polygon line clip indicator #: The start point of the line is inside PLY_LINE_CLIP_INSIDE = 0 #: This name is a misnomer - it should have been `PLY_LINE_CLIP_INSIDE <geosoft.gxapi.PLY_LINE_CLIP_INSIDE>`, but is retained to support legacy code PLY_LINE_CLIP_NO_INTERSECT = 0 #: The start point of the line is outside PLY_LINE_CLIP_OUTSIDE = 1 #: Error PLY_LINE_CLIP_ERROR = 2 # # GXRA Constants # # # GXREG Constants # # # REG_MERGE constants # # `GXREG <geosoft.gxapi.GXREG>` merge options #: Replace Values REG_MERGE_REPLACE = 0 #: Only append values REG_MERGE_ADD = 1 # # GXSBF Constants # # # SBF_OPEN constants # # `GXSBF <geosoft.gxapi.GXSBF>` Open defines #: Read only SBF_READ = 0 #: Read/write - erases structured file SBF_READWRITE_NEW = 1 #: Read/write - open and append onto pre-existing structured file SBF_READWRITE_OLD = 2 # # SBF_TYPE constants # # `GXSBF <geosoft.gxapi.GXSBF>` Object type defines #: Embedded directory names SBF_TYPE_DIRS = 1 #: Embedded file names SBF_TYPE_FILES = 2 #: Embedded file and directory names SBF_TYPE_BOTH = 3 # # GXSEGYREADER Constants # # # GXST Constants # # # ST_INFO constants # # Information to retrieve #: Number of non-dummy items ST_ITEMS = 0 #: Number of items greater than zero ST_NPOS = 1 #: Number of items equal to zero ST_NZERO = 22 #: 
Total number of items (dummy and non-dummy) ST_TOTAL = 24 #: St dummies ST_DUMMIES = 2 #: St min ST_MIN = 3 #: St max ST_MAX = 4 #: St range ST_RANGE = 5 #: St mean ST_MEAN = 6 #: St median ST_MEDIAN = 7 #: Mode value (most often repeated value) ST_MODE = 8 #: Mode value (using different algorithm) ST_SIMPLE_MODE = 23 #: St geomean ST_GEOMEAN = 9 #: St variance ST_VARIANCE = 10 #: St stddev ST_STDDEV = 11 #: St stderr ST_STDERR = 12 #: St skew ST_SKEW = 13 #: St kurtosis ST_KURTOSIS = 14 #: St base ST_BASE = 15 #: Sums and sums of powers ST_SUM = 16 #: St sum2 ST_SUM2 = 17 #: St sum3 ST_SUM3 = 18 #: St sum4 ST_SUM4 = 19 #: Smallest value greater than zero. ST_MINPOS = 21 #: St hist maxcount ST_HIST_MAXCOUNT = 100 # # GXST2 Constants # # # ST2_CORRELATION constants # # Correlation style #: Simple correlation ST2_CORR = 0 #: Pearson's correlation (normalized to standard deviations) ST2_PCORR = 1 # # GXSTORAGEPROJECT Constants # # # GXSTR Constants # # # FILE_EXT constants # # Extension option #: Will add the extension only if no extension is present. FILE_EXT_ADD_IF_NONE = 0 #: Will cause a renaming of the file extension to the new extension. FILE_EXT_FORCE = 1 # # STR_CASE constants # # Case sensitivity #: Tolerant STR_CASE_TOLERANT = 0 #: Sensitive STR_CASE_SENSITIVE = 1 # # STR_ESCAPE constants # # How to handle escape #: Converts non-standard characters in a string to escape sequences. ESCAPE_CONVERT = 0 #: Replaces escape sequences with original characters. 
ESCAPE_REPLACE = 1 # # STR_FILE_PART constants # # Parts of a path string #: File Name STR_FILE_PART_NAME = 0 #: Extension STR_FILE_PART_EXTENSION = 1 #: Directory STR_FILE_PART_DIRECTORY = 2 #: Drive STR_FILE_PART_VOLUME = 3 #: Qualifiers STR_FILE_PART_QUALIFIERS = 4 #: Name and the Extension together STR_FILE_PART_NAME_EXTENSION = 5 #: Full name of file with no qualifiers STR_FILE_PART_FULLPATH_NO_QUALIFIERS = 6 # # STR_JUSTIFY constants # # String justification style #: Left STR_JUSTIFY_LEFT = 0 #: Center STR_JUSTIFY_CENTER = 1 #: Right STR_JUSTIFY_RIGHT = 2 # # STR_TRIM constants # # What to trim #: Str trimright STR_TRIMRIGHT = 1 #: Str trimleft STR_TRIMLEFT = 2 #: Str trimboth STR_TRIMBOTH = 3 # # GXSURFACE Constants # # # SURFACE_OPEN constants # # Open Modes #: Read SURFACE_OPEN_READ = 0 #: Readwrite SURFACE_OPEN_READWRITE = 1 # # GXSURFACEITEM Constants # # # SURFACERENDER_MODE constants # # Open Modes #: Surfacerender smooth SURFACERENDER_SMOOTH = 0 #: Surfacerender fill SURFACERENDER_FILL = 1 #: Surfacerender edges SURFACERENDER_EDGES = 2 # # GXSYS Constants # # # ARC_LICENSE constants # # ArcGIS platform types #: The Engine or any qualifying ArcGIS product is missing ARC_LICENSE_ENGINENOTPRESENT = 0 #: Desktop Engine ARC_LICENSE_DESKTOPENGINE = 1 #: ArcView ARC_LICENSE_ARCVIEW = 2 #: ArcEditor ARC_LICENSE_ARCEDITOR = 3 #: ArcInfo ARC_LICENSE_ARCINFO = 4 #: ArcServer ARC_LICENSE_ARCSERVER = 5 # # GEO_DIRECTORY constants # # Geosoft directory defines #: None GEO_DIRECTORY_NONE = 0 #: Geosoft\\ GEO_DIRECTORY_GEOSOFT = 1 #: Geosoft\\bin GEO_DIRECTORY_BIN = 2 #: Geosoft\\ger GEO_DIRECTORY_GER = 3 #: Geosoft\\omn GEO_DIRECTORY_OMN = 4 #: Geosoft\\tbl GEO_DIRECTORY_TBL = 5 #: Geosoft\\fonts GEO_DIRECTORY_FONTS = 6 #: Geosoft\\gx GEO_DIRECTORY_GX = 7 #: Geosoft\\gs GEO_DIRECTORY_GS = 8 #: Geosoft\\apps GEO_DIRECTORY_APPS = 9 #: Geosoft\\user\\etc and then Geosoft\\etc GEO_DIRECTORY_ETC = 10 #: Geosoft\\hlp GEO_DIRECTORY_HLP = 11 #: Geosoft\\user\\csv 
GEO_DIRECTORY_USER_CSV = 14 #: Geosoft\\user\\lic GEO_DIRECTORY_USER_LIC = 15 #: Geosoft\\user\\ini GEO_DIRECTORY_USER_INI = 16 #: Geosoft\\temp (or where the user put it) GEO_DIRECTORY_USER_TEMP = 17 #: Geosoft\\user\\etc GEO_DIRECTORY_USER_ETC = 18 #: Geosoft\\img GEO_DIRECTORY_IMG = 19 #: Geosoft\\bar GEO_DIRECTORY_BAR = 20 #: Geosoft\\maptemplate GEO_DIRECTORY_MAPTEMPLATE = 22 #: Geosoft\\user\\maptemplate GEO_DIRECTORY_USER_MAPTEMPLATE = 23 #: Geosoft\\pygx GEO_DIRECTORY_PYGX = 24 #: Geosoft\\user\\pygx GEO_DIRECTORY_USER_PYGX = 25 #: Geosoft\\user\\gx GEO_DIRECTORY_USER_GX = 26 # # REG_DOMAIN constants # # Registry key domains #: Same as HKEY_LOCAL_MACHINE in Windows REG_DOMAIN_MACHINE = 0 #: Same as HKEY_CURRENT_USER in Windows REG_DOMAIN_USER = 1 # # SHELL_EXECUTE constants # # Shell execute defines #: Sw hide SW_HIDE = 0 #: Sw shownormal SW_SHOWNORMAL = 1 #: Sw showminimized SW_SHOWMINIMIZED = 2 #: Sw showmaximized SW_SHOWMAXIMIZED = 3 #: Sw shownoactivate SW_SHOWNOACTIVATE = 4 #: Sw show SW_SHOW = 5 #: Sw minimize SW_MINIMIZE = 6 #: Sw showminnoactive SW_SHOWMINNOACTIVE = 7 #: Sw showna SW_SHOWNA = 8 #: Sw restore SW_RESTORE = 9 #: Sw showdefault SW_SHOWDEFAULT = 10 #: Sw forceminimize SW_FORCEMINIMIZE = 11 # # SYS_DIR constants # # `GXSYS <geosoft.gxapi.GXSYS>` Directory locations #: Is the workspace working directory SYS_DIR_LOCAL = 0 #: Is the geosoft installation directory (read-only) SYS_DIR_GEOSOFT = 1 #: is the geosoft installation directory that #: contains user read/write files. 
SYS_DIR_USER = 2 #: Geosoft Temp folder SYS_DIR_GEOTEMP = 3 #: Windows folder SYS_DIR_WINDOWS = 4 #: Windows SYSTEM folder SYS_DIR_SYSTEM = 5 #: Where the license file is stored SYS_DIR_LICENSE = 6 #: User RESOURCEFILES Folder SYS_DIR_RESOURCEFILES = 7 #: BAR folder SYS_DIR_GEOSOFT_BAR = 100 #: BIN folder SYS_DIR_GEOSOFT_BIN = 101 #: CSV folder SYS_DIR_GEOSOFT_CSV = 102 #: CSV ALIASES folder SYS_DIR_GEOSOFT_CSV_ALIASES = 103 #: DATA folder SYS_DIR_GEOSOFT_DATA = 104 #: DBG folder SYS_DIR_GEOSOFT_DBG = 105 #: Encrypted Files folder SYS_DIR_GEOSOFT_ENCRYPTEDFILES = 106 #: ETC folder SYS_DIR_GEOSOFT_ETC = 107 #: FONTS folder SYS_DIR_GEOSOFT_FONTS = 108 #: `GXGER <geosoft.gxapi.GXGER>` folder SYS_DIR_GEOSOFT_GER = 109 #: GS folder SYS_DIR_GEOSOFT_GS = 110 #: GX folder SYS_DIR_GEOSOFT_GX = 111 #: HLP folder SYS_DIR_GEOSOFT_HLP = 112 #: `GXIMG <geosoft.gxapi.GXIMG>` folder SYS_DIR_GEOSOFT_IMG = 113 #: INI folder SYS_DIR_GEOSOFT_INI = 114 #: `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` folder SYS_DIR_GEOSOFT_MAPTEMPLATE = 115 #: OMN folder SYS_DIR_GEOSOFT_OMN = 116 #: PAGE folder SYS_DIR_GEOSOFT_PAGE = 117 #: SCHEMA folder SYS_DIR_GEOSOFT_SCHEMA = 118 #: SPEC INI older SYS_DIR_GEOSOFT_SPEC_INI = 119 #: STYLE SHEETS folder SYS_DIR_GEOSOFT_STYLESHEETS = 120 #: TBL folder SYS_DIR_GEOSOFT_TBL = 121 #: PYTHON folder SYS_DIR_GEOSOFT_PYTHON = 127 #: User CSV Folder SYS_DIR_USER_CSV = 200 #: User ETC Folder SYS_DIR_USER_ETC = 201 #: User GS Folder SYS_DIR_USER_GS = 202 #: User HLP Folder SYS_DIR_USER_HLP = 203 #: User INI Folder SYS_DIR_USER_INI = 204 #: User LIC Folder SYS_DIR_USER_LIC = 205 #: User `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` Folder SYS_DIR_USER_MAPTEMPLATE = 206 #: User OMN Folder SYS_DIR_USER_OMN = 207 #: User BAR Folder SYS_DIR_USER_BAR = 214 #: User `GXIMG <geosoft.gxapi.GXIMG>` Folder SYS_DIR_USER_IMG = 215 #: User STACKS Folder SYS_DIR_USER_STACKS = 209 #: User TEMP Folder SYS_DIR_USER_TEMP = 210 #: User SERVICES Folder SYS_DIR_USER_SERVICES = 211 #: 
User STYLESHEETS Folder SYS_DIR_USER_STYLESHEETS = 212 # # SYS_FONT constants # # Font types #: Geosoft GFN fonts. SYS_FONT_GFN = 1 #: Available TrueType fonts SYS_FONT_TT = 0 # # SYS_INFO constants # # System information #: Version major SYS_INFO_VERSION_MAJOR = 0 #: Version minor SYS_INFO_VERSION_MINOR = 1 #: Version sp SYS_INFO_VERSION_SP = 2 #: Build number SYS_INFO_BUILD_NUMBER = 3 #: Build label SYS_INFO_BUILD_LABEL = 4 #: Version label SYS_INFO_VERSION_LABEL = 5 #: Productname SYS_INFO_PRODUCTNAME = 6 #: Servername SYS_INFO_SERVERNAME = 7 #: Legalcopyright SYS_INFO_LEGALCOPYRIGHT = 8 #: Registry SYS_INFO_REGISTRY = 9 #: Registry environment SYS_INFO_REGISTRY_ENVIRONMENT = 10 #: Registry support SYS_INFO_REGISTRY_SUPPORT = 11 #: Registry interapp SYS_INFO_REGISTRY_INTERAPP = 12 #: Ois registry SYS_INFO_OIS_REGISTRY = 13 #: Test registry SYS_INFO_TEST_REGISTRY = 14 # # SYS_LINEAGE_SOURCE constants # # Type of lineage sources #: Map SYS_LINEAGE_SOURCE_MAP = 0 #: Mxd SYS_LINEAGE_SOURCE_MXD = 1 #: Db SYS_LINEAGE_SOURCE_DB = 2 #: Maptemplate SYS_LINEAGE_SOURCE_MAPTEMPLATE = 3 #: Grid SYS_LINEAGE_SOURCE_GRID = 4 #: Voxel SYS_LINEAGE_SOURCE_VOXEL = 5 # # SYS_MENU_CLEAR constants # # Font types #: Clear all menus excluding the coremenus.omn #: but including any default menus specified in the settings (these will not come back in this project). SYS_MENU_CLEAR_ALL = 0 #: Clear all menus excluding the coremenus.omn #: but reload any default menus specified in the settings (essentially this is a refresh #: back to the default state again). SYS_MENU_CLEAR_DEFAULT = 1 # # SYS_PATH constants # # Get specific Geosoft paths. The path name will # have a directory separator at the end. #: Is the workspace working directory SYS_PATH_LOCAL = 0 #: Is the geosoft installation directory (read-only) SYS_PATH_GEOSOFT = 1 #: is the geosoft installation directory that #: contains user read/write files. 
SYS_PATH_GEOSOFT_USER = 2 #: Geosoft Temp folder SYS_PATH_GEOTEMP = 3 #: Windows folder SYS_PATH_WINDOWS = 4 #: System folder SYS_PATH_SYSTEM = 5 #: Where the license file is stored SYS_PATH_LICENSE = 6 #: User RESOURCEFILES Folder SYS_PATH_RESOURCEFILES = 7 #: BAR folder SYS_PATH_GEOSOFT_BAR = 100 #: BIN folder SYS_PATH_GEOSOFT_BIN = 101 #: CSV folder SYS_PATH_GEOSOFT_CSV = 102 #: CSV ALIASES folder SYS_PATH_GEOSOFT_CSV_ALIASES = 103 #: DATA folder SYS_PATH_GEOSOFT_DATA = 104 #: DBG folder SYS_PATH_GEOSOFT_DBG = 105 #: Encrypted Files folder SYS_PATH_GEOSOFT_ENCRYPTEDFILES = 106 #: ETC folder SYS_PATH_GEOSOFT_ETC = 107 #: FONTS folder SYS_PATH_GEOSOFT_FONTS = 108 #: `GXGER <geosoft.gxapi.GXGER>` folder SYS_PATH_GEOSOFT_GER = 109 #: GS folder SYS_PATH_GEOSOFT_GS = 110 #: PYGX folder SYS_PATH_GEOSOFT_PYGX = 126 #: GX folder SYS_PATH_GEOSOFT_GX = 111 #: HLP folder SYS_PATH_GEOSOFT_HLP = 112 #: `GXIMG <geosoft.gxapi.GXIMG>` folder SYS_PATH_GEOSOFT_IMG = 113 #: INI folder SYS_PATH_GEOSOFT_INI = 114 #: `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` folder SYS_PATH_GEOSOFT_MAPTEMPLATE = 115 #: OMN folder SYS_PATH_GEOSOFT_OMN = 116 #: PAGE folder SYS_PATH_GEOSOFT_PAGE = 117 #: SCHEMA folder SYS_PATH_GEOSOFT_SCHEMA = 118 #: SPEC INI older SYS_PATH_GEOSOFT_SPEC_INI = 119 #: STYLE SHEETS folder SYS_PATH_GEOSOFT_STYLESHEETS = 120 #: TBL folder SYS_PATH_GEOSOFT_TBL = 121 #: User CSV Folder SYS_PATH_GEOSOFT_USER_CSV = 200 #: User ETC Folder SYS_PATH_GEOSOFT_USER_ETC = 201 #: User GS Folder SYS_PATH_GEOSOFT_USER_GS = 202 #: User GX Folder SYS_PATH_GEOSOFT_USER_GX = 217 #: User PYGX Folder SYS_PATH_GEOSOFT_USER_PYGX = 216 #: User HLP Folder SYS_PATH_GEOSOFT_USER_HLP = 203 #: User INI Folder SYS_PATH_GEOSOFT_USER_INI = 204 #: User LIC Folder SYS_PATH_GEOSOFT_USER_LIC = 205 #: User `GXMAPTEMPLATE <geosoft.gxapi.GXMAPTEMPLATE>` Folder SYS_PATH_GEOSOFT_USER_MAPTEMPLATE = 206 #: User OMN Folder SYS_PATH_GEOSOFT_USER_OMN = 207 #: User STACKS Folder SYS_PATH_GEOSOFT_USER_STACKS = 209 #: 
#: User TEMP Folder
SYS_PATH_GEOSOFT_USER_TEMP = 210
#: User SERVICES Folder
SYS_PATH_USER_SERVICES = 211
#: User STYLESHEETS Folder
SYS_PATH_USER_STYLESHEETS = 212

#
# SYS_RUN_DISPLAY constants
#
# Windows Display Options
# Determine how applications are started.
# These options are not yet implemented.

#: In a window (default)
SYS_RUN_DISPLAY_WINDOW = 0
#: Minimized
#: NOTE(review): the generated comment here read "Maximized", which contradicts
#: the constant name MINIMIZE — confirm intent against the GXSYS run docs.
SYS_RUN_DISPLAY_MINIMIZE = 8
#: Full Screen
SYS_RUN_DISPLAY_FULLSCREEN = 16

#
# SYS_RUN_HOLD constants
#
# DOS Console Options
# These options determine if and when the DOS/EXE
# console window is held. These options only work
# on DOS and EXE programs.

#: Don't wait (Default)
SYS_RUN_HOLD_NEVER = 0
#: Hold the screen if there is an error
SYS_RUN_HOLD_ONERROR = 512
#: Always hold the screen
SYS_RUN_HOLD_ALWAYS = 1024

#
# SYS_RUN_TYPE constants
#
# Type of application to run

#: Things such as .BAT files, copy commands, etc. (A console window is created)
SYS_RUN_TYPE_DOS = 1
#: Any program (.EXE) (a console window is created)
SYS_RUN_TYPE_EXE = 0
#: Windows applications that do not require a console window.
SYS_RUN_TYPE_WINDOWS = 2

#
# SYS_RUN_WIN constants
#
# Windows Options
# Should we wait for the application to
# finish or should we continue executing. If you wait
# for another program, Oasis montaj will not
# redraw or respond. We always wait for EXE and DOS programs.

#: Never wait (default)
SYS_RUN_WIN_NOWAIT = 0
#: Always wait
SYS_RUN_WIN_WAIT = 2048

#
# SYS_SEARCH_PATH constants
#
# Find File define

#: Local and then Geosoft directory
FIND_LOCAL_GEOSOFT = 0
#: Geosoft directory
FIND_GEOSOFT = 1
#: Local directory
FIND_LOCAL = 2
#: Make the name short (FLAG that is added on)
FIND_SHORT = 1024

#
# SYS_ENCRYPTION_KEY constants
#
# How to encrypt a string. Determines the portability of the encrypted string.

#: Encrypt string to currently signed-in user. The string can be decrypted
#: by the same user on any machine.
SYS_ENCRYPTION_KEY_GEOSOFT_ID = 0
#: Encrypt string to current machine.
The string can be decrypted by any #: user on the same machine. SYS_ENCRYPTION_KEY_GLOBAL_ID = 1 # # TD_ICON constants # # TaskDialog Icon #: No icon. TD_ICON_NONE = 0 #: An exclamation-point icon appears in the task dialog. TD_ICON_WARNING = 1 #: A stop-sign icon appears in the task dialog. TD_ICON_ERROR = 2 #: An icon consisting of a lowercase letter i in a circle appears in the task dialog. TD_ICON_INFORMATION = 3 #: A shield icon appears in the task dialog. TD_ICON_SUCCESS = 4 #: A shield icon appears in the task dialog. TD_ICON_CONFIRMATION = 5 # # TD_BUTTON constants # # TaskDialog Common Buttons #: No common buttons. TD_BUTTON_NONE = 0 #: Button results in `TD_ID_OK <geosoft.gxapi.TD_ID_OK>` return value. TD_BUTTON_OK = 1 #: Button results in `TD_ID_YES <geosoft.gxapi.TD_ID_YES>` return value. TD_BUTTON_YES = 2 #: Button results in `TD_ID_NO <geosoft.gxapi.TD_ID_NO>` return value. TD_BUTTON_NO = 4 #: Button results in `TD_ID_CANCEL <geosoft.gxapi.TD_ID_CANCEL>` return value. TD_BUTTON_CANCEL = 8 #: Button results in `TD_ID_RETRY <geosoft.gxapi.TD_ID_RETRY>` return value. TD_BUTTON_RETRY = 16 #: Button results in `TD_ID_CLOSE <geosoft.gxapi.TD_ID_CLOSE>` return value. TD_BUTTON_CLOSE = 32 # # TD_ID constants # # TaskDialog Common Button Return Values #: `TD_BUTTON_OK <geosoft.gxapi.TD_BUTTON_OK>` pressed. TD_ID_OK = 1 #: `TD_BUTTON_CANCEL <geosoft.gxapi.TD_BUTTON_CANCEL>` pressed. TD_ID_CANCEL = 2 #: `TD_BUTTON_RETRY <geosoft.gxapi.TD_BUTTON_RETRY>` pressed. TD_ID_RETRY = 4 #: `TD_BUTTON_YES <geosoft.gxapi.TD_BUTTON_YES>` pressed. TD_ID_YES = 6 #: `TD_BUTTON_NO <geosoft.gxapi.TD_BUTTON_NO>` pressed. TD_ID_NO = 7 #: `TD_BUTTON_CLOSE <geosoft.gxapi.TD_BUTTON_CLOSE>` pressed. TD_ID_CLOSE = 8 # # GXTB Constants # # # TB_SEARCH constants # # `GXTB <geosoft.gxapi.GXTB>` Searching mode #: Random searches in a table. TB_SEARCH_BINARY = 0 #: Linear searches up or down a table (Default). 
TB_SEARCH_LINEAR = 1 # # TB_SORT constants # # `GXTB <geosoft.gxapi.GXTB>` Sorting mode #: Unique values only when sorting. TB_SORT_UNIQUE = 0 #: Allow duplicates when sorting. TB_SORT_ALLOW_DUPLICATES = 1 # # GXTPAT Constants # # # TPAT_STRING_SIZE constants # # Default string sizes. #: Tpat code size TPAT_CODE_SIZE = 21 #: Tpat label size TPAT_LABEL_SIZE = 32 #: Tpat desc size TPAT_DESC_SIZE = 128 #: Tpat symbfont size TPAT_SYMBFONT_SIZE = 32 # # GXTR Constants # # # GXUSERMETA Constants # # # USERMETA_FORMAT constants # # `GXUSERMETA <geosoft.gxapi.GXUSERMETA>` Format Types #: Use the standard type for the system USERMETA_FORMAT_DEFAULT = -1 #: ISO 19139 standard USERMETA_FORMAT_ISO = 0 #: FGDC Metadata Standard USERMETA_FORMAT_FGDC = 1 # # GXVA Constants # # # VA_AVERAGE constants # # `GXVA <geosoft.gxapi.GXVA>` Object to average #: Average the Rows VA_AVERAGE_ROWS = 0 #: Average the Columns VA_AVERAGE_COLUMNS = 1 # # VA_OBJECT constants # # `GXVA <geosoft.gxapi.GXVA>` Object to select #: Row VA_ROW = 0 #: Column VA_COL = 1 # # GXVECTOR3D Constants # # # GXVM Constants # # # GXVOX Constants # # # VOX_DIR constants # # Voxel direction #: X/Y Plane (Fastest) VOX_DIR_XY = 0 #: X/Z Plane (Middle) VOX_DIR_XZ = 1 #: Y/Z Plane (Slowest) VOX_DIR_YZ = 2 # # VOX_DIRECTION constants # # Voxel export direction #: XYZ VOX_3D_DIR_XYZ = 0 #: YXZ VOX_3D_DIR_YXZ = 1 #: XZY VOX_3D_DIR_XZY = 2 #: YZX VOX_3D_DIR_YZX = 3 #: ZXY VOX_3D_DIR_ZXY = 4 #: ZYX VOX_3D_DIR_ZYX = 5 # # VOX_FILTER3D constants # # Voxel filter type #: Specify a file containing the 27-point filter VOX_FILTER3D_FILE = 0 #: Smoothing filter VOX_FILTER3D_SMOOTHING = 1 #: Laplace filter VOX_FILTER3D_LAPLACE = 2 #: X-Gradient filter VOX_FILTER3D_X_GRADIENT = 3 #: Y-Gradient filter VOX_FILTER3D_Y_GRADIENT = 4 #: Z-Gradient filter VOX_FILTER3D_Z_GRADIENT = 5 #: Total-Gradient filter VOX_FILTER3D_TOTAL_GRADIENT = 6 # # VOX_GOCAD_ORIENTATION constants # # GOCAD Orientations #: Normal VOX_GOCAD_ORIENTATIONS_NORMAL = 0 #: 
Inverted (Z) VOX_GOCAD_ORIENTATIONS_INVERTED = 1 #: Normal (ZFirst) VOX_GOCAD_ORIENTATIONS_NORMAL_ZFIRST = 2 #: Inverted (Z) (ZFirst) VOX_GOCAD_ORIENTATIONS_INVERTED_ZFIRST = 3 # # VOX_GRID_LOGOPT constants # # Voxel log gridding options #: Linear VOX_GRID_LOGOPT_LINEAR = 0 #: Log, save as linear VOX_GRID_LOGOPT_LOG_SAVELINEAR = -1 #: Log-linear, save as linear VOX_GRID_LOGOPT_LOGLINEAR_SAVELINEAR = -2 #: Log, save as log VOX_GRID_LOGOPT_LOG_SAVELOG = 1 #: Log-linear, save as log VOX_GRID_LOGOPT_LOGLINEAR_SAVELOG = 2 # # VOX_ORIGIN constants # # Voxel origin #: Bottom corner (standard Geosoft) VOX_ORIGIN_BOTTOM = 0 #: Top corner VOX_ORIGIN_TOP = 1 # # VOX_SLICE_MODE constants # # Voxel export direction #: Linear VOX_SLICE_MODE_LINEAR = 1 #: Nearest VOX_SLICE_MODE_NEAREST = 0 # # VOX_VECTORVOX_IMPORT constants # # Voxel direction #: X, Y and Z VOX_VECTORVOX_XYZ = 0 #: U, V and W VOX_VECTORVOX_UVW = 1 #: Amplitude, Inclination and Declination VOX_VECTORVOX_AID = 2 # # GXVOXD Constants # # # VOXELRENDER_MODE constants # # Render Modes #: Render voxel cells VOXELRENDER_FILL = 0 #: Render wireframe only VOXELRENDER_EDGES = 1 #: Render both voxel cells and wireframe VOXELRENDER_FILL_EDGES = 2 #: Trilinear interpolation VOXELRENDER_SMOOTH = 3 # # GXVOXE Constants # # # VOXE_EVAL constants # # Voxel Evaluation modes #: Nearest value VOXE_EVAL_NEAR = 0 #: Linear Interpolation VOXE_EVAL_INTERP = 1 #: Best Interpolation VOXE_EVAL_BEST = 2 # # GXVULCAN Constants # # # BLOCK_MODEL_VARIABLE_TYPE constants # # Which variables to return from sReadBlockModelVariableInfo #: Return numeric variable names BLOCK_MODEL_NUMERIC_VARIABLE = 1 #: Return string variable names BLOCK_MODEL_STRING_VARIABLE = 2 # # GXVV Constants # # # VV_DOUBLE_CRC_BITS constants # # Number of bits to use in double CRC's #: Exact CRC VV_DOUBLE_CRC_BITS_EXACT = 0 #: Default inaccuracy in double (10 Bits) VV_DOUBLE_CRC_BITS_DEFAULT = 10 #: Maximum number of inaccuracy bits VV_DOUBLE_CRC_BITS_MAX = 51 # # 
# VV_FLOAT_CRC_BITS constants
#
# Number of bits to use in float CRC's

#: Exact CRC
VV_FLOAT_CRC_BITS_EXACT = 0
#: Default inaccuracy in floats (7 Bits)
VV_FLOAT_CRC_BITS_DEFAULT = 7
#: Maximum number of inaccuracy bits
VV_FLOAT_CRC_BITS_MAX = 22

#
# VV_LOG_BASE constants
#
# Type of log to use

#: Base 10
VV_LOG_BASE_10 = 0
#: Base e
VV_LOG_BASE_E = 1

#
# VV_LOGMODE constants
#
# Ways to handle negatives

#: Dummies out value less than the minimum.
VV_LOGMODE_CLIPPED = 0
#: if the data is in the range +/- minimum,
#: it is left alone. Otherwise, the data
#: is divided by the minimum, the log is
#: applied, the minimum is added and the
#: sign is reapplied. Use `log_linear <geosoft.gxapi.GXVV.log_linear>` function
#: if decades in results are required.
VV_LOGMODE_SCALED = 1
#: Any values below the minimum are turned to the minimum.
VV_LOGMODE_CLAMPED = 2
#: Similar to Scaled but using a smoother function. Identical to LogLinear_VV.
VV_LOGMODE_LINEAR = 3

#
# VV_LOOKUP constants
#
# Lookup style

#: Only exact matches are used
VV_LOOKUP_EXACT = 0
#: Nearest match is used (regardless of sampling range)
VV_LOOKUP_NEAREST = 1
#: Interpolate between values (regardless of sampling range)
VV_LOOKUP_INTERPOLATE = 2
#: Use nearest match only if within sampling range
VV_LOOKUP_NEARESTCLOSE = 3
#: Interpolate only if within sampling range
VV_LOOKUP_INTERPCLOSE = 4

#
# VV_MASK constants
#
# Where to mask

#: Inside
VV_MASK_INSIDE = 0
#: Outside
VV_MASK_OUTSIDE = 1

#
# VV_ORDER constants
#
# Specify if the data is monotonically increasing or decreasing.

#: There is no specific data size ordering in the `GXVV <geosoft.gxapi.GXVV>`.
VV_ORDER_NONE = 0
#: Every value is greater than or equal to the previous value.
VV_ORDER_INCREASING = 1
#: Every value is less than or equal to the previous value.
VV_ORDER_DECREASING = 2

#
# VV_SORT constants
#
# Sort order

#: Ascending
VV_SORT_ASCENDING = 0
#: Descending
VV_SORT_DESCENDING = 1

#
# VV_WINDOW constants
#
# How to handle `GXVV <geosoft.gxapi.GXVV>` limits

#: Dummy values outside the limits
VV_WINDOW_DUMMY = 0
#: Set values outside the limits to the limits
VV_WINDOW_LIMIT = 1

#
# GXWA Constants
#

#
# WA_ENCODE constants
#
# `GXWA <geosoft.gxapi.GXWA>` Encode defines

#: Current Ansi Code Page (Conversion from UTF-8 data, if an existing BOM header found with `WA_APPEND <geosoft.gxapi.WA_APPEND>`,
#: encoding will switch to `WA_ENCODE_UTF8 <geosoft.gxapi.WA_ENCODE_UTF8>`)
WA_ENCODE_ANSI = 0
#: Write all data without any conversion check
WA_ENCODE_RAW = 1
#: :ref:`UTF8` (If no existing BOM header found with `WA_APPEND <geosoft.gxapi.WA_APPEND>`, encoding will switch to `WA_ENCODE_ANSI <geosoft.gxapi.WA_ENCODE_ANSI>`)
WA_ENCODE_UTF8 = 2
#: :ref:`UTF8` w.o. header (will assume :ref:`UTF8` encoding if `WA_APPEND <geosoft.gxapi.WA_APPEND>` is used)
WA_ENCODE_UTF8_NOHEADER = 3
#: UTF16 w.o.
header (will assume UTF16 encoding if `WA_APPEND <geosoft.gxapi.WA_APPEND>` is used) WA_ENCODE_UTF16_NOHEADER = 4 # # WA_OPEN constants # # `GXWA <geosoft.gxapi.GXWA>` Open defines #: Create new file WA_NEW = 0 #: Append to existing file WA_APPEND = 1 # # GXACQUIRE Constants # # # ACQUIRE_SEL constants # # Type of Selection #: Holes ACQUIRE_SEL_HOLES = 0 #: Point ACQUIRE_SEL_POINT = 1 # # GXARCDB Constants # # # ARC_SELTBL_TYPE constants # # Describes what kind of table was selected #: Standalone Table ARC_SELTBL_STANDALONE = 0 #: Feature Layer ARC_SELTBL_FEATURELAYER = 1 #: User Canceled ARC_SELTBL_CANCELED = -1 # # GXARCDH Constants # # # GXARCMAP Constants # # # ARCMAP_LOAD_FLAGS constants # # Flags that can be combined and passed to iLoadMap_ARCMAP #: If an existing frame is found delete it ARCMAP_LOAD_DELFRAME = 1 #: If an existing layer is found delete it ARCMAP_LOAD_DELLAYER = 2 #: If an existing frame is found add new layers to it ARCMAP_LOAD_EXISTFRAME = 4 #: If an existing layer is found make a copy ARCMAP_LOAD_COPYLAYER = 8 #: Hide all other existing layers in frame ARCMAP_LOAD_HIDESIBLINGS = 16 #: Prefix the map filename part as part of the frame name ARCMAP_LOAD_PREFIXMAPFRAME = 32 #: Prefix the map filename part as part of the layer name ARCMAP_LOAD_PREFIXMAPLAYER = 64 #: Will render all views in single layer with the data view defining the coordinate system ARCMAP_LOAD_MERGETOSINGLEVIEW = 128 #: Load everything into the current data frame ARCMAP_LOAD_INTOCURRENTFRAME = 256 #: Use the map only for sizing data frames in layout, only load extra datasets. ARCMAP_LOAD_NOMAPLAYERS = 512 #: Activates the main quickmap layer when done (e.g. 3D Viewer) ARCMAP_LOAD_ACTIVATE = 1024 #: New method for loading maps introduced in 7.1. Will mimic what happens in montaj (i.e. base groups and 3D become graphics and views gets split into separate LYRs). ARCMAP_LOAD_NEW = 2048 #: Use a provided name tag as prefix when naming a newly created map layer. 
ARCMAP_LOAD_NAMETAGISPREFIX = 4096 # # GXARCPY Constants # # # GXARCSYS Constants # # # GXBIGRID Constants # # # GXCHIMERA Constants # # # CHIMERA_MAX_CHAN constants # # Maximum channels in Chimera database #: Chimera max chan CHIMERA_MAX_CHAN = 128 # # CHIMERA_PLOT constants # # Chimera plot type #: Rose CHIMERA_PLOT_ROSE = 0 #: Pie CHIMERA_PLOT_PIE = 1 #: Bar CHIMERA_PLOT_BAR = 2 # # GXCOM Constants # # # COM_BAUD constants # # Connection Speed #: 110 COM_BAUD_110 = 0 #: 300 COM_BAUD_300 = 1 #: 600 COM_BAUD_600 = 2 #: 1200 COM_BAUD_1200 = 3 #: 2400 COM_BAUD_2400 = 4 #: 4800 COM_BAUD_4800 = 5 #: 9600 COM_BAUD_9600 = 6 #: 14400 COM_BAUD_14400 = 7 #: 19200 COM_BAUD_19200 = 8 #: 56000 COM_BAUD_56000 = 9 #: 57600 COM_BAUD_57600 = 10 #: 115200 COM_BAUD_115200 = 11 #: 128000 COM_BAUD_128000 = 12 #: 256000 COM_BAUD_256000 = 13 #: 38400 COM_BAUD_38400 = 14 # # COM_DATASIZE constants # # Data Bits #: Five COM_DATASIZE_FIVE = 5 #: Six COM_DATASIZE_SIX = 6 #: Seven COM_DATASIZE_SEVEN = 7 #: Eight COM_DATASIZE_EIGHT = 8 # # COM_FLOWCONTROL constants # # Flow Control Options #: None COM_FLOWCONTROL_NONE = 0 #: Rts cts COM_FLOWCONTROL_RTS_CTS = 1 #: Dtr dsr COM_FLOWCONTROL_DTR_DSR = 2 #: Xon xoff COM_FLOWCONTROL_XON_XOFF = 3 # # COM_PARITY constants # # Parity #: Even COM_PARITY_EVEN = 0 #: Nark COM_PARITY_NARK = 1 #: None COM_PARITY_NONE = 2 #: Odd COM_PARITY_ODD = 3 #: Space COM_PARITY_SPACE = 4 # # COM_STOPBITS constants # # Stop Bits #: One COM_STOPBITS_ONE = 0 #: One5 COM_STOPBITS_ONE5 = 1 #: Two COM_STOPBITS_TWO = 2 # # GXDCOL Constants # # # BRIGHT constants # # Brightness type #: Set the brightness of all the layers BRIGHT_ALL = 0 #: Set the brightness of the current layer BRIGHT_LAYER = 1 # # BRIGHTNESS_TYPES constants # # Detrending option #: Can set the brightness only for object as a whole BRIGHTNESS_ALL = 0 #: Can set the brightness for object as a whole and for individual layers BRIGHTNESS_ALL_AND_LAYERS = 1 # # DCOL_TYPE constants # # Layer type #: Unknown 
DCOL_TYPE_UNKNOWN = 0 #: Grid DCOL_TYPE_GRID = 1 #: Symbols DCOL_TYPE_SYMBOLS = 2 #: Voxel DCOL_TYPE_VOXEL = 3 #: Vector voxel DCOL_TYPE_VECTOR_VOXEL = 4 # # GXDGW Constants # # # DGW_OBJECT constants # # Dialog object defines # INFO TYPE EDIT FEDIT LEDIT CEDIT EBUT # ========= ===== ===== ===== ===== ===== # LABEL RW RW RW RW RW R - can use GetInfo_DGW # TEXT RW RW RW RW . W - can use `set_info <geosoft.gxapi.GXDGW.set_info>` # PATH . RW . . . # FILEPATH . RW . . . # LISTVAL . . R . . # LISTALIAS . . RW . . #: The text label tied to each Dialog component. DGW_LABEL = 0 #: The edit field text. DGW_TEXT = 1 #: The file edit path. DGW_PATH = 2 #: The complete file name, path included. DGW_FILEPATH = 3 #: The alias value associated with the list entry. DGW_LISTVAL = 4 #: The text value associated with the list entry. DGW_LISTALIAS = 5 #: The extension associated with a filename entry. DGW_EXT = 7 #: Hide the button or entry and its label, if string is not "0" DGW_HIDE = 8 # # GXDH Constants # # # DH_DEFAULT_FILENAMES constants # # Default filenames #: Dh default rockcode file DH_DEFAULT_ROCKCODE_FILE = "agso.csv" #: Dh default structurecode file DH_DEFAULT_STRUCTURECODE_FILE = "structcodes.csv" # # STR_DH_HOLES constants # # This declares the size of the string used in various # `GXDH <geosoft.gxapi.GXDH>` GXs to store all the currently selected holes, as input to the two-panel # selection tool. This should be big enough for 65,000 16-character hole names! 
#: Str dh holes STR_DH_HOLES = 1048576 # # DH_COMP_CHOICE constants # # Composition #: User is done DH_COMP_DONE = 0 #: User canceled DH_COMP_CANCEL = -1 #: User chose to select an interval interactively DH_COMP_SELECT = 1 #: User chose to refresh DH_COMP_REFRESH = 2 # # DH_COMPSTDB_HOLSEL constants # # Composite Hole Selection #: All DH_COMPSTDB_HOLSEL_ALL = 0 #: Selected DH_COMPSTDB_HOLSEL_SELECTED = 1 # # DH_COMPSTDB_INTSEL constants # # Composite Interval #: Fixed DH_COMPSTDB_INTSEL_FIXED = 0 #: Lithology DH_COMPSTDB_INTSEL_LITHOLOGY = 1 #: Bestfitlith DH_COMPSTDB_INTSEL_BESTFITLITH = 2 #: Intfile DH_COMPSTDB_INTSEL_INTFILE = 3 # # DH_DATA constants # # What to import #: Dipazimuth DH_DATA_DIPAZIMUTH = 0 #: Eastnorth DH_DATA_EASTNORTH = 1 #: Fromto DH_DATA_FROMTO = 2 #: Point DH_DATA_POINT = 3 #: Collar DH_DATA_COLLAR = 4 #: The type is not known DH_DATA_UNKNOWN = 100 # # DH_DEFINE_PLAN constants # # Plans #: Dh define plan DH_DEFINE_PLAN = 1 # # DH_DEFINE_SECT constants # # Types of Sections #: Ns DH_DEFINE_SECT_NS = 1 #: Ew DH_DEFINE_SECT_EW = 2 #: Angled DH_DEFINE_SECT_ANGLED = 3 # # DH_EXP constants # # Type of Export #: Csv DH_EXP_CSV = 0 #: Ascii DH_EXP_ASCII = 1 #: Access DH_EXP_ACCESS = 2 #: Collars as points DH_EXP_SHP = 3 #: To Surpace Geological database (special format ACCESS) DH_EXP_SURPAC = 4 #: Hole traces as polylines DH_EXP_SHP_TRACES = 5 # # DH_HOLES constants # # Holes to select #: All DH_HOLES_ALL = 0 #: Selected DH_HOLES_SELECTED = 1 # # DH_MASK constants # # Masks #: Append DH_MASK_APPEND = 0 #: New DH_MASK_NEW = 1 # # DH_PLOT constants # # Type of Plot #: Plan DH_PLOT_PLAN = 0 #: Section DH_PLOT_SECTION = 1 #: Striplog DH_PLOT_STRIPLOG = 2 #: Hole traces DH_PLOT_HOLE_TRACES = 3 #: 3d DH_PLOT_3D = 4 #: Section stack DH_PLOT_SECTION_STACK = 5 #: Section fence DH_PLOT_SECTION_FENCE = 6 #: Section crooked DH_PLOT_SECTION_CROOKED = 7 # # DH_SECT_PAGE constants # # Sections #: Section DH_SECT_PAGE_SECTION = 1 # # DH_SURFACE constants # # 
Surface selection for creation of geological # top or bottom surfaces. #: First layer from DH_SURFACE_FIRST_LAYER_FROM = 0 #: First layer to DH_SURFACE_FIRST_LAYER_TO = 1 #: Second layer from DH_SURFACE_SECOND_LAYER_FROM = 2 #: Second layer to DH_SURFACE_SECOND_LAYER_TO = 3 #: Last layer from DH_SURFACE_LAST_LAYER_FROM = 4 #: Last layer to DH_SURFACE_LAST_LAYER_TO = 5 # # DIP_CONVENTION constants # # Dip convention #: Negative DIP_CONVENTION_NEGATIVE = -1 #: Positive DIP_CONVENTION_POSITIVE = 1 # # DH_DESURVEY constants # # Desurvey method #: Rad curve DH_DESURVEY_RAD_CURVE = 0 #: Polynomial DH_DESURVEY_POLYNOMIAL = 1 #: Straight seg DH_DESURVEY_STRAIGHT_SEG = 2 # # GXDMPPLY Constants # # # GXDOCU Constants # # # DOCU_OPEN constants # # How to open document #: View DOCU_OPEN_VIEW = 0 #: Edit DOCU_OPEN_EDIT = 1 # # GXDU Constants # # # DB_DUP constants # # Duplicate Types #: First DB_DUP_FIRST = 1 #: Average DB_DUP_AVERAGE = 2 #: Minimum DB_DUP_MINIMUM = 3 #: Maximum DB_DUP_MAXIMUM = 4 #: Median DB_DUP_MEDIAN = 5 #: Last DB_DUP_LAST = 6 # # DB_DUPEDIT constants # # Duplicate Edit Flags #: Single DB_DUPEDIT_SINGLE = 0 #: All DB_DUPEDIT_ALL = 1 # # DU_CHANNELS constants # # Channels to Display #: Displayed DU_CHANNELS_DISPLAYED = 0 #: All DU_CHANNELS_ALL = 1 # # DU_EXPORT constants # # Export Type #: Csv DU_EXPORT_CSV = 0 #: Oddf DU_EXPORT_ODDF = 1 #: Post pc DU_EXPORT_POST_PC = 2 #: Post unix DU_EXPORT_POST_UNIX = 3 # # DU_FILL constants # # Filling Options #: Inside DU_FILL_INSIDE = 0 #: Outside DU_FILL_OUTSIDE = 1 # # DU_IMPORT constants # # Import Mode #: Append DU_IMPORT_APPEND = 0 #: Replace DU_IMPORT_REPLACE = 1 #: Merge DU_IMPORT_MERGE = 2 #: Merge append DU_IMPORT_MERGE_APPEND = 3 # # DU_INTERP constants # # Inside Interpolation Method #: Nearest DU_INTERP_NEAREST = 1 #: Linear DU_INTERP_LINEAR = 2 #: Cubic DU_INTERP_CUBIC = 3 #: Akima DU_INTERP_AKIMA = 4 #: Predict DU_INTERP_PREDICT = 5 # # DU_INTERP_EDGE constants # # Edge Interpolation Method #: None 
DU_INTERP_EDGE_NONE = 0 #: Same DU_INTERP_EDGE_SAME = 1 #: Nearest DU_INTERP_EDGE_NEAREST = 2 #: Linear DU_INTERP_EDGE_LINEAR = 3 # # DU_LAB_TYPE constants # # File Types #: The delimiter string identifies #: characters to be used as delimiters. Use C style escape #: sequences to identify non-printable characters. The #: default delimiters for FREE format files are " \\t,". DU_LAB_TYPE_FREE = 1 #: For COMMA type files, the delimiter string identifies #: characters to be removed before comma delimiting. The #: default for COMMA delimited files is " \\t". DU_LAB_TYPE_COMMA = 2 # # DU_LEVEL constants # # Leveling Options #: Extract line corrections DU_LEVEL_LINES = 0 #: Extract tie corrections DU_LEVEL_TIES = 1 #: Extract all corrections DU_LEVEL_ALL = 2 # # DU_LINEOUT constants # # Lineout Options (du.h) #: Single DU_LINEOUT_SINGLE = 0 #: Multiple DU_LINEOUT_MULTIPLE = 1 # # DU_FEATURE_TYPE_OUTPUT constants # # Export to geodatabase feature type (du.h) #: Point DU_FEATURE_TYPE_OUTPUT_POINT = 0 #: Line DU_FEATURE_TYPE_OUTPUT_LINE = 1 # # DU_GEODATABASE_EXPORT_TYPE constants # # Export to geodatabase overwrite mode(du.h) #: Overwrite geodatabase DU_GEODATABASE_EXPORT_TYPE_OVERWRITE_GEODATABASE = 0 #: Overwrite featureclass DU_GEODATABASE_EXPORT_TYPE_OVERWRITE_FEATURECLASS = 1 #: Append DU_GEODATABASE_EXPORT_TYPE_APPEND = 2 # # DU_LINES constants # # Lines to display #: Displayed DU_LINES_DISPLAYED = 0 #: Selected DU_LINES_SELECTED = 1 #: All DU_LINES_ALL = 2 # # DU_LOADLTB constants # # Load table options #: Replace DU_LOADLTB_REPLACE = 0 #: Append DU_LOADLTB_APPEND = 1 # # DU_LOOKUP constants # # Lookup Mode #: Requires an exact match in all indexes. #: Results will dummy if Indexes are not found. DU_LOOKUP_EXACT = 0 #: Requires that the first index match exactly. #: The nearest second index will be used for the finding #: the lookup value. #: The results will be dummy only if the first index #: does not have a match. 
DU_LOOKUP_NEAREST = 1 #: The same as _NEAREST, except that the value will #: be interpolated between the two nearest #: framing values in the table. DU_LOOKUP_INTERPOLATE = 2 #: Same as _NEAREST mode except that the target #: value must be within the CLOSE distance to a #: table value. #: a) the primary index channel for single index #: lookups; #: b) the secondary index channel for #: double index lookups. #: Values not in data spacing are dummy. DU_LOOKUP_NEARESTCLOSE = 3 #: Same as _INTERPOLATE mode except that the target #: value must be within the CLOSE distance to a #: table value. #: a) the primary index channel for single index #: lookups; #: b) the secondary index channel for #: double index lookups. #: Values not in data spacing are dummy. DU_LOOKUP_INTERPCLOSE = 4 #: Interpolate between values, dummy beyond two ends DU_LOOKUP_INTERPOLATE_DUMMYOUTSIDE = 5 #: Interpolate between values, constant end values beyond two ends DU_LOOKUP_INTERPOLATE_CONSTOUTSIDE = 6 #: Interpolate between values, extrapolate beyond two ends DU_LOOKUP_INTERPOLATE_EXTPLOUTSIDE = 7 #: Maximum option value DU_LOOKUP_MAXOPTION = 8 # # DU_MASK constants # # Masking Options #: Inside DU_MASK_INSIDE = 0 #: Outside DU_MASK_OUTSIDE = 1 # # DU_MERGE constants # # Merge flags #: Append DU_MERGE_APPEND = 0 # # DU_MODFID constants # # Fid Update Options #: Will insert fid range by moving data. Inserted #: range will always be dummied out. If the insertion point #: is before start of data, the fid start is changed. DU_MODFID_INSERT = 0 #: Will delete the range of fids. DU_MODFID_DELETE = 1 #: Is like INSERT, except that it is only used to #: add fids to the start or end of the existing data. The #: data is not moved with repect to the current fid locations. 
DU_MODFID_APPEND = 2 # # DU_MOVE constants # # Move Style #: Move input to absolute value in control channel DU_MOVE_ABSOLUTE = 0 #: Subtract control channel from input channel DU_MOVE_MINUS = 1 #: Add control channel to input channel DU_MOVE_PLUS = 2 #: data is NOT moved, but dummies in the input are interpolated #: based on the control channel, assuming both the input and control #: vary linearly inside the gaps DU_MOVE_INTERP = 3 # # DU_REFID constants # # Interpolation mode #: 0 DU_REFID_LINEAR = 0 #: 1 DU_REFID_MINCUR = 1 #: 2 DU_REFID_AKIMA = 2 #: 3 DU_REFID_NEAREST = 3 # # DU_SORT constants # # Sort Direction #: Ascending DU_SORT_ASCENDING = 0 #: Descending DU_SORT_DESCENDING = 1 # # DU_SPLITLINE constants # # Sort Direction #: Xyposition DU_SPLITLINE_XYPOSITION = 0 #: Sequential DU_SPLITLINE_SEQUENTIAL = 1 #: Toversions DU_SPLITLINE_TOVERSIONS = 2 # # DU_STORAGE constants # # Storage Type #: Line DU_STORAGE_LINE = 0 #: Group DU_STORAGE_GROUP = 1 # # QC_PLAN_TYPE constants # # Type Plan #: Qc plan surveyline QC_PLAN_SURVEYLINE = 0 #: Qc plan tieline QC_PLAN_TIELINE = 1 #: Qc plan bothlines QC_PLAN_BOTHLINES = 2 # # DU_DISTANCE_CHANNEL_TYPE constants # # Distance channel direction type #: Zero distance is always at the start of the line. DU_DISTANCE_CHANNEL_MAINTAIN_DIRECTION = 0 #: Put zero at the end of the line with min X if X changes most, or min Y if Y changes most DU_DISTANCE_CHANNEL_CARTESIAN_COORDINATES = 1 # # DU_DIRECTGRID_METHOD constants # # How to calculate the cell values for direct gridding. 
#: Du directgrid min DU_DIRECTGRID_MIN = 0 #: Du directgrid max DU_DIRECTGRID_MAX = 1 #: Du directgrid mean DU_DIRECTGRID_MEAN = 2 # # GXDXFI Constants # # # GXEDB Constants # # # MAX_PROF_WND constants # # The following value should be kept synchronized with the value defined in src\\geoguilib\\stdafx.h #: Max prof wnd MAX_PROF_WND = 5 # # EDB_PATH constants # # Four forms #: d:\\directory\\file.gdb EDB_PATH_FULL = 0 #: \\directory\\file.gdb EDB_PATH_DIR = 1 #: File.gdb EDB_PATH_NAME_EXT = 2 #: File EDB_PATH_NAME = 3 # # EDB_PROF constants # # Profile data #: DB_SYMB EDB_PROF_I_CHANNEL = 0 #: 0 - no line #: 1 - solid #: 2 - long dash #: 3 - short dash EDB_PROF_I_LINE_STYLE = 1 #: 0 - no line #: 1 - normal #: 2 - medium #: 3 - heavy EDB_PROF_I_LINE_WEIGHT = 2 #: 0 - no symbol #: 1 - rectangle #: 2 - circle #: 3 - triangle #: 4 - diamond #: 5 - x #: 6 - + EDB_PROF_I_SYMBOL = 3 #: 0 - normal #: 1 - large EDB_PROF_I_SYMBOL_WEIGHT = 4 #: `GXMVIEW <geosoft.gxapi.GXMVIEW>` Color Value EDB_PROF_I_COLOR = 5 #: 0-no, 1-yes EDB_PROF_I_WRAP = 6 #: 0-no, 1-yes EDB_PROF_I_BREAK_ON_DUMMY = 7 #: 0-no, 1-yes EDB_PROF_I_GRID_LINE = 8 #: 0-no, 1-yes EDB_PROF_R_GRID_LINE_INTERVAL = 9 #: 0-Linear, 1-Log, 2-LogLinear EDB_PROF_I_LOG = 10 #: Minimum Value EDB_PROF_R_LOG_MINIMUM = 11 #: 0-no, 1-yes EDB_PROF_I_SAMESCALE = 12 #: 0 - current line #: -1 - previous line #: -2 - next line EDB_PROF_I_SOURCELINE = 13 #: 0 - scale to fit for each line #: 1 - fix the range #: 2 - fix the scale, center the range EDB_PROF_I_SCALEOPTION = 14 #: 0-no, 1-yes EDB_PROF_I_SAMERANGE = 15 # # EDB_PROFILE_SCALE constants # # Profile Scale Options #: Linear EDB_PROFILE_SCALE_LINEAR = 0 #: Log EDB_PROFILE_SCALE_LOG = 1 #: Loglinear EDB_PROFILE_SCALE_LOGLINEAR = 2 # # EDB_REMOVE constants # # How to handle pending changes in document #: Save EDB_REMOVE_SAVE = 0 #: Prompt EDB_REMOVE_PROMPT = 1 #: Discard EDB_REMOVE_DISCARD = 2 # # EDB_UNLOAD constants # # What type of prompt #: No prompt EDB_UNLOAD_NO_PROMPT = 0 
#: Single prompt
EDB_UNLOAD_SINGLE_PROMPT = 1
#: Obsolete
EDB_UNLOAD_MULTI_PROMPT = 2

#
# EDB_WINDOW_POSITION constants
#
# Window Positioning Options

#: Docked
EDB_WINDOW_POSITION_DOCKED = 0
#: Floating
EDB_WINDOW_POSITION_FLOATING = 1

#
# EDB_WINDOW_STATE constants
#
# Window State Options

#: Edb window restore
EDB_WINDOW_RESTORE = 0
#: Edb window minimize
EDB_WINDOW_MINIMIZE = 1
#: Edb window maximize
EDB_WINDOW_MAXIMIZE = 2

#
# EDB_YAXIS_DIRECTION constants
#
# Window State Options

#: Edb yaxis normal
EDB_YAXIS_NORMAL = 0
#: Edb yaxis inverted
EDB_YAXIS_INVERTED = 1

#
# GXEDOC Constants
#

#
# EDOC_PATH constants
#
# Four forms

#: d:\\directory\\file.gdb
EDOC_PATH_FULL = 0
#: \\directory\\file.gdb
EDOC_PATH_DIR = 1
#: file.gdb
EDOC_PATH_NAME_EXT = 2
#: file
EDOC_PATH_NAME = 3

#
# EDOC_TYPE constants
#
# Available generic document types

#: `GXGMSYS <geosoft.gxapi.GXGMSYS>` 3D Model
EDOC_TYPE_GMS3D = 0
#: Voxel
EDOC_TYPE_VOXEL = 1
#: Voxel Inversion
EDOC_TYPE_VOXEL_INVERSION = 2
#: `GXGMSYS <geosoft.gxapi.GXGMSYS>` 2D Model
EDOC_TYPE_GMS2D = 3
#: Geosurface
EDOC_TYPE_GEOSURFACE = 4

#
# EDOC_UNLOAD constants
#
# What type of prompt

#: No prompt
EDOC_UNLOAD_NO_PROMPT = 0
#: Prompt
EDOC_UNLOAD_PROMPT = 1

#
# EDOC_WINDOW_POSITION constants
#
# Window Positioning Options

#: Docked
EDOC_WINDOW_POSITION_DOCKED = 0
#: Floating
EDOC_WINDOW_POSITION_FLOATING = 1

#
# EDOC_WINDOW_STATE constants
#
# Window State Options

#: Edoc window restore
EDOC_WINDOW_RESTORE = 0
#: Edoc window minimize
EDOC_WINDOW_MINIMIZE = 1
#: Edoc window maximize
EDOC_WINDOW_MAXIMIZE = 2

#
# GMS3D_MODELTYPE constants
#
# Available model types

#: Depth Model
GMS3D_MODELTYPE_DEPTH = 0
#: Time Model
GMS3D_MODELTYPE_TIME = 1

#
# GMS2D_MODELTYPE constants
#
# Available model types

#: Depth Model
GMS2D_MODELTYPE_DEPTH = 0
#: Time Model
GMS2D_MODELTYPE_TIME = 1

#
# GXEMAP Constants
#

#
# EMAP_FONT constants
#
# Font Types

#: Tt
EMAP_FONT_TT = 0
#: Gfn
EMAP_FONT_GFN = 1

#
# EMAP_PATH constants
#
# Four forms

#: d:\\directory\\file.gdb
EMAP_PATH_FULL = 0
#: \\directory\\file.gdb
EMAP_PATH_DIR = 1
#: File.gdb
EMAP_PATH_NAME_EXT = 2
#: File
EMAP_PATH_NAME = 3

#
# EMAP_REDRAW constants
#
# Redraw Options

#: No
EMAP_REDRAW_NO = 0
#: Yes
EMAP_REDRAW_YES = 1

#
# EMAP_REMOVE constants
#
# How to handle pending changes in document

#: Save
EMAP_REMOVE_SAVE = 0
#: Prompt
EMAP_REMOVE_PROMPT = 1
#: Discard
EMAP_REMOVE_DISCARD = 2

#
# EMAP_TRACK constants
#
# Tracking Options

#: Erase Object after you return?
EMAP_TRACK_ERASE = 1
#: Allow use of right-menu
EMAP_TRACK_RMENU = 2
#: If user holds down left-mouse, will return many times
EMAP_TRACK_CYCLE = 4

#
# EMAP_VIEWPORT constants
#
# Tracking Options

#: Normal map usage
EMAP_VIEWPORT_NORMAL = 0
#: Zoom Mode
EMAP_VIEWPORT_BROWSEZOOM = 1
#: Change Area Of Interest Mode
EMAP_VIEWPORT_BROWSEAOI = 2

#
# EMAP_WINDOW_POSITION constants
#
# Window Positioning Options

#: Docked
EMAP_WINDOW_POSITION_DOCKED = 0
#: Floating
EMAP_WINDOW_POSITION_FLOATING = 1

#
# EMAP_WINDOW_STATE constants
#
# Window State Options

#: Emap window restore
EMAP_WINDOW_RESTORE = 0
#: Emap window minimize
EMAP_WINDOW_MINIMIZE = 1
#: Emap window maximize
EMAP_WINDOW_MAXIMIZE = 2

#
# LAYOUT_VIEW_UNITS constants
#
# Base layout display units

#: Millimeters
LAYOUT_VIEW_MM = 0
#: Centimeters
LAYOUT_VIEW_CM = 1
#: Inches
LAYOUT_VIEW_IN = 2

#
# GXEMAPTEMPLATE Constants
#

#
# EMAPTEMPLATE_PATH constants
#
# Four forms

#: d:\\directory\\file.gdb
EMAPTEMPLATE_PATH_FULL = 0
#: \\directory\\file.gdb
EMAPTEMPLATE_PATH_DIR = 1
#: file.gdb
EMAPTEMPLATE_PATH_NAME_EXT = 2
#: file
EMAPTEMPLATE_PATH_NAME = 3

#
# EMAPTEMPLATE_TRACK constants
#
# Tracking Options

#: Erase Object after you return?
EMAPTEMPLATE_TRACK_ERASE = 1
#: Allow use of right-menu
EMAPTEMPLATE_TRACK_RMENU = 2
#: If user holds down left-mouse, will return many times
EMAPTEMPLATE_TRACK_CYCLE = 4

#
# EMAPTEMPLATE_WINDOW_POSITION constants
#
# Window Positioning Options

#: Docked
EMAPTEMPLATE_WINDOW_POSITION_DOCKED = 0
#: Floating
EMAPTEMPLATE_WINDOW_POSITION_FLOATING = 1

#
# EMAPTEMPLATE_WINDOW_STATE constants
#
# Window State Options

#: Emaptemplate window restore
EMAPTEMPLATE_WINDOW_RESTORE = 0
#: Emaptemplate window minimize
EMAPTEMPLATE_WINDOW_MINIMIZE = 1
#: Emaptemplate window maximize
EMAPTEMPLATE_WINDOW_MAXIMIZE = 2

#
# GXEUL3 Constants
#

#
# EUL3_RESULT constants
#
# Euler result types

#: X
EUL3_RESULT_X = 1
#: Y
EUL3_RESULT_Y = 2
#: Depth
EUL3_RESULT_DEPTH = 3
#: Background
EUL3_RESULT_BACKGROUND = 4
#: Deptherror
EUL3_RESULT_DEPTHERROR = 5
#: Locationerror
EUL3_RESULT_LOCATIONERROR = 6
#: Windowx
EUL3_RESULT_WINDOWX = 7
#: Windowy
EUL3_RESULT_WINDOWY = 8

#
# GXEXP Constants
#

#
# GXFFT Constants
#

#
# FFT_DETREND constants
#
# Detrending option

#: No trend remove
FFT_DETREND_NONE = 0
#: Detrend order 1 using only two end points
FFT_DETREND_ENDS = 1
#: Detrend order 1 using all data points
FFT_DETREND_ALL = 2
#: Remove mean value
FFT_DETREND_MEAN = 3

#
# FFT_WHITE_NOISE constants
#
# Add white noise option

#: Add the input value to the real part of all non-DC components of the current power spectrum
FFT_WHITE_NOISE_ADD = 0
#: Set non-DC components to the input value and set the DC value to 0.0
FFT_WHITE_NOISE_SET = 1

#
# GXFFT2 Constants
#

#
# FFT2_PG constants
#
# Pager Direction

#: Forward
FFT2_PG_FORWARD = 0
#: Inverse
FFT2_PG_INVERSE = 1

#
# GXFLT Constants
#

#
# GXGD Constants
#

#
# GD_STATUS constants
#
# Grid open mode

#: Readonly
GD_STATUS_READONLY = 0
#: New
GD_STATUS_NEW = 1
#: Old
GD_STATUS_OLD = 2

#
# GXGER Constants
#

#
# GXGMSYS Constants
#

#
# GXGU Constants
#

#
# EM_ERR constants
#
# Error Scaling

#: Unscaled
EM_ERR_UNSCALED = 0
#: Logscaling
EM_ERR_LOGSCALING = 1
#
# EM_INV constants
#
# Type of Inversion

#: Inphase
EM_INV_INPHASE = 0
#: Quadrature
EM_INV_QUADRATURE = 1
#: Both
EM_INV_BOTH = 2

#
# EMPLATE_DOMAIN constants
#
# Type of Domain

#: Emplate frequency
EMPLATE_FREQUENCY = 1
#: Emplate time
EMPLATE_TIME = 9

#
# EMPLATE_TX constants
#
# Orientation

#: X
EMPLATE_TX_X = 1
#: Y
EMPLATE_TX_Y = 2
#: Z
EMPLATE_TX_Z = 3

#
# GU_DAARC500_DATATYPE constants
#
# Supported serial data types for import

#: Gu daarc500 unknown
GU_DAARC500_UNKNOWN = 0
#: Gu daarc500 generic ascii
GU_DAARC500_GENERIC_ASCII = 1
#: Gu daarc500 gps
GU_DAARC500_GPS = 2
#: Gu daarc500 gr820 256d
GU_DAARC500_GR820_256D = 3
#: Gu daarc500 gr820 256du
GU_DAARC500_GR820_256DU = 4
#: Gu daarc500 gr820 512du
GU_DAARC500_GR820_512DU = 5
#: Gu daarc500 nav
GU_DAARC500_NAV = 6

#
# PEAKEULER_XY constants
#
# Fit Options

#: Nofit
PEAKEULER_XY_NOFIT = 0
#: Fit
PEAKEULER_XY_FIT = 1

#
# GXGUI Constants
#

#
# AOI_RETURN_STATE constants
#
# AOI query return state

#: User canceled
AOI_RETURN_CANCEL = -1
#: User chose to continue with no AOI defined or available
AOI_RETURN_NODEFINE = 0
#: User chose to continue and defined valid AOI parameters
AOI_RETURN_DEFINE = 1

#
# COORDSYS_MODE constants
#
# Coordinate system wizard `GXIPJ <geosoft.gxapi.GXIPJ>` types allowed on return.
# The wizard present three types of projections for selection
# by the user, Geographic (GCS), Projected (PCS), and Unknown.
# (Unknown requires only that the units be defined.)
# The Editable flag must be Yes for this option to take effect,
# and is overridden internally if the user's license does not
# allow modification of projections (e.g. the OM Viewer).

#: Allow Geographic (GCS), Projected (PCS), and Unknown
COORDSYS_MODE_ALL = 0
#: Allow only Geographic (GCS)
COORDSYS_MODE_GCS = 1
#: Allow only Projected (PCS)
COORDSYS_MODE_PCS = 2
#: Allow only Geographic (GCS) and Projected (PCS)
COORDSYS_MODE_GCS_PCS = 3
#: Allow only Projected (PCS), or Unknown
COORDSYS_MODE_PCS_UNKNOWN = 4

#
# DAT_TYPE constants
#
# Type of files (grids, images) to support

#: Display only grid formats
DAT_TYPE_GRID = 0
#: Display only image formats
DAT_TYPE_IMAGE = 1
#: Displays both grids and image formats
DAT_TYPE_GRID_AND_IMAGE = 2

#
# FILE_FILTER constants
#
# File filters

#: All files ``*.*`` ANYWHERE
FILE_FILTER_ALL = 1
#: Geosoft Database ``*.gdb`` LOCAL
FILE_FILTER_GDB = 2
#: Geosoft Executable ``*.gx`` GEOSOFT
FILE_FILTER_GX = 3
#: Geosoft Script ``*.gs`` BOTH
FILE_FILTER_GS = 4
#: Parameter files ``*.ini`` GEOSOFT
FILE_FILTER_INI = 5
#: Oasis Menu files ``*.omn`` GEOSOFT
FILE_FILTER_OMN = 6
#: Oasis View files ``*.vu`` LOCAL
FILE_FILTER_VU = 7
#: Oasis Map files ``*.map`` LOCAL
FILE_FILTER_MAP = 8
#: Projection file ``*.prj`` LOCAL
FILE_FILTER_PRJ = 9
#: Configuration file ``*.con`` LOCAL
FILE_FILTER_CON = 10
#: Sushi MNU files ``*.mnu`` GEOSOFT
FILE_FILTER_MNU = 11
#: PDF files ``*.pdf`` GEOSOFT
FILE_FILTER_PDF = 12
#: Geosoft PLT files ``*.plt`` LOCAL
FILE_FILTER_PLT = 13
#: Geosoft workspace ``*.gws`` LOCAL
FILE_FILTER_GWS = 14
#: Aggregate ``*.agg`` LOCAL
FILE_FILTER_AGG = 15
#: Color table ``*.tbl`` GEOSOFT
FILE_FILTER_TBL = 16
#: Zone ``*.zon`` LOCAL
FILE_FILTER_ZON = 17
#: Image transform ``*.itr`` LOCAL
FILE_FILTER_ITR = 18
#: AutoCAD DXF files ``*.dxf`` LOCAL
FILE_FILTER_DXF = 19
#: TIFF files ``*.tif`` LOCAL
FILE_FILTER_TIF = 20
#: Enhanced Metafiles ``*.emf`` LOCAL
FILE_FILTER_EMF = 21
#: Bitmap files ``*.bmp`` LOCAL
FILE_FILTER_BMP = 22
#: ER Mapper LUT ``*.lut`` GEOSOFT
FILE_FILTER_LUT = 23
#: PNG files ``*.png`` LOCAL
FILE_FILTER_PNG = 24
#: JPG files ``*.jpg`` LOCAL
FILE_FILTER_JPG = 25
#: PCX files ``*.pcx`` LOCAL
FILE_FILTER_PCX = 26
#: GIF files ``*.gif`` LOCAL
FILE_FILTER_GIF = 27
#: GRD files ``*.grd`` LOCAL
FILE_FILTER_GRD = 28
#: ERS files ``*.ers`` LOCAL
FILE_FILTER_ERS = 29
#: EPS files ``*.eps`` LOCAL
FILE_FILTER_EPS = 30
#: ArcView Shape files ``*.shp`` LOCAL
FILE_FILTER_SHP = 31
#: CGM files ``*.cgm`` LOCAL
FILE_FILTER_CGM = 32
#: MapInfo Tab files ``*.tab`` LOCAL
FILE_FILTER_TAB = 33
#: Software Components Components LOCAL
FILE_FILTER_COMPS = 34
#: Comma-separated values files ``*.csv`` LOCAL
FILE_FILTER_CSV = 35
#: Geosoft Project ``*.gpf`` LOCAL
FILE_FILTER_GPF = 36
#: Geosoft Polygons ``*.ply`` LOCAL
FILE_FILTER_PLY = 37
#: Scatter templates ``*.stm`` LOCAL
FILE_FILTER_STM = 38
#: Triplot templates ``*.ttm`` LOCAL
FILE_FILTER_TTM = 39
#: Geosoft XYZ files ``*.xyz`` LOCAL
FILE_FILTER_XYZ = 40
#: Geosoft Bar file ``*.geobar`` LOCAL
FILE_FILTER_BAR = 41
#: Geosoft License files ``*.geosoft_license`` LOCAL
FILE_FILTER_GEOSOFT_LICENSE = 42
#: XML files ``*.xml`` LOCAL
FILE_FILTER_XML = 43
#: GX.NET files ``*.dll`` GEOSOFT
FILE_FILTER_GXNET = 44
#: ECW files ``*.ecw`` LOCAL
FILE_FILTER_ECW = 45
#: J2K JPEG 2000 files ``*.j2k`` LOCAL
FILE_FILTER_J2K = 46
#: JP2 JPEG 2000 files ``*.jp2`` LOCAL
FILE_FILTER_JP2 = 47
#: acQuire parameters ``*.sel`` LOCAL
FILE_FILTER_SEL = 48
#: SVG file ``*.svg`` LOCAL
FILE_FILTER_SVG = 49
#: SVG Compressed file ``*.svz`` LOCAL
FILE_FILTER_SVZ = 50
#: Warp file ``*.wrp`` LOCAL
FILE_FILTER_WRP = 51
#: MAPPLOT file ``*.con`` LOCAL
FILE_FILTER_MAPPLOT = 52
#: Surpac DTM files ``*.dtm`` LOCAL
FILE_FILTER_DTM = 53
#: Geosoft Voxel ``*.geosoft_voxel`` LOCAL
FILE_FILTER_VOXEL = 54
#: Map Template file ``*.geosoft_maptemplate`` LOCAL
FILE_FILTER_MAPTEMPLATE = 55
#: Action Scripts ``*.action`` LOCAL
FILE_FILTER_ACTION = 56
#: Datamine files ``*.dm`` LOCAL
FILE_FILTER_DM = 57
#: Google Earth KML ``*.kml`` LOCAL
FILE_FILTER_KML = 58
#: Google Earth Compressed KML ``*.kmz`` LOCAL
FILE_FILTER_KMZ = 59
#: Target parameter ini file for plans ``*.inp`` LOCAL
FILE_FILTER_TARGET_PLAN = 60
#: Target parameter ini file for sections ``*.ins`` LOCAL
FILE_FILTER_TARGET_SECTION = 61
#: Target parameter ini file for strip logs ``*.inl`` LOCAL
FILE_FILTER_TARGET_STRIPLOG = 62
#: Target parameter ini file for 3D plots ``*.in3`` LOCAL
FILE_FILTER_TARGET_3D = 63
#: ArcGIS Layer files ``*.lyr`` LOCAL
FILE_FILTER_ARGIS_LYR = 64
#: ArcGIS Map Document files ``*.mxd`` LOCAL
FILE_FILTER_ARGIS_MXD = 65
#: GOCAD TSurf files ``*.ts`` LOCAL
FILE_FILTER_GOCAD_TS = 66
#: Geosoft list of items: names, values ``*.lst`` LOCAL
FILE_FILTER_LST = 67
#: GM-SYS external coordinate system ``*.ecs`` LOCAL
FILE_FILTER_ECS = 68
#: Target parameter ini file for fence sections ``*.ins`` LOCAL
FILE_FILTER_TARGET_FENCE = 69
#: GM-SYS 3D model ``*.geosoft_gmsys3d`` LOCAL
FILE_FILTER_GMS3D = 70
#: GEMCOM BT2 ``*.bt2`` LOCAL
FILE_FILTER_BT2 = 71
#: GEMCOM BPR ``*.bpr`` LOCAL
FILE_FILTER_BPR = 72
#: GEMCOM BPR2 ``*.bpr2`` LOCAL
FILE_FILTER_BPR2 = 73
#: Excel 97-2003 workbook ``*.xls`` LOCAL
FILE_FILTER_XLS = 74
#: Excel 2007 workbook ``*.xlsx`` LOCAL
FILE_FILTER_XLSX = 75
#: Access 97-2003 ``*.mdb`` LOCAL
FILE_FILTER_MDB = 76
#: Access 2007 ``*.accdb`` LOCAL
FILE_FILTER_ACCDB = 77
#: Levelling intersection ``*.tbl`` LOCAL
FILE_FILTER_INTERSECTION_TBL = 78
#: UBC DCIP2D Conductivity model files ``*.con`` LOCAL
FILE_FILTER_UBC_CON = 79
#: UBC DCIP2D Chargeability model files ``*.chg`` LOCAL
FILE_FILTER_UBC_CHG = 80
#: UBC DCIP2D Mesh files ``*.msh`` LOCAL
FILE_FILTER_UBC_MSH = 81
#: UBC DCIP2D Mesh files ``*.dat`` LOCAL
FILE_FILTER_UBC_MSH_DAT = 82
#: UBC DCIP2D Topo files ``*.dat`` LOCAL
FILE_FILTER_UBC_TOPO_DAT = 83
#: UBC DCIP2D Topo files ``*.xyz`` LOCAL
FILE_FILTER_UBC_TOPO_XYZ = 84
#: XYZ Import Templates ``*.i0`` LOCAL
FILE_FILTER_XYZ_TEMPLATE_I0 = 85
#: Picodas Import Templates ``*.i1`` LOCAL
FILE_FILTER_PICO_TEMPLATE_I1 = 86
#: Block Binary Import Templates ``*.i2`` LOCAL
FILE_FILTER_BB_TEMPLATE_I2 = 87
#: ASCII Import Templates ``*.i3`` LOCAL
FILE_FILTER_ASCII_TEMPLATE_I3 = 88
#: ODBC Import Templates ``*.i4`` LOCAL
FILE_FILTER_ODBC_TEMPLATE_I4 = 89
#: Math expression files ``*.exp`` LOCAL
FILE_FILTER_EXP = 90
#: SEG-Y files ``*.sgy`` LOCAL
FILE_FILTER_SEGY = 91
#: DAARC500 files xYYMMDD LOCAL
FILE_FILTER_DAARC500 = 92
#: Text files ``*.txt`` LOCAL
FILE_FILTER_TXT = 93
#: Voxi ``*.geosoft_voxi`` LOCAL
FILE_FILTER_VOXEL_INVERSION = 94
#: GM-SYS Profile model file ``*.gms`` LOCAL
FILE_FILTER_GMS = 95
#: Geosoft 3D filter files ``*.flt3d`` LOCAL
FILE_FILTER_FLT3D = 96
#: Geosoft Resource Update Packages ``*.geosoft_resource_pack`` LOCAL
FILE_FILTER_RESOURCE_PACK = 97
#: Geostring files ``*.geosoft_string`` LOCAL
FILE_FILTER_GEOSTRING = 98
#: Geosurface files ``*.geosoft_surface`` LOCAL
FILE_FILTER_GEOSURFACE = 99
#: Geosoft `GX3DV <geosoft.gxapi.GX3DV>` ``*.geosoft_3dv`` LOCAL
FILE_FILTER_GEOSOFT3DV = 100
#: Geosoft Vector Voxel ``*.geosoft_vectorvoxel`` LOCAL
FILE_FILTER_VECTORVOXEL = 101
#: Geosoft Filters ``*.flt`` LOCAL
FILE_FILTER_FLT = 102
#: XYZ Export Templates ``*.o0`` LOCAL
FILE_FILTER_XYZ_TEMPLATE_O0 = 103
#: GM-SYS Profile model ``*.geosoft_gmsys2d`` LOCAL
FILE_FILTER_GMS2D = 104
#: `GXIP <geosoft.gxapi.GXIP>` Database Template ``*.geosoft_ipdatabasetemplate`` LOCAL
FILE_FILTER_IP_DATABASE_TEMPLATE = 105
#: Geosoft Resource Module ``*.geosoft_resources`` LOCAL
FILE_FILTER_GEOSOFT_RESOURCE_MODULE = 106
#: Shell VT files ``*.vt`` LOCAL
FILE_FILTER_VT = 107
#: Shell INT files ``*.int`` LOCAL
FILE_FILTER_INT = 108
#: Shell SGT files ``*.sgt`` LOCAL
FILE_FILTER_SGT = 109
#: Image Viewer files ``*.imgview`` LOCAL
FILE_FILTER_IMGVIEW = 110
#: Zip files ``*.zip`` LOCAL
FILE_FILTER_ZIP = 111
#: GPS Table ``*.tbl`` GEOSOFT
FILE_FILTER_GPS_TABLE = 112
#: Maptek Vulcan triangulation file ``*.tbl`` LOCAL
FILE_FILTER_VULCAN_TRIANGULATION = 113
#: Maptek Vulcan block model file ``*.bmf`` LOCAL
FILE_FILTER_VULCAN_BLOCK_MODEL = 114
#: Layout files ``*.prjview`` LOCAL
FILE_FILTER_PRJVIEW = 115
#: Leapfrog model files ``*.lfm`` LOCAL
FILE_FILTER_LEAPFROG_MODEL = 116
#: Reflex ioGAS files ``*.gas`` LOCAL
FILE_FILTER_IOGAS = 117
#: ASEG ESF file ``*.esf`` LOCAL
FILE_FILTER_ASEG_ESF = 118
#: Micro-g LaCoste MGS-6 gravity files ``*.dat`` LOCAL
FILE_FILTER_LACOSTE_DAT = 119
#: Geosoft variogram file ``*.var`` LOCAL
FILE_FILTER_VAR = 120
#: UKOOA data exchange file ``*.p190`` LOCAL
FILE_FILTER_P190 = 121
#: UBC observation files ``*.dat`` LOCAL
FILE_FILTER_UBC_OBS_DAT = 122
#: UBC location files ``*.loc`` LOCAL
FILE_FILTER_UBC_LOC = 123
#: UBC model files ``*.mod`` LOCAL
FILE_FILTER_UBC_MOD = 124
#: UBC density model files ``*.den`` LOCAL
FILE_FILTER_UBC_DEN = 125
#: UBC susceptibility model files ``*.sus`` LOCAL
FILE_FILTER_UBC_SUS = 126
#: GOCAD voxet files ``*.vo`` LOCAL
FILE_FILTER_GOCAD_VOXET = 127
#: Scintrex gravity files ``*.dat`` LOCAL
FILE_FILTER_SCINTREX_DAT = 128
#: Dump files ``*.dmp`` LOCAL
FILE_FILTER_DMP = 129
#: Geosoft RAW gravity files ``*.raw`` LOCAL
FILE_FILTER_RAW = 130
#: Data files ``*.dat`` LOCAL
FILE_FILTER_DAT = 131
#: OMF files ``*.omf`` LOCAL
FILE_FILTER_OMF = 132
#: 3d survey files ``*.3dSurvey`` LOCAL
FILE_FILTER_3DSURVEY = 133

#
# FILE_FORM constants
#
# File Form Defines

#: Open a file
FILE_FORM_OPEN = 0
#: Save a file
FILE_FORM_SAVE = 1

#
# GS_DIRECTORY constants
#
# Geosoft predefined directory

#: None
GS_DIRECTORY_NONE = 0
#: Geosoft
GS_DIRECTORY_GEOSOFT = 1
#: Bin
GS_DIRECTORY_BIN = 2
#: Ger
GS_DIRECTORY_GER = 3
#: Omn
GS_DIRECTORY_OMN = 4
#: Tbl
GS_DIRECTORY_TBL = 5
#: Fonts
GS_DIRECTORY_FONTS = 6
#: Gx
GS_DIRECTORY_GX = 7
#: Gs
GS_DIRECTORY_GS = 8
#: Apps
GS_DIRECTORY_APPS = 9
#: Etc
GS_DIRECTORY_ETC = 10
#: Hlp
GS_DIRECTORY_HLP = 11
#: Gxdev
GS_DIRECTORY_GXDEV = 12
#: Component
GS_DIRECTORY_COMPONENT = 13
#: Csv
GS_DIRECTORY_CSV = 14
#: Lic
GS_DIRECTORY_LIC = 15
#: Ini
GS_DIRECTORY_INI = 16
#: Temp
GS_DIRECTORY_TEMP = 17
#: Uetc
GS_DIRECTORY_UETC = 18
#: Umaptemplate
GS_DIRECTORY_UMAPTEMPLATE = 19
#: Component scripts
GS_DIRECTORY_COMPONENT_SCRIPTS = 50
#: Component html
GS_DIRECTORY_COMPONENT_HTML = 51
#: Img
GS_DIRECTORY_IMG = 52
#: Bar
GS_DIRECTORY_BAR = 53
#: Gxnet
GS_DIRECTORY_GXNET = 54
#: Maptemplate
GS_DIRECTORY_MAPTEMPLATE = 55

#
# IMPCH_TYPE constants
#
# Import Chem defines

#: Data
IMPCH_TYPE_DATA = 0
#: Assay
IMPCH_TYPE_ASSAY = 1

#
# WINDOW_STATE constants
#
# Window State Options

#: Window restore
WINDOW_RESTORE = 0
#: Window minimize
WINDOW_MINIMIZE = 1
#: Window maximize
WINDOW_MAXIMIZE = 2

#
# XTOOL_ALIGN constants
#
# XTool docking alignment flags

#: Left
XTOOL_ALIGN_LEFT = 1
#: Top
XTOOL_ALIGN_TOP = 2
#: Right
XTOOL_ALIGN_RIGHT = 4
#: Bottom
XTOOL_ALIGN_BOTTOM = 8
#: Any
XTOOL_ALIGN_ANY = 15

#
# XTOOL_DOCK constants
#
# XTool default docking state

#: Top
XTOOL_DOCK_TOP = 1
#: Left
XTOOL_DOCK_LEFT = 2
#: Right
XTOOL_DOCK_RIGHT = 3
#: Bottom
XTOOL_DOCK_BOTTOM = 4
#: Float
XTOOL_DOCK_FLOAT = 5

#
# GXHTTP Constants
#

#
# GXIEXP Constants
#

#
# GXINTERNET Constants
#

#
# GXIP Constants
#

#
# IP_ARRAY constants
#
# `GXIP <geosoft.gxapi.GXIP>` Array options

#: Dpdp
IP_ARRAY_DPDP = 0
#: Pldp
IP_ARRAY_PLDP = 1
#: Plpl
IP_ARRAY_PLPL = 2
#: Grad
IP_ARRAY_GRAD = 3
#: Wenner
IP_ARRAY_WENNER = 5
#: Schlumberger
IP_ARRAY_SCHLUMBERGER = 6
#: Unknown
IP_ARRAY_UNKNOWN = 7
#: 3d
IP_ARRAY_3D = 9
#: 3d pldp
IP_ARRAY_3D_PLDP = 10
#: 3d plpl
IP_ARRAY_3D_PLPL = 11

#
# IP_CHANNELS constants
#
# Channels to display

#: Displayed
IP_CHANNELS_DISPLAYED = 0
#: Selected
IP_CHANNELS_SELECTED = 1
#: All
IP_CHANNELS_ALL = 2

#
# IP_DOMAIN constants
#
# Types of Domains

#: None
IP_DOMAIN_NONE = -1
#: Time
IP_DOMAIN_TIME = 0
#: Frequency
IP_DOMAIN_FREQUENCY = 1
#: Both
IP_DOMAIN_BOTH = 2

#
# IP_DUPLICATE constants
#
# How to handle duplicates

#: Append
IP_DUPLICATE_APPEND = 0
#: Overwrite
IP_DUPLICATE_OVERWRITE = 1

#
# IP_FILTER constants
#
# Fraser Filters

#: Regular pant-leg filter::
#:
#:         _!_
#:        /*_*\\    n1
#:       /*/ \\*\\   n2
#:      /*/   \\*\\  n3
#:      :         :
IP_FILTER_PANTLEG = 1
#: Regular pant-leg filter with top at first point::
#:
#:       !        nscp:
#:      /*\\       n1
#:     /*_*\\      n2
#:    /*/ \\*\\     n3
#:    :        :
IP_FILTER_PANTLEGP = 2
#: Regular pyramid filter::
#:
#:         _!_     maxn:
#:        /* *\\    n1
#:       /* * *\\   n2
#:      /* * * *\\  n3
#:      :         :
IP_FILTER_PYRIAMID = 3
#: Regular pyramid filter with peak on a point::
#:
#:       !       maxn:
#:      /*\\      n1
#:     /* *\\     n2
#:    /* * *\\    n3
#:    :        :
IP_FILTER_PYRIAMIDP = 4

#
# IP_I2XIMPMODE constants
#
# Interpext Import Mode

#: Recreates the line from scratch.
IP_I2XIMPMODE_REPLACE = 0
#: Looks for matching Tx1 and N values and
#: replaces data in matching lines only.
IP_I2XIMPMODE_MERGE = 1

#
# IP_I2XINV constants
#
# Type of Inversion

#: Image
IP_I2XINV_IMAGE = 0
#: Zonge
IP_I2XINV_ZONGE = 1

#
# IP_LINES constants
#
# Lines to display

#: Displayed
IP_LINES_DISPLAYED = 0
#: Selected
IP_LINES_SELECTED = 1
#: All
IP_LINES_ALL = 2

#
# IP_PLOT constants
#
# Type of Plot

#: Pseudosection
IP_PLOT_PSEUDOSECTION = 0
#: Stackedsection
IP_PLOT_STACKEDSECTION = 1

#
# IP_QCTYPE constants
#
# Type of Measurement

#: Resistivity
IP_QCTYPE_RESISTIVITY = 0
#: `GXIP <geosoft.gxapi.GXIP>`
IP_QCTYPE_IP = 1

#
# IP_STACK_TYPE constants
#
# Spacing Types

#: Use map-based spacing, and preserve the directions of the
#: original lines by rotating the sections as desired to their true
#: locations. (At present only N-S and E-W sections are supported).
IP_STACK_TYPE_MAP = 0
#: Spaces the sections equally, with enough room to
#: guarantee no overlap with high N-values or closely spaced lines.
IP_STACK_TYPE_EQUAL = 1
#: Now the same as IP_STACK_MAP
IP_STACK_TYPE_GEOGRAPHIC = 2

#
# IP_STNSCALE constants
#
# Station Scaling

#: Station numbers become X or Y locations
IP_STNSCALE_NONE = 0
#: Multiply station numbers by the A spacing
IP_STNSCALE_ASPACE = 1
#: Multiply by an input value.
IP_STNSCALE_VALUE = 2 #: Look up locations from a CSV Line/Station/X/Y file IP_STNSCALE_FILE = 3 # # IP_SYS constants # # Instrument #: Ipdata IP_SYS_IPDATA = 0 #: Ip2 IP_SYS_IP2 = 1 #: Ip6 IP_SYS_IP6 = 2 #: Ip10 IP_SYS_IP10 = 3 #: Syscalr2 IP_SYS_SYSCALR2 = 4 #: Ipr11 IP_SYS_IPR11 = 5 #: Ipr12 IP_SYS_IPR12 = 6 #: Phoenix IP_SYS_PHOENIX = 7 #: Phoenix v2 IP_SYS_PHOENIX_V2 = 8 #: Elrec pro IP_SYS_ELREC_PRO = 9 #: Prosys ii IP_SYS_PROSYS_II = 10 # # IP_UBC_CONTROL constants # # Types of Domains #: None IP_UBC_CONTROL_NONE = -1 #: Default IP_UBC_CONTROL_DEFAULT = 0 #: File IP_UBC_CONTROL_FILE = 1 #: Value IP_UBC_CONTROL_VALUE = 2 #: Length IP_UBC_CONTROL_LENGTH = 3 # # IP_PLDP_CONV constants # # Types of Domains #: Close rx IP_PLDP_CONV_CLOSE_RX = 0 #: Mid rx IP_PLDP_CONV_MID_RX = 1 #: Distant rx IP_PLDP_CONV_DISTANT_RX = 2 # # GXIPGUI Constants # # # GXKGRD Constants # # # GXLMSG Constants # # # GXMISC Constants # # # GXMSTK Constants # # # GXMVG Constants # # # MVG_DRAW constants # # `GXMVG <geosoft.gxapi.GXMVG>` draw define #: Polyline MVG_DRAW_POLYLINE = 0 #: Polygon MVG_DRAW_POLYGON = 1 # # MVG_GRID constants # # `GXMVG <geosoft.gxapi.GXMVG>` grid define #: Dot MVG_GRID_DOT = 0 #: Line MVG_GRID_LINE = 1 #: Cross MVG_GRID_CROSS = 2 # # MVG_LABEL_BOUND constants # # `GXMVG <geosoft.gxapi.GXMVG>` label bound define #: No MVG_LABEL_BOUND_NO = 0 #: Yes MVG_LABEL_BOUND_YES = 1 # # MVG_LABEL_JUST constants # # `GXMVG <geosoft.gxapi.GXMVG>` label justification define #: Top MVG_LABEL_JUST_TOP = 0 #: Bottom MVG_LABEL_JUST_BOTTOM = 1 #: Left MVG_LABEL_JUST_LEFT = 2 #: Right MVG_LABEL_JUST_RIGHT = 3 # # MVG_LABEL_ORIENT constants # # `GXMVG <geosoft.gxapi.GXMVG>` label orientation #: Horizontal MVG_LABEL_ORIENT_HORIZONTAL = 0 #: Top right MVG_LABEL_ORIENT_TOP_RIGHT = 1 #: Top left MVG_LABEL_ORIENT_TOP_LEFT = 2 # # MVG_SCALE constants # # `GXMVG <geosoft.gxapi.GXMVG>` scale define #: Linear MVG_SCALE_LINEAR = 0 #: Log MVG_SCALE_LOG = 1 #: Loglinear MVG_SCALE_LOGLINEAR = 2 # 
# MVG_WRAP constants # # `GXMVG <geosoft.gxapi.GXMVG>` wrap define #: No MVG_WRAP_NO = 0 #: Yes MVG_WRAP_YES = 1 # # GXPDF3D Constants # # # GXPGEXP Constants # # # GXPGU Constants # # # BLAKEY_TEST constants # # Types of BLAKEY tests #: Oneside BLAKEY_TEST_ONESIDE = 1 #: Twoside BLAKEY_TEST_TWOSIDE = 2 #: Threeside BLAKEY_TEST_THREESIDE = 3 #: Fourside BLAKEY_TEST_FOURSIDE = 4 # # PGU_CORR constants # # Correlation (must be synchronized with :ref:`ST2_CORRELATION`) #: Simple correlation PGU_CORR_SIMPLE = 0 #: Pearson's correlation (normalized to standard deviations) PGU_CORR_PEARSON = 1 # # PGU_DIRECTGRID constants # # Type of statistic to use on the data points in each cell. #: Select the minimum value found in each cell PGU_DIRECTGRID_MINIMUM = 0 #: Select the maximum value found in each cell PGU_DIRECTGRID_MAXIMUM = 1 #: Select the mean of all values found in each cell PGU_DIRECTGRID_MEAN = 2 #: The number of valid (non-dummy) items found in each cell PGU_DIRECTGRID_ITEMS = 3 # # PGU_DIRECTION constants # # Direction #: Forward direction: Removes mean and standard deviation, #: storing the values in the VVs. PGU_FORWARD = 0 #: Backward direction: Applies mean and standard deviation #: values in the VVs to the data. 
PGU_BACKWARD = 1 # # PGU_TRANS constants # # Transform methods for the columns #: None PGU_TRANS_NONE = 0 #: Log PGU_TRANS_LOG = 1 # # PGU_INTERP_ORDER constants # # Interpolation direction order #: Xyz PGU_INTERP_ORDER_XYZ = 0 #: Xzy PGU_INTERP_ORDER_XZY = 1 #: Yxz PGU_INTERP_ORDER_YXZ = 2 #: Yzx PGU_INTERP_ORDER_YZX = 3 #: Zxy PGU_INTERP_ORDER_ZXY = 4 #: Zyx PGU_INTERP_ORDER_ZYX = 5 # # GXPRAGA3 Constants # # # GXPROJ Constants # # # COMMAND_ENV constants # # Command environments #: Normal COMMAND_ENV_NORMAL = 0 #: Executing from inside 3D Viewer COMMAND_ENV_IN3DVIEWER = 1 # # TOOL_TYPE constants # # Tool type defines #: Geosoft created default tools TOOL_TYPE_DEFAULT = 0 #: Auxiliary tools (including custom XTools) TOOL_TYPE_AUXILIARY = 1 #: All tools TOOL_TYPE_ALL = 2 # # PROJ_DISPLAY constants # # How to display an object #: Do not display the object PROJ_DISPLAY_NO = 0 #: Display the object unless user set option not to PROJ_DISPLAY_YES = 1 #: Always display the object PROJ_DISPLAY_ALWAYS = 2 # # GXRGRD Constants # # # GXSEMPLOT Constants # # # SEMPLOT_GROUP_CLASS constants # # `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` group class. #: Semplot group class SEMPLOT_GROUP_CLASS = "Semplot" # # SEMPLOT_EXPORT constants # # `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` export type selection. #: Exports Sample info channels, oxides/ratios, totals, extra channels. SEMPLOT_EXPORT_NORMAL = 0 #: Exports Sample info, oxides/ratios, totals. SEMPLOT_EXPORT_NOEXTRA = 1 # # SEMPLOT_EXT constants # # `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` file extension selection #: Use for selection only. Selects both "Semplot" and "`GXCHIMERA <geosoft.gxapi.GXCHIMERA>`" type #: files when creating LSTs etc. 
SEMPLOT_EXT_ALL = 0 #: Read/write templates with extensions ".xyt", ".tri" and ".semtemplate" #: Read/write overlays with extensions ".oly" and ".semoverlay" SEMPLOT_EXT_SEMPLOT = 1 #: Read/write templates with extensions ".geosoft_template" #: Read/write overlays with extensions ".geosoft_overlay" SEMPLOT_EXT_CHIMERA = 2 # # SEMPLOT_PLOT constants # # `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` plot type selection. #: Use for selection only. Selects both "XYPlot" and "TriPlot" #: plots when creating LSTs etc. SEMPLOT_PLOT_ALL = 0 #: Select XY (Scatter) plot. SEMPLOT_PLOT_XYPLOT = 1 #: Select Tri (Triangular) plot. SEMPLOT_PLOT_TRIPLOT = 2 #: Returned as an error status from some functions. SEMPLOT_PLOT_UNKNOWN = 3 # # GXSHD Constants # # # SHD_FIX constants # # Interactive tracking constraints. #: None SHD_FIX_NONE = 0 #: Inclination SHD_FIX_INCLINATION = 1 #: Declination SHD_FIX_DECLINATION = 2 # # GXSHP Constants # # # SHP_GEOM_TYPE constants # # Shape file geometry types #: Single (X, Y) point SHP_GEOM_TYPE_POINT = 1 #: Arc (polyline) multiple (X, Y) points. SHP_GEOM_TYPE_ARC = 3 #: Polygon. Multiple (X, Y) points. SHP_GEOM_TYPE_POLYGON = 5 #: Single (X, Y, Z) point SHP_GEOM_TYPE_POINTZ = 11 #: Arc (polyline) multiple (X, Y, Z) points. SHP_GEOM_TYPE_ARCZ = 13 #: Polygon. Multiple (X, Y, Z) points. 
SHP_GEOM_TYPE_POLYGONZ = 15 # # GXSQLSRV Constants # # # MFCSQL_DRIVER constants # # SQL Server Driver #: No dialog box, Error if authentication parameters are wrong MFCSQL_DRIVER_NOPROMPT = 0 #: Only shows dialog box if authentication parameters are wrong MFCSQL_DRIVER_COMPLETE = 1 #: Always show dialog box, with option to change parameter MFCSQL_DRIVER_PROMPT = 2 #: Same as `MFCSQL_DRIVER_COMPLETE <geosoft.gxapi.MFCSQL_DRIVER_COMPLETE>` except only missing parameters are editable MFCSQL_DRIVER_COMPLETE_REQUIRED = 3 # # GXSTK Constants # # # STK_AXIS constants # # `GXSTK <geosoft.gxapi.GXSTK>` Axis defines #: X Axis STK_AXIS_X = 0 #: Y Axis STK_AXIS_Y = 1 # # STK_AXIS_POS constants # # `GXSTK <geosoft.gxapi.GXSTK>` Axis defines. Use with STK_AXIS_X and STK_AXIS_Y #: Axis STK_AXIS_NONE = 0 #: Left side only STK_AXIS_LEFT = 1 #: Right side only STK_AXIS_RIGHT = 2 #: Left and right sides STK_AXIS_BOTH = 3 #: Bottom only STK_AXIS_BOTTOM = 1 #: Top only STK_AXIS_TOP = 2 # # STK_FLAG constants # # Stack flags #: Profile STK_FLAG_PROFILE = 0 #: Fid STK_FLAG_FID = 1 #: Symbol STK_FLAG_SYMBOL = 2 #: Xbar STK_FLAG_XBAR = 3 #: Xlabel STK_FLAG_XLABEL = 4 #: Xtitle STK_FLAG_XTITLE = 5 #: Ybar STK_FLAG_YBAR = 6 #: Ylabel STK_FLAG_YLABEL = 7 #: Ytitle STK_FLAG_YTITLE = 8 #: Grid1 STK_FLAG_GRID1 = 9 #: Grid2 STK_FLAG_GRID2 = 10 # # STK_GRID constants # # Stack Grid define #: Primary Grid STK_GRID_PRIMARY = 0 #: Secondary Grid STK_GRID_SECONDARY = 1 # # GXSTRINGS Constants # # # GXTC Constants # # # TC_OPT constants # # Optimization #: (slow) no optimization TC_OPT_NONE = 0 #: (faster) desampling and using qspline (4x4 points) interpolation #: on coarser averaged grid TC_OPT_MAX = 1 # # TC_SURVEYTYPE constants # # Survey Type #: Ground TC_SURVEYTYPE_GROUND = 0 #: Shipborne TC_SURVEYTYPE_SHIPBORNE = 1 #: Airborne TC_SURVEYTYPE_AIRBORNE = 2 # # GG_ELEMENT constants # # GG element #: Gxx GG_ELEMENT_XX = 0 #: Gyy GG_ELEMENT_YY = 1 #: Gxy GG_ELEMENT_XY = 2 #: Gxz GG_ELEMENT_XZ = 3 #: 
Gyz GG_ELEMENT_YZ = 4 # # GXTEST Constants # # # GXTIN Constants # # # GXTRANSFORMLAYER Constants # # # GXTRND Constants # # # TRND_NODE constants # # Node to find #: Trnd min TRND_MIN = 0 #: Trnd max TRND_MAX = 1 # # GXUNC Constants # # # UTF8 constants # # UTF-8 Defines #: Maximum width of a single Unicode code point as a :ref:`UTF8` string, including terminator (5) UTF8_MAX_CHAR = 5 # # GXVAU Constants # # # VAU_PRUNE constants # # Prune Options #: Dummy VAU_PRUNE_DUMMY = 0 #: Valid VAU_PRUNE_VALID = 1 # # GXVVEXP Constants # # # GXVVU Constants # # # QC_CRITERION constants # # Criterion #: 1 QC_CRITERION_1 = 0 #: 2 QC_CRITERION_2 = 1 #: 12 QC_CRITERION_12 = 2 # # TEM_ARRAY constants # # Array Type #: Verticalsounding TEM_ARRAY_VERTICALSOUNDING = 0 #: Profiling TEM_ARRAY_PROFILING = 1 #: Borehole TEM_ARRAY_BOREHOLE = 2 # # VV_DUP constants # # Duplicate handling mode #: Average numeric values (for strings, same as `VV_DUP_1 <geosoft.gxapi.VV_DUP_1>`) VV_DUP_AVERAGE = 0 #: Use first value of the pair VV_DUP_1 = 1 #: Use second value of the pair VV_DUP_2 = 2 #: Set to dummy VV_DUP_DUMMY = 3 #: Set to "3" (cannot use with string data `GXVV <geosoft.gxapi.GXVV>`) VV_DUP_SAMPLE = 4 # # VV_XYDUP constants # # Sample handling #: Average VV_XYDUP_AVERAGE = 0 #: Sum VV_XYDUP_SUM = 1 # # VVU_CASE constants # # String case handling #: Tolerant VVU_CASE_TOLERANT = 0 #: Sensitive VVU_CASE_SENSITIVE = 1 # # VVU_CLIP constants # # Type of clipping #: Clip replaces clipped values with a dummy. VVU_CLIP_DUMMY = 0 #: Clip replaces clipped values with the limit. VVU_CLIP_LIMIT = 1 # # VVU_DUMMYREPEAT constants # # How to deal with repeats #: Dummies all but first point. VVU_DUMMYREPEAT_FIRST = 0 #: Dummies all but last point. VVU_DUMMYREPEAT_LAST = 1 #: Dummies all but middle point. 
VVU_DUMMYREPEAT_MIDDLE = 2 # # VVU_INTERP constants # # Interpolation method to use #: Nearest VVU_INTERP_NEAREST = 1 #: Linear VVU_INTERP_LINEAR = 2 #: Cubic VVU_INTERP_CUBIC = 3 #: Akima VVU_INTERP_AKIMA = 4 #: Predict VVU_INTERP_PREDICT = 5 # # VVU_INTERP_EDGE constants # # Interpolation method to use on edges #: None VVU_INTERP_EDGE_NONE = 0 #: Same VVU_INTERP_EDGE_SAME = 1 #: Nearest VVU_INTERP_EDGE_NEAREST = 2 #: Linear VVU_INTERP_EDGE_LINEAR = 3 # # VVU_LINE constants # # Line Types #: Line 2 points LINE_2_POINTS = 0 #: Line point azimuth LINE_POINT_AZIMUTH = 1 # # VVU_MASK constants # # Type of clipping #: Mask `GXVV <geosoft.gxapi.GXVV>` is set to dummy at locations inside the `GXPLY <geosoft.gxapi.GXPLY>`. VVU_MASK_INSIDE = 0 #: Mask `GXVV <geosoft.gxapi.GXVV>` is set to dummy at locations outside the `GXPLY <geosoft.gxapi.GXPLY>`. VVU_MASK_OUTSIDE = 1 # # VVU_MATCH constants # # Matching style #: Entire string VVU_MATCH_FULL_STRINGS = 0 #: Match the first part of a string. VVU_MATCH_INPUT_LENGTH = 1 # # VVU_MODE constants # # Statistic to select #: Mean VVU_MODE_MEAN = 0 #: Median VVU_MODE_MEDIAN = 1 #: Maximum VVU_MODE_MAXIMUM = 2 #: Minimum VVU_MODE_MINIMUM = 3 # # VVU_OFFSET constants # # Heading #: Forward VVU_OFFSET_FORWARD = 0 #: Backward VVU_OFFSET_BACKWARD = 1 #: Right VVU_OFFSET_RIGHT = 2 #: Left VVU_OFFSET_LEFT = 3 # # VVU_PRUNE constants # # Prune options #: 0 VVU_PRUNE_DUMMY = 0 #: 1 VVU_PRUNE_VALID = 1 # # VVU_SPL constants # # Spline types #: Linear VVU_SPL_LINEAR = 0 #: Cubic VVU_SPL_CUBIC = 1 #: Akima VVU_SPL_AKIMA = 2 #: Nearest VVU_SPL_NEAREST = 3 # # VVU_SRCHREPL_CASE constants # # Search and Replace handling of string case #: Tolerant VVU_SRCHREPL_CASE_TOLERANT = 0 #: Sensitive VVU_SRCHREPL_CASE_SENSITIVE = 1 ### endblock Constants ### block ClassImports # NOTICE: Do not edit anything here, it is generated code __all__ = [ 'GXContext', 'GX3DC', 'GX3DN', 'GX3DV', 'GXAGG', 'GXBF', 'GXCSYMB', 'GXDAT', 'GXDATALINKD', 'GXDATAMINE', 'GXDB', 
'GXDBREAD', 'GXDBWRITE', 'GXDSEL', 'GXE3DV', 'GXEXT', 'GXGEOSTRING', 'GXGIS', 'GXGRID3D', 'GXHGD', 'GXHXYZ', 'GXIGRF', 'GXIMG', 'GXIMU', 'GXIPJ', 'GXITR', 'GXKML', 'GXLAYOUT', 'GXLL2', 'GXLPT', 'GXLST', 'GXLTB', 'GXMAP', 'GXMAPL', 'GXMAPTEMPLATE', 'GXMATH', 'GXMESH', 'GXMESHUTIL', 'GXMETA', 'GXMPLY', 'GXMULTIGRID3D', 'GXMULTIGRID3DUTIL', 'GXMVIEW', 'GXMVU', 'GXMXD', 'GXPAT', 'GXPG', 'GXPJ', 'GXPLY', 'GXRA', 'GXREG', 'GXSBF', 'GXSEGYREADER', 'GXST', 'GXST2', 'GXSTORAGEPROJECT', 'GXSTR', 'GXSURFACE', 'GXSURFACEITEM', 'GXSYS', 'GXTB', 'GXTPAT', 'GXTR', 'GXUSERMETA', 'GXVA', 'GXVECTOR3D', 'GXVM', 'GXVOX', 'GXVOXD', 'GXVOXE', 'GXVULCAN', 'GXVV', 'GXWA', 'GXACQUIRE', 'GXARCDB', 'GXARCDH', 'GXARCMAP', 'GXARCPY', 'GXARCSYS', 'GXBIGRID', 'GXCHIMERA', 'GXCOM', 'GXDCOL', 'GXDGW', 'GXDH', 'GXDMPPLY', 'GXDOCU', 'GXDU', 'GXDXFI', 'GXEDB', 'GXEDOC', 'GXEMAP', 'GXEMAPTEMPLATE', 'GXEUL3', 'GXEXP', 'GXFFT', 'GXFFT2', 'GXFLT', 'GXGD', 'GXGER', 'GXGMSYS', 'GXGU', 'GXGUI', 'GXHTTP', 'GXIEXP', 'GXINTERNET', 'GXIP', 'GXIPGUI', 'GXKGRD', 'GXLMSG', 'GXMISC', 'GXMSTK', 'GXMVG', 'GXPDF3D', 'GXPGEXP', 'GXPGU', 'GXPRAGA3', 'GXPROJ', 'GXRGRD', 'GXSEMPLOT', 'GXSHD', 'GXSHP', 'GXSQLSRV', 'GXSTK', 'GXSTRINGS', 'GXTC', 'GXTEST', 'GXTIN', 'GXTRANSFORMLAYER', 'GXTRND', 'GXUNC', 'GXVAU', 'GXVVEXP', 'GXVVU', ] from .GXContext import GXContext from .GX3DC import GX3DC from .GX3DN import GX3DN from .GX3DV import GX3DV from .GXAGG import GXAGG from .GXBF import GXBF from .GXCSYMB import GXCSYMB from .GXDAT import GXDAT from .GXDATALINKD import GXDATALINKD from .GXDATAMINE import GXDATAMINE from .GXDB import GXDB from .GXDBREAD import GXDBREAD from .GXDBWRITE import GXDBWRITE from .GXDSEL import GXDSEL from .GXE3DV import GXE3DV from .GXEXT import GXEXT from .GXGEOSTRING import GXGEOSTRING from .GXGIS import GXGIS from .GXGRID3D import GXGRID3D from .GXHGD import GXHGD from .GXHXYZ import GXHXYZ from .GXIGRF import GXIGRF from .GXIMG import GXIMG from .GXIMU import GXIMU from .GXIPJ import GXIPJ from 
.GXITR import GXITR from .GXKML import GXKML from .GXLAYOUT import GXLAYOUT from .GXLL2 import GXLL2 from .GXLPT import GXLPT from .GXLST import GXLST from .GXLTB import GXLTB from .GXMAP import GXMAP from .GXMAPL import GXMAPL from .GXMAPTEMPLATE import GXMAPTEMPLATE from .GXMATH import GXMATH from .GXMESH import GXMESH from .GXMESHUTIL import GXMESHUTIL from .GXMETA import GXMETA from .GXMPLY import GXMPLY from .GXMULTIGRID3D import GXMULTIGRID3D from .GXMULTIGRID3DUTIL import GXMULTIGRID3DUTIL from .GXMVIEW import GXMVIEW from .GXMVU import GXMVU from .GXMXD import GXMXD from .GXPAT import GXPAT from .GXPG import GXPG from .GXPJ import GXPJ from .GXPLY import GXPLY from .GXRA import GXRA from .GXREG import GXREG from .GXSBF import GXSBF from .GXSEGYREADER import GXSEGYREADER from .GXST import GXST from .GXST2 import GXST2 from .GXSTORAGEPROJECT import GXSTORAGEPROJECT from .GXSTR import GXSTR from .GXSURFACE import GXSURFACE from .GXSURFACEITEM import GXSURFACEITEM from .GXSYS import GXSYS from .GXTB import GXTB from .GXTPAT import GXTPAT from .GXTR import GXTR from .GXUSERMETA import GXUSERMETA from .GXVA import GXVA from .GXVECTOR3D import GXVECTOR3D from .GXVM import GXVM from .GXVOX import GXVOX from .GXVOXD import GXVOXD from .GXVOXE import GXVOXE from .GXVULCAN import GXVULCAN from .GXVV import GXVV from .GXWA import GXWA from .GXACQUIRE import GXACQUIRE from .GXARCDB import GXARCDB from .GXARCDH import GXARCDH from .GXARCMAP import GXARCMAP from .GXARCPY import GXARCPY from .GXARCSYS import GXARCSYS from .GXBIGRID import GXBIGRID from .GXCHIMERA import GXCHIMERA from .GXCOM import GXCOM from .GXDCOL import GXDCOL from .GXDGW import GXDGW from .GXDH import GXDH from .GXDMPPLY import GXDMPPLY from .GXDOCU import GXDOCU from .GXDU import GXDU from .GXDXFI import GXDXFI from .GXEDB import GXEDB from .GXEDOC import GXEDOC from .GXEMAP import GXEMAP from .GXEMAPTEMPLATE import GXEMAPTEMPLATE from .GXEUL3 import GXEUL3 from .GXEXP import GXEXP from .GXFFT import 
GXFFT from .GXFFT2 import GXFFT2 from .GXFLT import GXFLT from .GXGD import GXGD from .GXGER import GXGER from .GXGMSYS import GXGMSYS from .GXGU import GXGU from .GXGUI import GXGUI from .GXHTTP import GXHTTP from .GXIEXP import GXIEXP from .GXINTERNET import GXINTERNET from .GXIP import GXIP from .GXIPGUI import GXIPGUI from .GXKGRD import GXKGRD from .GXLMSG import GXLMSG from .GXMISC import GXMISC from .GXMSTK import GXMSTK from .GXMVG import GXMVG from .GXPDF3D import GXPDF3D from .GXPGEXP import GXPGEXP from .GXPGU import GXPGU from .GXPRAGA3 import GXPRAGA3 from .GXPROJ import GXPROJ from .GXRGRD import GXRGRD from .GXSEMPLOT import GXSEMPLOT from .GXSHD import GXSHD from .GXSHP import GXSHP from .GXSQLSRV import GXSQLSRV from .GXSTK import GXSTK from .GXSTRINGS import GXSTRINGS from .GXTC import GXTC from .GXTEST import GXTEST from .GXTIN import GXTIN from .GXTRANSFORMLAYER import GXTRANSFORMLAYER from .GXTRND import GXTRND from .GXUNC import GXUNC from .GXVAU import GXVAU from .GXVVEXP import GXVVEXP from .GXVVU import GXVVU ### endblock ClassImports ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXARCSYS.rst .. _GXARCSYS: GXARCSYS class ================================== .. 
autoclass:: geosoft.gxapi.GXARCSYS :members: <file_sep>/examples/tutorial/Geosoft modules - gxapi and gxpy/grid_dimensions_gxapi.py import geosoft.gxapi as gxapi gxc = gxapi.GXContext.create(__name__, '0.1') img = gxapi.GXIMG.create_file(gxapi.GS_FLOAT, 'test.grd(GRD)', gxapi.IMG_FILE_READONLY) x_sep = gxapi.float_ref() y_sep = gxapi.float_ref() x_origin = gxapi.float_ref() y_origin = gxapi.float_ref() rotation = gxapi.float_ref() img.get_info(x_sep, y_sep, x_origin, y_origin, rotation) print(' dimension (nx, ny): ({}, {})'.format(img.nx(), img.ny()), '\n separation (x, y): ({}, {})'.format(x_sep.value, y_sep.value), '\n origin (x, y): ({}, {})'.format(x_origin.value, y_origin.value), '\n rotation: {}'.format(rotation.value))<file_sep>/docs/GXMISC.rst .. _GXMISC: GXMISC class ================================== .. autoclass:: geosoft.gxapi.GXMISC :members: <file_sep>/docs/GXEMAP.rst .. _GXEMAP: GXEMAP class ================================== .. autoclass:: geosoft.gxapi.GXEMAP :members: .. _EMAP_FONT: EMAP_FONT constants ----------------------------------------------------------------------- Font Types .. autodata:: geosoft.gxapi.EMAP_FONT_TT :annotation: .. autoattribute:: geosoft.gxapi.EMAP_FONT_TT .. autodata:: geosoft.gxapi.EMAP_FONT_GFN :annotation: .. autoattribute:: geosoft.gxapi.EMAP_FONT_GFN .. _EMAP_PATH: EMAP_PATH constants ----------------------------------------------------------------------- Four forms .. autodata:: geosoft.gxapi.EMAP_PATH_FULL :annotation: .. autoattribute:: geosoft.gxapi.EMAP_PATH_FULL .. autodata:: geosoft.gxapi.EMAP_PATH_DIR :annotation: .. autoattribute:: geosoft.gxapi.EMAP_PATH_DIR .. autodata:: geosoft.gxapi.EMAP_PATH_NAME_EXT :annotation: .. autoattribute:: geosoft.gxapi.EMAP_PATH_NAME_EXT .. autodata:: geosoft.gxapi.EMAP_PATH_NAME :annotation: .. autoattribute:: geosoft.gxapi.EMAP_PATH_NAME .. _EMAP_REDRAW: EMAP_REDRAW constants ----------------------------------------------------------------------- Redraw Options .. 
autodata:: geosoft.gxapi.EMAP_REDRAW_NO :annotation: .. autoattribute:: geosoft.gxapi.EMAP_REDRAW_NO .. autodata:: geosoft.gxapi.EMAP_REDRAW_YES :annotation: .. autoattribute:: geosoft.gxapi.EMAP_REDRAW_YES .. _EMAP_REMOVE: EMAP_REMOVE constants ----------------------------------------------------------------------- How to handle pending changes in document .. autodata:: geosoft.gxapi.EMAP_REMOVE_SAVE :annotation: .. autoattribute:: geosoft.gxapi.EMAP_REMOVE_SAVE .. autodata:: geosoft.gxapi.EMAP_REMOVE_PROMPT :annotation: .. autoattribute:: geosoft.gxapi.EMAP_REMOVE_PROMPT .. autodata:: geosoft.gxapi.EMAP_REMOVE_DISCARD :annotation: .. autoattribute:: geosoft.gxapi.EMAP_REMOVE_DISCARD .. _EMAP_TRACK: EMAP_TRACK constants ----------------------------------------------------------------------- Tracking Options .. autodata:: geosoft.gxapi.EMAP_TRACK_ERASE :annotation: .. autoattribute:: geosoft.gxapi.EMAP_TRACK_ERASE .. autodata:: geosoft.gxapi.EMAP_TRACK_RMENU :annotation: .. autoattribute:: geosoft.gxapi.EMAP_TRACK_RMENU .. autodata:: geosoft.gxapi.EMAP_TRACK_CYCLE :annotation: .. autoattribute:: geosoft.gxapi.EMAP_TRACK_CYCLE .. _EMAP_VIEWPORT: EMAP_VIEWPORT constants ----------------------------------------------------------------------- Tracking Options .. autodata:: geosoft.gxapi.EMAP_VIEWPORT_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.EMAP_VIEWPORT_NORMAL .. autodata:: geosoft.gxapi.EMAP_VIEWPORT_BROWSEZOOM :annotation: .. autoattribute:: geosoft.gxapi.EMAP_VIEWPORT_BROWSEZOOM .. autodata:: geosoft.gxapi.EMAP_VIEWPORT_BROWSEAOI :annotation: .. autoattribute:: geosoft.gxapi.EMAP_VIEWPORT_BROWSEAOI .. _EMAP_WINDOW_POSITION: EMAP_WINDOW_POSITION constants ----------------------------------------------------------------------- Window Positioning Options .. autodata:: geosoft.gxapi.EMAP_WINDOW_POSITION_DOCKED :annotation: .. autoattribute:: geosoft.gxapi.EMAP_WINDOW_POSITION_DOCKED .. autodata:: geosoft.gxapi.EMAP_WINDOW_POSITION_FLOATING :annotation: .. 
autoattribute:: geosoft.gxapi.EMAP_WINDOW_POSITION_FLOATING .. _EMAP_WINDOW_STATE: EMAP_WINDOW_STATE constants ----------------------------------------------------------------------- Window State Options .. autodata:: geosoft.gxapi.EMAP_WINDOW_RESTORE :annotation: .. autoattribute:: geosoft.gxapi.EMAP_WINDOW_RESTORE .. autodata:: geosoft.gxapi.EMAP_WINDOW_MINIMIZE :annotation: .. autoattribute:: geosoft.gxapi.EMAP_WINDOW_MINIMIZE .. autodata:: geosoft.gxapi.EMAP_WINDOW_MAXIMIZE :annotation: .. autoattribute:: geosoft.gxapi.EMAP_WINDOW_MAXIMIZE .. _LAYOUT_VIEW_UNITS: LAYOUT_VIEW_UNITS constants ----------------------------------------------------------------------- Base dlayout display units .. autodata:: geosoft.gxapi.LAYOUT_VIEW_MM :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_VIEW_MM .. autodata:: geosoft.gxapi.LAYOUT_VIEW_CM :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_VIEW_CM .. autodata:: geosoft.gxapi.LAYOUT_VIEW_IN :annotation: .. autoattribute:: geosoft.gxapi.LAYOUT_VIEW_IN <file_sep>/examples/tutorial/Geosoft Databases/distance_expression.py import numpy as np import math import geosoft.gxpy as gxpy import geosoft.gxpy.gdb as gxdb gxc = gxpy.gx.GXpy() # open the database, best practice is to use a 'with ...' construct with gxdb.Geosoft_gdb.open('mag_data_split') as gdb: # make a distance channel dist_channel = gxdb.Channel.new(gdb, 'distance', dup='x', replace=True) # work through each line for line in gdb.list_lines(): print ('processing line {}'.format(line)) # read data from the line, returns in a 2D numpy array. 
xy_data, channels_read, fid = gdb.read_line(line, ('x', 'y')) # get the first point (x0, y0) x0 = xy_data[0, 0] y0 = xy_data[0, 1] # FAST - use numpy array math to calculate distance in a 1D array dist_array dist_array = np.sqrt((xy_data[:, 0] - x0)**2 + (xy_data[:, 1] - y0)**2) # save the distance gdb.write_channel(line, dist_channel, dist_array, fid) exit() <file_sep>/geosoft/gxapi/GXGD.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXGD(gxapi_cy.WrapGD): """ GXGD class. This class provides access to Geosoft grid files using an old interface. Only the `GXDU.sample_gd <geosoft.gxapi.GXDU.sample_gd>` function uses this class. Use the `GXIMG <geosoft.gxapi.GXIMG>` class instead. """ def __init__(self, handle=0): super(GXGD, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXGD <geosoft.gxapi.GXGD>` :returns: A null `GXGD <geosoft.gxapi.GXGD>` :rtype: GXGD """ return GXGD() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, name, type): """ This method creates a `GXGD <geosoft.gxapi.GXGD>` object. :param name: Name of the Grid File :param type: :ref:`GD_STATUS` :type name: str :type type: int :returns: Handle to the `GXGD <geosoft.gxapi.GXGD>` object :rtype: GXGD .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapGD._create(GXContext._get_tls_geo(), name.encode(), type) return GXGD(ret_val) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXCSYMB.rst .. _GXCSYMB: GXCSYMB class ================================== .. autoclass:: geosoft.gxapi.GXCSYMB :members: .. _CSYMB_COLOR: CSYMB_COLOR constants ----------------------------------------------------------------------- Color Symbol filling defines .. autodata:: geosoft.gxapi.CSYMB_COLOR_EDGE :annotation: .. autoattribute:: geosoft.gxapi.CSYMB_COLOR_EDGE .. autodata:: geosoft.gxapi.CSYMB_COLOR_FILL :annotation: .. autoattribute:: geosoft.gxapi.CSYMB_COLOR_FILL <file_sep>/docs/GXTR.rst .. _GXTR: GXTR class ================================== .. autoclass:: geosoft.gxapi.GXTR :members: <file_sep>/docs/GXDH.rst .. _GXDH: GXDH class ================================== .. autoclass:: geosoft.gxapi.GXDH :members: .. _DH_DEFAULT_FILENAMES: DH_DEFAULT_FILENAMES constants ----------------------------------------------------------------------- Default filenames .. autodata:: geosoft.gxapi.DH_DEFAULT_ROCKCODE_FILE :annotation: .. autoattribute:: geosoft.gxapi.DH_DEFAULT_ROCKCODE_FILE .. autodata:: geosoft.gxapi.DH_DEFAULT_STRUCTURECODE_FILE :annotation: .. autoattribute:: geosoft.gxapi.DH_DEFAULT_STRUCTURECODE_FILE .. _STR_DH_HOLES: STR_DH_HOLES constants ----------------------------------------------------------------------- This declares the size of the string used in various `GXDH <geosoft.gxapi.GXDH>` GXs to store all the currently selected holes, as input to the two-panel selection tool. This should be big enough for 65,000 16-character hole names! 
.. autodata:: geosoft.gxapi.STR_DH_HOLES :annotation: .. autoattribute:: geosoft.gxapi.STR_DH_HOLES .. _DH_COMP_CHOICE: DH_COMP_CHOICE constants ----------------------------------------------------------------------- Composition .. autodata:: geosoft.gxapi.DH_COMP_DONE :annotation: .. autoattribute:: geosoft.gxapi.DH_COMP_DONE .. autodata:: geosoft.gxapi.DH_COMP_CANCEL :annotation: .. autoattribute:: geosoft.gxapi.DH_COMP_CANCEL .. autodata:: geosoft.gxapi.DH_COMP_SELECT :annotation: .. autoattribute:: geosoft.gxapi.DH_COMP_SELECT .. autodata:: geosoft.gxapi.DH_COMP_REFRESH :annotation: .. autoattribute:: geosoft.gxapi.DH_COMP_REFRESH .. _DH_COMPSTDB_HOLSEL: DH_COMPSTDB_HOLSEL constants ----------------------------------------------------------------------- Composite Hole Selection .. autodata:: geosoft.gxapi.DH_COMPSTDB_HOLSEL_ALL :annotation: .. autoattribute:: geosoft.gxapi.DH_COMPSTDB_HOLSEL_ALL .. autodata:: geosoft.gxapi.DH_COMPSTDB_HOLSEL_SELECTED :annotation: .. autoattribute:: geosoft.gxapi.DH_COMPSTDB_HOLSEL_SELECTED .. _DH_COMPSTDB_INTSEL: DH_COMPSTDB_INTSEL constants ----------------------------------------------------------------------- Composite Interval .. autodata:: geosoft.gxapi.DH_COMPSTDB_INTSEL_FIXED :annotation: .. autoattribute:: geosoft.gxapi.DH_COMPSTDB_INTSEL_FIXED .. autodata:: geosoft.gxapi.DH_COMPSTDB_INTSEL_LITHOLOGY :annotation: .. autoattribute:: geosoft.gxapi.DH_COMPSTDB_INTSEL_LITHOLOGY .. autodata:: geosoft.gxapi.DH_COMPSTDB_INTSEL_BESTFITLITH :annotation: .. autoattribute:: geosoft.gxapi.DH_COMPSTDB_INTSEL_BESTFITLITH .. autodata:: geosoft.gxapi.DH_COMPSTDB_INTSEL_INTFILE :annotation: .. autoattribute:: geosoft.gxapi.DH_COMPSTDB_INTSEL_INTFILE .. _DH_DATA: DH_DATA constants ----------------------------------------------------------------------- What to import .. autodata:: geosoft.gxapi.DH_DATA_DIPAZIMUTH :annotation: .. autoattribute:: geosoft.gxapi.DH_DATA_DIPAZIMUTH .. autodata:: geosoft.gxapi.DH_DATA_EASTNORTH :annotation: .. 
autoattribute:: geosoft.gxapi.DH_DATA_EASTNORTH .. autodata:: geosoft.gxapi.DH_DATA_FROMTO :annotation: .. autoattribute:: geosoft.gxapi.DH_DATA_FROMTO .. autodata:: geosoft.gxapi.DH_DATA_POINT :annotation: .. autoattribute:: geosoft.gxapi.DH_DATA_POINT .. autodata:: geosoft.gxapi.DH_DATA_COLLAR :annotation: .. autoattribute:: geosoft.gxapi.DH_DATA_COLLAR .. autodata:: geosoft.gxapi.DH_DATA_UNKNOWN :annotation: .. autoattribute:: geosoft.gxapi.DH_DATA_UNKNOWN .. _DH_DEFINE_PLAN: DH_DEFINE_PLAN constants ----------------------------------------------------------------------- Plans .. autodata:: geosoft.gxapi.DH_DEFINE_PLAN :annotation: .. autoattribute:: geosoft.gxapi.DH_DEFINE_PLAN .. _DH_DEFINE_SECT: DH_DEFINE_SECT constants ----------------------------------------------------------------------- Types of Sections .. autodata:: geosoft.gxapi.DH_DEFINE_SECT_NS :annotation: .. autoattribute:: geosoft.gxapi.DH_DEFINE_SECT_NS .. autodata:: geosoft.gxapi.DH_DEFINE_SECT_EW :annotation: .. autoattribute:: geosoft.gxapi.DH_DEFINE_SECT_EW .. autodata:: geosoft.gxapi.DH_DEFINE_SECT_ANGLED :annotation: .. autoattribute:: geosoft.gxapi.DH_DEFINE_SECT_ANGLED .. _DH_EXP: DH_EXP constants ----------------------------------------------------------------------- Type of Export .. autodata:: geosoft.gxapi.DH_EXP_CSV :annotation: .. autoattribute:: geosoft.gxapi.DH_EXP_CSV .. autodata:: geosoft.gxapi.DH_EXP_ASCII :annotation: .. autoattribute:: geosoft.gxapi.DH_EXP_ASCII .. autodata:: geosoft.gxapi.DH_EXP_ACCESS :annotation: .. autoattribute:: geosoft.gxapi.DH_EXP_ACCESS .. autodata:: geosoft.gxapi.DH_EXP_SHP :annotation: .. autoattribute:: geosoft.gxapi.DH_EXP_SHP .. autodata:: geosoft.gxapi.DH_EXP_SURPAC :annotation: .. autoattribute:: geosoft.gxapi.DH_EXP_SURPAC .. autodata:: geosoft.gxapi.DH_EXP_SHP_TRACES :annotation: .. autoattribute:: geosoft.gxapi.DH_EXP_SHP_TRACES .. 
_DH_HOLES: DH_HOLES constants ----------------------------------------------------------------------- Holes to select .. autodata:: geosoft.gxapi.DH_HOLES_ALL :annotation: .. autoattribute:: geosoft.gxapi.DH_HOLES_ALL .. autodata:: geosoft.gxapi.DH_HOLES_SELECTED :annotation: .. autoattribute:: geosoft.gxapi.DH_HOLES_SELECTED .. _DH_MASK: DH_MASK constants ----------------------------------------------------------------------- Masks .. autodata:: geosoft.gxapi.DH_MASK_APPEND :annotation: .. autoattribute:: geosoft.gxapi.DH_MASK_APPEND .. autodata:: geosoft.gxapi.DH_MASK_NEW :annotation: .. autoattribute:: geosoft.gxapi.DH_MASK_NEW .. _DH_PLOT: DH_PLOT constants ----------------------------------------------------------------------- Type of Plot .. autodata:: geosoft.gxapi.DH_PLOT_PLAN :annotation: .. autoattribute:: geosoft.gxapi.DH_PLOT_PLAN .. autodata:: geosoft.gxapi.DH_PLOT_SECTION :annotation: .. autoattribute:: geosoft.gxapi.DH_PLOT_SECTION .. autodata:: geosoft.gxapi.DH_PLOT_STRIPLOG :annotation: .. autoattribute:: geosoft.gxapi.DH_PLOT_STRIPLOG .. autodata:: geosoft.gxapi.DH_PLOT_HOLE_TRACES :annotation: .. autoattribute:: geosoft.gxapi.DH_PLOT_HOLE_TRACES .. autodata:: geosoft.gxapi.DH_PLOT_3D :annotation: .. autoattribute:: geosoft.gxapi.DH_PLOT_3D .. autodata:: geosoft.gxapi.DH_PLOT_SECTION_STACK :annotation: .. autoattribute:: geosoft.gxapi.DH_PLOT_SECTION_STACK .. autodata:: geosoft.gxapi.DH_PLOT_SECTION_FENCE :annotation: .. autoattribute:: geosoft.gxapi.DH_PLOT_SECTION_FENCE .. autodata:: geosoft.gxapi.DH_PLOT_SECTION_CROOKED :annotation: .. autoattribute:: geosoft.gxapi.DH_PLOT_SECTION_CROOKED .. _DH_SECT_PAGE: DH_SECT_PAGE constants ----------------------------------------------------------------------- Sections .. autodata:: geosoft.gxapi.DH_SECT_PAGE_SECTION :annotation: .. autoattribute:: geosoft.gxapi.DH_SECT_PAGE_SECTION .. 
_DH_SURFACE: DH_SURFACE constants ----------------------------------------------------------------------- Surface selection for creation of geological top or bottom surfaces. .. autodata:: geosoft.gxapi.DH_SURFACE_FIRST_LAYER_FROM :annotation: .. autoattribute:: geosoft.gxapi.DH_SURFACE_FIRST_LAYER_FROM .. autodata:: geosoft.gxapi.DH_SURFACE_FIRST_LAYER_TO :annotation: .. autoattribute:: geosoft.gxapi.DH_SURFACE_FIRST_LAYER_TO .. autodata:: geosoft.gxapi.DH_SURFACE_SECOND_LAYER_FROM :annotation: .. autoattribute:: geosoft.gxapi.DH_SURFACE_SECOND_LAYER_FROM .. autodata:: geosoft.gxapi.DH_SURFACE_SECOND_LAYER_TO :annotation: .. autoattribute:: geosoft.gxapi.DH_SURFACE_SECOND_LAYER_TO .. autodata:: geosoft.gxapi.DH_SURFACE_LAST_LAYER_FROM :annotation: .. autoattribute:: geosoft.gxapi.DH_SURFACE_LAST_LAYER_FROM .. autodata:: geosoft.gxapi.DH_SURFACE_LAST_LAYER_TO :annotation: .. autoattribute:: geosoft.gxapi.DH_SURFACE_LAST_LAYER_TO .. _DIP_CONVENTION: DIP_CONVENTION constants ----------------------------------------------------------------------- Dip convention .. autodata:: geosoft.gxapi.DIP_CONVENTION_NEGATIVE :annotation: .. autoattribute:: geosoft.gxapi.DIP_CONVENTION_NEGATIVE .. autodata:: geosoft.gxapi.DIP_CONVENTION_POSITIVE :annotation: .. autoattribute:: geosoft.gxapi.DIP_CONVENTION_POSITIVE .. _DH_DESURVEY: DH_DESURVEY constants ----------------------------------------------------------------------- Desurvey method .. autodata:: geosoft.gxapi.DH_DESURVEY_RAD_CURVE :annotation: .. autoattribute:: geosoft.gxapi.DH_DESURVEY_RAD_CURVE .. autodata:: geosoft.gxapi.DH_DESURVEY_POLYNOMIAL :annotation: .. autoattribute:: geosoft.gxapi.DH_DESURVEY_POLYNOMIAL .. autodata:: geosoft.gxapi.DH_DESURVEY_STRAIGHT_SEG :annotation: .. autoattribute:: geosoft.gxapi.DH_DESURVEY_STRAIGHT_SEG <file_sep>/geosoft/gxapi/GXIPJ.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXIPJ(gxapi_cy.WrapIPJ): """ GXIPJ class. The `GXIPJ <geosoft.gxapi.GXIPJ>` class describes a single spatial reference in the world, defined under a coordinate system, an orientation, and a warp (which can be used to distort the projected object to a particular shape or boundary). **Note:** `GXIPJ <geosoft.gxapi.GXIPJ>` objects may be attached to channels or views. Two IPJs taken together are used to create a `GXPJ <geosoft.gxapi.GXPJ>` object, which allows for the conversion of positions from one projection to the other. See also the `GXLL2 <geosoft.gxapi.GXLL2>` class, which creates Datum correction lookups. See also `GXPJ <geosoft.gxapi.GXPJ>` Converts coordinates between projections `GXLL2 <geosoft.gxapi.GXLL2>` Creates Datum correction lookups. """ def __init__(self, handle=0): super(GXIPJ, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXIPJ <geosoft.gxapi.GXIPJ>` :returns: A null `GXIPJ <geosoft.gxapi.GXIPJ>` :rtype: GXIPJ """ return GXIPJ() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def clear_warp(self): """ Clear warp parameters (if any) from an `GXIPJ <geosoft.gxapi.GXIPJ>`. .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clear_warp() def make_geographic(self): """ Remove a projected coordinate system from an `GXIPJ <geosoft.gxapi.GXIPJ>` .. 
versionadded:: 5.1.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function does nothing if the `GXIPJ <geosoft.gxapi.GXIPJ>` is not a projected coordinate system. """ self._make_geographic() def make_wgs84(self): """ Make a WGS 84 geographic projection .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._make_wgs84() def set_units(self, scale, str_val): """ Set unit parameters :param scale: Factor to meters, must be >= 0.0 :param str_val: Abbreviation, can be "" :type scale: float :type str_val: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_units(scale, str_val.encode()) def add_exagg_warp(self, x_exag, y_exag, z_exag, x_orig, y_orig, z_orig): """ Add a warp to `GXIPJ <geosoft.gxapi.GXIPJ>` to exaggerate X, Y and Z. :param x_exag: X exaggeration, must be > 0.0 :param y_exag: Y exaggeration, must be > 0.0 :param z_exag: Z exaggeration, must be > 0.0 :param x_orig: X reference origin :param y_orig: Y reference origin :param z_orig: Z reference origin :type x_exag: float :type y_exag: float :type z_exag: float :type x_orig: float :type y_orig: float :type z_orig: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_exagg_warp(x_exag, y_exag, z_exag, x_orig, y_orig, z_orig) def add_log_warp(self, x, y): """ Add a warp to `GXIPJ <geosoft.gxapi.GXIPJ>` to log one or both coordinates :param x: Log in X? :param y: Log in Y? :type x: int :type y: int ..
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_log_warp(x, y) def add_matrix_warp(self, v00, v01, v02, v03, v10, v11, v12, v13, v20, v21, v22, v23, v30, v31, v32, v33): """ Add a warp to `GXIPJ <geosoft.gxapi.GXIPJ>` using a matrix :param v00: Row 0 Element 0 :param v01: Row 0 Element 1 :param v02: Row 0 Element 2 :param v03: Row 0 Element 3 :param v10: Row 1 Element 0 :param v11: Row 1 Element 1 :param v12: Row 1 Element 2 :param v13: Row 1 Element 3 :param v20: Row 2 Element 0 :param v21: Row 2 Element 1 :param v22: Row 2 Element 2 :param v23: Row 2 Element 3 :param v30: Row 3 Element 0 :param v31: Row 3 Element 1 :param v32: Row 3 Element 2 :param v33: Row 3 Element 3 :type v00: float :type v01: float :type v02: float :type v03: float :type v10: float :type v11: float :type v12: float :type v13: float :type v20: float :type v21: float :type v22: float :type v23: float :type v30: float :type v31: float :type v32: float :type v33: float .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_matrix_warp(v00, v01, v02, v03, v10, v11, v12, v13, v20, v21, v22, v23, v30, v31, v32, v33) def add_warp(self, type, vv_x_old, vv_y_old, vv_x_new, vv_y_new): """ Add a warp to `GXIPJ <geosoft.gxapi.GXIPJ>`. :param type: :ref:`IPJ_TYPE` :param vv_x_old: Old X `GXVV <geosoft.gxapi.GXVV>` (real) :param vv_y_old: Old Y `GXVV <geosoft.gxapi.GXVV>` (real) :param vv_x_new: New X `GXVV <geosoft.gxapi.GXVV>` (real) :param vv_y_new: New Y `GXVV <geosoft.gxapi.GXVV>` (real) :type type: int :type vv_x_old: GXVV :type vv_y_old: GXVV :type vv_x_new: GXVV :type vv_y_new: GXVV .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** There must be at least "warp type" points in the warp point `GXVV <geosoft.gxapi.GXVV>`'s. All point `GXVV <geosoft.gxapi.GXVV>`'s must have the same number of points. If there are more points than required by the warp, the warp will be determined by least-square fitting to the warp surface for all but the 4-point warp. The 4-point warp requires exactly 4 points. Cannot be used with WARP_MATRIX or WARP_LOG """ self._add_warp(type, vv_x_old, vv_y_old, vv_x_new, vv_y_new) def clear_coordinate_system(self): """ Clear coordinate system, except for units .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Clears the Datum, Local Datum and Projection info. Leaves units, any warp or orientation warp unchanged. """ self._clear_coordinate_system() def clear_orientation(self): """ Clear any orientation and/or warp from an `GXIPJ <geosoft.gxapi.GXIPJ>`. .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._clear_orientation() def copy_orientation(self, ip_jd): """ Copy any orientation and/or warp from one `GXIPJ <geosoft.gxapi.GXIPJ>` to another. :param ip_jd: Destination `GXIPJ <geosoft.gxapi.GXIPJ>` :type ip_jd: GXIPJ .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy_orientation(ip_jd) def convert_orientation_warp_vv(self, vv_x, vv_y, vv_z, f_forward): """ Convert X,Y and Z VVs using the orientation warp from an `GXIPJ <geosoft.gxapi.GXIPJ>`.
:param vv_x: X `GXVV <geosoft.gxapi.GXVV>` coordinates converted on output :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` coordinates converted on output :param vv_z: Z `GXVV <geosoft.gxapi.GXVV>` coordinates converted on output :param f_forward: 1 - Forward (raw -> coordinate) , 0 - (coordinate -> raw) :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type f_forward: int .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._convert_orientation_warp_vv(vv_x, vv_y, vv_z, f_forward) def copy(self, ip_jd): """ Copy IPJs :param ip_jd: Destination `GXIPJ <geosoft.gxapi.GXIPJ>` :type ip_jd: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(ip_jd) def copy_projection(self, ip_jd): """ Copy the projection from one `GXIPJ <geosoft.gxapi.GXIPJ>` to another :param ip_jd: Source :type ip_jd: GXIPJ .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Copies the projection parameters, while leaving the rest (e.g. Datum, Local Datum Transform) unchanged. """ self._copy_projection(ip_jd) @classmethod def create(cls): """ This method creates a projection object. :returns: `GXIPJ <geosoft.gxapi.GXIPJ>` Object :rtype: GXIPJ .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIPJ._create(GXContext._get_tls_geo()) return GXIPJ(ret_val) @classmethod def create_s(cls, bf): """ Create `GXIPJ <geosoft.gxapi.GXIPJ>` from serialized source. :type bf: GXBF :returns: `GXIPJ <geosoft.gxapi.GXIPJ>` Object :rtype: GXIPJ .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIPJ._create_s(GXContext._get_tls_geo(), bf) return GXIPJ(ret_val) @classmethod def create_xml(cls, file): """ Create an `GXIPJ <geosoft.gxapi.GXIPJ>` from serialized Geosoft MetaData XML file :param file: File Name :type file: str :returns: `GXIPJ <geosoft.gxapi.GXIPJ>` Object :rtype: GXIPJ .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIPJ._create_xml(GXContext._get_tls_geo(), file.encode()) return GXIPJ(ret_val) def get_3d_view(self, x, y, z, rx, ry, rz, sx, sy, str_val): """ Get 3D orientation parameters :param x: X location of view origin :param y: Y location of view origin :param z: Z location of view origin :param rx: Rotation in X :param ry: Rotation in Y :param rz: Rotation in Z :param sx: Scaling in X :param sy: Scaling in Y :param str_val: Scaling in Z :type x: float_ref :type y: float_ref :type z: float_ref :type rx: float_ref :type ry: float_ref :type rz: float_ref :type sx: float_ref :type sy: float_ref :type str_val: float_ref .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The view must have a 3D orientation """ x.value, y.value, z.value, rx.value, ry.value, rz.value, sx.value, sy.value, str_val.value = self._get_3d_view(x.value, y.value, z.value, rx.value, ry.value, rz.value, sx.value, sy.value, str_val.value) def get_3d_view_ex(self, x, y, z, rx, ry, rz, sx, sy, str_val, rotate, flags): """ Get 3D orientation parameters with new flags :param x: X location of view origin :param y: Y location of view origin :param z: Z location of view origin :param rx: Rotation in X :param ry: Rotation in Y :param rz: Rotation in Z :param sx: Scaling in X :param sy: Scaling in Y :param str_val: Scaling in Z :param rotate: :ref:`IPJ_3D_ROTATE` :param flags: :ref:`IPJ_3D_FLAG` :type x: float_ref :type y: float_ref :type z: float_ref :type rx: float_ref :type ry: float_ref :type rz: float_ref :type sx: float_ref :type sy: float_ref :type str_val: float_ref :type rotate: int_ref :type flags: int_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The view must have a 3D orientation """ x.value, y.value, z.value, rx.value, ry.value, rz.value, sx.value, sy.value, str_val.value, rotate.value, flags.value = self._get_3d_view_ex(x.value, y.value, z.value, rx.value, ry.value, rz.value, sx.value, sy.value, str_val.value, rotate.value, flags.value) def get_crooked_section_view_v_vs(self, dist_vv, xvv, yvv, log_z): """ Get the crooked section path. :param dist_vv: Section X locations (e.g. distance along the curve) :param xvv: True X :param yvv: True Y :param log_z: Use logarithmic Y-axis (usually for data profiles) 0:No, 1:Yes :type dist_vv: GXVV :type xvv: GXVV :type yvv: GXVV :type log_z: int_ref .. 
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns the original VVs used to set up the crooked section path. """ log_z.value = self._get_crooked_section_view_v_vs(dist_vv, xvv, yvv, log_z.value) @classmethod def get_list(cls, parm, datum, lst): """ Get a list of parameters. :param parm: :ref:`IPJ_PARM_LST` :param datum: Datum filter, "" for no filter :param lst: List returned :type parm: int :type datum: str :type lst: GXLST .. versionadded:: 6.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The datum filter string, if specified, will limit the requested list to those valid for the specified datum. """ gxapi_cy.WrapIPJ._get_list(GXContext._get_tls_geo(), parm, datum.encode(), lst) def get_orientation_info(self, x, y, z, az, swing): """ Get `GXIPJ <geosoft.gxapi.GXIPJ>` orientation parameters. :param x: Plane Origin X :param y: Plane Origin Y :param z: Plane Origin Z :param az: Plane Azimuth (section) or Rotation (plan) :param swing: Plane Swing (section) :type x: float_ref :type y: float_ref :type z: float_ref :type az: float_ref :type swing: float_ref .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** IPJ_ORIENT_TYPE: `IPJ_ORIENT_DEFAULT <geosoft.gxapi.IPJ_ORIENT_DEFAULT>` - no special orientation - plan view. This is equivalent to `IPJ_ORIENT_PLAN <geosoft.gxapi.IPJ_ORIENT_PLAN>` with dXo = dYo = dZo = dRotation = 0.0. `IPJ_ORIENT_PLAN <geosoft.gxapi.IPJ_ORIENT_PLAN>` Azimuth = Rotation CCW degrees The plan differs from the default view in that a reference level is set, and the axes can be rotated and offset from the local X,Y.
`IPJ_ORIENT_SECTION <geosoft.gxapi.IPJ_ORIENT_SECTION>` Azimuth - CW degrees from North -360 <= azimuth <= 360 Swing - degrees bottom towards viewer -90 < swing < 90 The section view projects all plotted objects HORIZONTALLY onto the viewing plan in order to preserve elevations, even if the section has a swing. """ x.value, y.value, z.value, az.value, swing.value = self._get_orientation_info(x.value, y.value, z.value, az.value, swing.value) def get_plane_equation(self, min_x, min_y, max_x, max_y, pitch, yaw, roll, x, y, z, sx, sy, str_val): """ Get the equation of a plane :param min_x: Min X of surface :param min_y: Min Y of surface :param max_x: Max X of surface :param max_y: Max Y of surface :param pitch: Pitch angle (between -360 and 360) :param yaw: Yaw angle (between -360 and 360) :param roll: Roll angles (between -360 and 360) :param x: X offset of plane :param y: Y offset of plane :param z: Z offset of plane :param sx: X scale :param sy: Y scale :param str_val: Z scale :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type pitch: float_ref :type yaw: float_ref :type roll: float_ref :type x: float_ref :type y: float_ref :type z: float_ref :type sx: float_ref :type sy: float_ref :type str_val: float_ref .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Two opposite corners of the plane are required. Because the origin of the plane does not necessarily have a stable back-projection into true 3d coordinates. In practice, use the current view extents, or the corners of a grid. 
""" pitch.value, yaw.value, roll.value, x.value, y.value, z.value, sx.value, sy.value, str_val.value = self._get_plane_equation(min_x, min_y, max_x, max_y, pitch.value, yaw.value, roll.value, x.value, y.value, z.value, sx.value, sy.value, str_val.value) def get_plane_equation2(self, ip_jo, min_x, min_y, max_x, max_y, pitch, yaw, roll, x, y, z, sx, sy, str_val): """ Get the equation of a plane with reprojection. :param ip_jo: `GXIPJ <geosoft.gxapi.GXIPJ>` object for the output values :param min_x: Min X of surface (in grid coords) :param min_y: Min Y of surface :param max_x: Max X of surface :param max_y: Max Y of surface :param pitch: Pitch angle (between -360 and 360) (in view coords) :param yaw: Yaw angle (between -360 and 360) :param roll: Roll angles (between -360 and 360) :param x: X offset of plane (in view coords) :param y: Y offset of plane :param z: Z offset of plane :param sx: X scale (in view coords) :param sy: Y scale :param str_val: Z scale :type ip_jo: GXIPJ :type min_x: float :type min_y: float :type max_x: float :type max_y: float :type pitch: float_ref :type yaw: float_ref :type roll: float_ref :type x: float_ref :type y: float_ref :type z: float_ref :type sx: float_ref :type sy: float_ref :type str_val: float_ref .. versionadded:: 6.4.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the same as `get_plane_equation <geosoft.gxapi.GXIPJ.get_plane_equation>`, but the input projected coordinate system (PCS) may be different from that of the `GXIPJ <geosoft.gxapi.GXIPJ>` you want the plane equation values described in. This may be required, for instance, when a 3D view has been created in one PCS, and an oriented grid from a different PCS is to be displayed in that view. If the two input IPJs share the same PCS then the `get_plane_equation <geosoft.gxapi.GXIPJ.get_plane_equation>` function is called directly, using the input `GXIPJ <geosoft.gxapi.GXIPJ>`. 
""" pitch.value, yaw.value, roll.value, x.value, y.value, z.value, sx.value, sy.value, str_val.value = self._get_plane_equation2(ip_jo, min_x, min_y, max_x, max_y, pitch.value, yaw.value, roll.value, x.value, y.value, z.value, sx.value, sy.value, str_val.value) def compare_datums(self, ipj2): """ Compare the datums of two coordinate systems? :param ipj2: `GXIPJ <geosoft.gxapi.GXIPJ>` 2 :type ipj2: GXIPJ :returns: 0 - Datums are different 1 - Datums are the same, but different LDT 2 - Datums and LTD are the same :rtype: int .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To transform between different datums requires the use of a local datum transform. The local datum transform can be defined when a coordinate system is created, but the definition is optional. This function will test that the local datum transforms are defined. Note that a coordinate transformation between datums without a local datum transform is still possible, but only the effect of ellipsoid shape will be modelled in the transform. """ ret_val = self._compare_datums(ipj2) return ret_val def compare_datums_to_specified_tolerance_with_feedback(self, ipj2, sig_digits, str_val): """ Compare the datums of two coordinate systems, but allows for a specified accuracy and returns the reason if they are different :param ipj2: `GXIPJ <geosoft.gxapi.GXIPJ>` 2 :param sig_digits: Significant digits (0 for exact) :param str_val: Reason if different returned :type ipj2: GXIPJ :type sig_digits: int :type str_val: str_ref :returns: 0 - Datums are different 1 - Datums are the same, but different LDT 2 - Datums and LTD are the same :rtype: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To transform between different datums requires the use of a local datum transform. 
The local datum transform can be defined when a coordinate system is created, but the definition is optional. This function will test that the local datum transforms are defined. Note that a coordinate transformation between datums without a local datum transform is still possible, but only the effect of ellipsoid shape will be modelled in the transform. """ ret_val, str_val.value = self._compare_datums_to_specified_tolerance_with_feedback(ipj2, sig_digits, str_val.value.encode()) return ret_val def convert_warp(self, x, y, z, f_forward): """ Converts a point X, Y, Z to the new `GXIPJ <geosoft.gxapi.GXIPJ>` plane. :param x: X coordinates converted on output :param y: Y coordinates converted on output :param z: Z coordinates converted on output :param f_forward: 1 - Forward (raw -> coordinate) , 0 - (coordinate -> raw) :type x: float_ref :type y: float_ref :type z: float_ref :type f_forward: int :returns: 0 if ok - 1 otherwise :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, x.value, y.value, z.value = self._convert_warp(x.value, y.value, z.value, f_forward) return ret_val def convert_warp_vv(self, vv_x, vv_y, f_forward): """ Converts a set of X & Y VVs to the new `GXIPJ <geosoft.gxapi.GXIPJ>` plane. The Z is assumed to be 0 :param vv_x: X `GXVV <geosoft.gxapi.GXVV>` coordinates converted on output :param vv_y: Y `GXVV <geosoft.gxapi.GXVV>` coordinates converted on output :param f_forward: 1 - Forward (raw -> coordinate) , 0 - (coordinate -> raw) :type vv_x: GXVV :type vv_y: GXVV :type f_forward: int :returns: 0 if ok - 1 otherwise :rtype: int .. 
versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._convert_warp_vv(vv_x, vv_y, f_forward) return ret_val def coordinate_systems_are_the_same(self, ipj2): """ Are these two coordinate systems the same? :param ipj2: `GXIPJ <geosoft.gxapi.GXIPJ>` 2 :type ipj2: GXIPJ :returns: 0 - No 1 - Yes :rtype: int .. versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This does not compare LDT information (use `compare_datums <geosoft.gxapi.GXIPJ.compare_datums>` for that). """ ret_val = self._coordinate_systems_are_the_same(ipj2) return ret_val def coordinate_systems_are_the_same_within_a_small_tolerance(self, ipj2): """ Same as `coordinate_systems_are_the_same <geosoft.gxapi.GXIPJ.coordinate_systems_are_the_same>`, but allows for small numerical differences :param ipj2: `GXIPJ <geosoft.gxapi.GXIPJ>` 2 :type ipj2: GXIPJ :returns: 0 - No 1 - Yes :rtype: int .. 
versionadded:: 7.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._coordinate_systems_are_the_same_within_a_small_tolerance(ipj2) return ret_val def coordinate_systems_are_the_same_to_specified_tolerance_with_feedback(self, ipj2, sig_digits, sig_digits_EN, str_val): """ Same as `coordinate_systems_are_the_same <geosoft.gxapi.GXIPJ.coordinate_systems_are_the_same>`, but allows for a specified accuracy and returns the reason if they are different :param ipj2: `GXIPJ <geosoft.gxapi.GXIPJ>` 2 :param sig_digits: Significant digits (0 for exact) :param sig_digits_EN: Significant digits for high-precision parameters like easting and northing (0 for exact) :param str_val: Reason if different returned :type ipj2: GXIPJ :type sig_digits: int :type sig_digits_EN: int :type str_val: str_ref :returns: 0 - No 1 - Yes :rtype: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, str_val.value = self._coordinate_systems_are_the_same_to_specified_tolerance_with_feedback(ipj2, sig_digits, sig_digits_EN, str_val.value.encode()) return ret_val def get_display_name(self, str_val): """ Get a name for display purposes from `GXIPJ <geosoft.gxapi.GXIPJ>` :param str_val: Name returned :type str_val: str_ref .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ str_val.value = self._get_display_name(str_val.value.encode()) def get_esri(self, esri): """ Store coordinate system in an ESRI prj coordinate string :param esri: ESRI projection string returned :type esri: str_ref .. 
versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the projection is not supported in ESRI, the projection string will be empty. """ esri.value = self._get_esri(esri.value.encode()) def get_gxf(self, str1, str2, str3, str4, str5): """ Store coordinate system in GXF style strings. :param str1: Projection name :param str2: Datum name, major axis, ellipticity :param str3: Method name, parameters :param str4: Unit name, factor :param str5: Local transform name,dX,dY,dZ,rX,rY,rZ,Scale :type str1: str_ref :type str2: str_ref :type str3: str_ref :type str4: str_ref :type str5: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See GXF revision 3 for string descriptions All strings must be the same length, 160 (`STR_GXF <geosoft.gxapi.STR_GXF>`) recommended. Strings too short will be truncated. """ str1.value, str2.value, str3.value, str4.value, str5.value = self._get_gxf(str1.value.encode(), str2.value.encode(), str3.value.encode(), str4.value.encode(), str5.value.encode()) def get_mi_coord_sys(self, coord, units): """ Store coordinate system in MapInfo coordsys pair :param coord: MapInfo coordsys string returned :param units: MapInfo unit string returned :type coord: str_ref :type units: str_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ coord.value, units.value = self._get_mi_coord_sys(coord.value.encode(), units.value.encode()) def get_name(self, type, str_val): """ Get an `GXIPJ <geosoft.gxapi.GXIPJ>` name :param type: :ref:`IPJ_NAME` :param str_val: Name returned :type type: int :type str_val: str_ref ..
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ str_val.value = self._get_name(type, str_val.value.encode()) def set_vcs(self, str_val): """ Set the Vertical Coordinate System in the `GXIPJ <geosoft.gxapi.GXIPJ>` name string :param str_val: New name (See Valid inputs above). :type str_val: str .. versionadded:: 9.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The vertical coordinate system (vcs) describes the datum used for vertical coordinates. The vcs name, if known, will appear in square brackets as part of the coordinate system name. Examples: :: "WGS 84 [geoid]" "WGS 84 / UTM zone 12S" - the vcs is not known. "WGS 84 / UTM zone 12S [NAVD88]" Valid inputs: "NAVD88" - Clears existing vcs, if any, and sets the VCS name to "NAVD88". "" - Clears the vcs """ self._set_vcs(str_val.encode()) def get_orientation(self): """ Get `GXIPJ <geosoft.gxapi.GXIPJ>` orientation in space. :returns: :ref:`IPJ_ORIENT` :rtype: int .. versionadded:: 5.1.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Projections can be created oriented horizontally (e.g. in plan maps) or vertically (in section maps - Wholeplot and `GXIP <geosoft.gxapi.GXIP>`). """ ret_val = self._get_orientation() return ret_val def get_orientation_name(self, str_val): """ Get a name for display purposes from `GXIPJ <geosoft.gxapi.GXIPJ>` :param str_val: Name returned :type str_val: str_ref .. 
versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        str_val.value = self._get_orientation_name(str_val.value.encode())


    def get_units(self, scale, str_val):
        """
        Get unit parameters

        :param scale:    Factor to meters
        :param str_val:  Abbreviation
        :type  scale:    float_ref
        :type  str_val:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        scale.value, str_val.value = self._get_units(scale.value, str_val.value.encode())


    def get_xml(self, str_val):
        """
        Get an Geosoft Metadata XML string from an `GXIPJ <geosoft.gxapi.GXIPJ>`

        :param str_val:  XML string returned
        :type  str_val:  str_ref

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        str_val.value = self._get_xml(str_val.value.encode())


    def has_projection(self):
        """
        Does the `GXIPJ <geosoft.gxapi.GXIPJ>` object contain a projection?

        :returns:    0 - No
                     1 - Yes
        :rtype:      int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._has_projection()
        return ret_val


    def is_3d_inverted(self):
        """
        Is this 3D View inverted ?

        :returns:    0 - No
                     1 - Yes (inverted)
        :rtype:      int

        .. versionadded:: 6.3.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_3d_inverted()
        return ret_val


    def is_3d_inverted_angles(self):
        """
        Are the angles in this 3D View inverted ?

        :returns:    0 - No
                     1 - Yes (inverted)
        :rtype:      int

        .. versionadded:: 6.3.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_3d_inverted_angles()
        return ret_val


    def is_geographic(self):
        """
        See if this projection is geographic

        :returns:    0 - No
                     1 - Yes
        :rtype:      int

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._is_geographic()
        return ret_val


    def orientations_are_the_same(self, ipj2):
        """
        Are these two orientations the same?

        :param ipj2:  `GXIPJ <geosoft.gxapi.GXIPJ>` 2
        :type  ipj2:  GXIPJ

        :returns:     0 - No
                      1 - Yes
        :rtype:       int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._orientations_are_the_same(ipj2)
        return ret_val


    def orientations_are_the_same_within_a_small_tolerance(self, ipj2):
        """
        Same as `orientations_are_the_same <geosoft.gxapi.GXIPJ.orientations_are_the_same>`,
        but allows for small numerical differences

        :param ipj2:  `GXIPJ <geosoft.gxapi.GXIPJ>` 2
        :type  ipj2:  GXIPJ

        :returns:     0 - No
                      1 - Yes
        :rtype:       int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._orientations_are_the_same_within_a_small_tolerance(ipj2)
        return ret_val


    def orientations_are_the_same_to_specified_tolerance_with_feedback(self, ipj2, sig_digits, str_val):
        """
        Same as `orientations_are_the_same <geosoft.gxapi.GXIPJ.orientations_are_the_same>`,
        but allows for small numerical differences

        :param ipj2:        `GXIPJ <geosoft.gxapi.GXIPJ>` 2
        :param sig_digits:  Significant digits (0 for exact)
        :param str_val:     Reason if different returned
        :type  ipj2:        GXIPJ
        :type  sig_digits:  int
        :type  str_val:     str_ref

        :returns:           0 - No
                            1 - Yes
        :rtype:             int

        .. versionadded:: 9.10

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val, str_val.value = self._orientations_are_the_same_to_specified_tolerance_with_feedback(ipj2, sig_digits, str_val.value.encode())
        return ret_val


    def has_section_orientation(self):
        """
        Does this projection contain an orientation used by section plots?

        :returns:    0 - No
                     1 - Yes
        :rtype:      int

        .. versionadded:: 8.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Returns 1 if there is a section orientation

        The following orientations can be used to orient sections or section views:

        `IPJ_ORIENT_SECTION <geosoft.gxapi.IPJ_ORIENT_SECTION>` - Target-type sections with Z projection horizontally
        `IPJ_ORIENT_SECTION_NORMAL <geosoft.gxapi.IPJ_ORIENT_SECTION_NORMAL>` - Like `IPJ_ORIENT_SECTION <geosoft.gxapi.IPJ_ORIENT_SECTION>`, but Z projects
        perpendicular to the section plane.
        `IPJ_ORIENT_SECTION_CROOKED <geosoft.gxapi.IPJ_ORIENT_SECTION_CROOKED>` - Crooked sections
        `IPJ_ORIENT_3D <geosoft.gxapi.IPJ_ORIENT_3D>` - Some Sections extracted from a voxel - e.g. VoxelToGrids,
        as the voxel can have any orientation in 3D.

        It is sometimes important to ignore the section orientation, for instance
        when rendering a grid in 3D where it has been located on a plane.
        """
        ret_val = self._has_section_orientation()
        return ret_val


    def projection_type_is_fully_supported(self):
        """
        Is the projection type fully supported?

        :returns:    0 - No
                     1 - Yes
        :rtype:      int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This function checks only the projected coordinate system
        in the `GXIPJ <geosoft.gxapi.GXIPJ>` object, so should only be used
        with projections of type `IPJ_TYPE_PCS <geosoft.gxapi.IPJ_TYPE_PCS>`.
        
This function does not test the validity of datums or local datum transforms.
        """
        ret_val = self._projection_type_is_fully_supported()
        return ret_val


    def set_gxf_safe(self, str1, str2, str3, str4, str5):
        """
        Same as `set_gxf <geosoft.gxapi.GXIPJ.set_gxf>`, but fails gracefully.

        :param str1:  "projection name" or PCS_NAME from ipj_pcs.csv (datum / projection) or EPSG coordinate system code number or "<file.prj>" projection file name or "<file.wrp>" warp file name
        :param str2:  "datum name"[, major axis, ellipticity, prime meridian] or DATUM from datum.csv or EPSG datum code number
        :param str3:  "method name", parameters (P1 through P8) or "projection name"[,"method name","Units",P1,P2...] or TRANSFORM from transform.csv or EPSG transform method code number
        :param str4:  "unit name", conversion to metres or UNIT_LENGTH from units.csv
        :param str5:  "local transform name"[,dX,dY,dZ,rX,rY,rZ,Scale] or DATUM_TRF from datumtrf.csv or AREA_OF_USE from ldatum.csv or EPSG local datum transform code number
        :type  str1:  str
        :type  str2:  str
        :type  str3:  str
        :type  str4:  str
        :type  str5:  str

        :returns:     0 - error in setting `GXIPJ <geosoft.gxapi.GXIPJ>`, input `GXIPJ <geosoft.gxapi.GXIPJ>` unchanged.
                      1 - success: `GXIPJ <geosoft.gxapi.GXIPJ>` set using input values.
        :rtype:       int

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** `set_gxf <geosoft.gxapi.GXIPJ.set_gxf>` will fail and terminate the GX if anything goes wrong (e.g. having a wrong
        parameter). If this function fails, it simply returns 0 and leaves the
        `GXIPJ <geosoft.gxapi.GXIPJ>` unchanged.
        """
        ret_val = self._set_gxf_safe(str1.encode(), str2.encode(), str3.encode(), str4.encode(), str5.encode())
        return ret_val


    def source_type(self):
        """
        Get `GXIPJ <geosoft.gxapi.GXIPJ>` source type

        :returns:    :ref:`IPJ_TYPE`
        :rtype:      int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._source_type()
        return ret_val


    def support_datum_transform(self, ipj2):
        """
        Can we transform between these two datums?

        :param ipj2:  `GXIPJ <geosoft.gxapi.GXIPJ>` 2
        :type  ipj2:  GXIPJ

        :returns:     0 - No
                      1 - Yes, either because both CS are on the same datum,
                      or because a local datum transform is defined
                      for each coordinate system.
        :rtype:       int

        .. versionadded:: 6.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** To transform between different datums requires the use of a local
        datum transform.  The local datum transform can be defined when
        a coordinate system is created, but the definition is optional.
        This function will test that the local datum transforms are defined.
        Note that a coordinate transformation between datums without a
        local datum transform is still possible, but only the effect of
        ellipsoid shape will be modelled in the transform.
        """
        ret_val = self._support_datum_transform(ipj2)
        return ret_val


    @classmethod
    def unit_name(cls, val, type, name):
        """
        Get a unit name given a scale factor

        :param val:   Factor to meters
        :param type:  :ref:`IPJ_UNIT`
        :param name:  Name returned, "" if cannot find unit
        :type  val:   float
        :type  type:  int
        :type  name:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        name.value = gxapi_cy.WrapIPJ._unit_name(GXContext._get_tls_geo(), val, type, name.value.encode())


    def warped(self):
        """
        Does `GXIPJ <geosoft.gxapi.GXIPJ>` contain a warp?

        :rtype:      bool

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._warped()
        return ret_val


    def warps_are_the_same(self, ipj2):
        """
        Are these two warps the same?

        :param ipj2:  `GXIPJ <geosoft.gxapi.GXIPJ>` 2
        :type  ipj2:  GXIPJ

        :returns:     0 - No
                      1 - Yes
        :rtype:       int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._warps_are_the_same(ipj2)
        return ret_val


    def warps_are_the_same_within_a_small_tolerance(self, ipj2):
        """
        Same as `warps_are_the_same <geosoft.gxapi.GXIPJ.warps_are_the_same>`,
        but allows for small numerical differences

        :param ipj2:  `GXIPJ <geosoft.gxapi.GXIPJ>` 2
        :type  ipj2:  GXIPJ

        :returns:     0 - No
                      1 - Yes
        :rtype:       int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._warps_are_the_same_within_a_small_tolerance(ipj2)
        return ret_val


    def warps_are_the_same_to_specified_tolerance_with_feedback(self, ipj2, sig_digits, str_val):
        """
        Same as `warps_are_the_same <geosoft.gxapi.GXIPJ.warps_are_the_same>`,
        but allows for a specified accuracy and returns the reason if they are different

        :param ipj2:        `GXIPJ <geosoft.gxapi.GXIPJ>` 2
        :param sig_digits:  Significant digits (0 for exact)
        :param str_val:     Reason if different returned
        :type  ipj2:        GXIPJ
        :type  sig_digits:  int
        :type  str_val:     str_ref

        :returns:           0 - No
                            1 - Yes
        :rtype:             int

        .. versionadded:: 9.10

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val, str_val.value = self._warps_are_the_same_to_specified_tolerance_with_feedback(ipj2, sig_digits, str_val.value.encode())
        return ret_val


    def warp_type(self):
        """
        Obtain the warp type of an `GXIPJ <geosoft.gxapi.GXIPJ>`.

        :returns:    :ref:`IPJ_WARP`
        :rtype:      int

        .. 
versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._warp_type()
        return ret_val


    def make_projected(self, min_lon, min_lat, max_lon, max_lat):
        """
        Create a default projected coordinate system from lat-long ranges.

        :param min_lon:  Minimum longitude
        :param min_lat:  Minimum latitude
        :param max_lon:  Maximum longitude
        :param max_lat:  Maximum latitude
        :type  min_lon:  float
        :type  min_lat:  float
        :type  max_lon:  float
        :type  max_lat:  float

        .. versionadded:: 5.1.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Terminates with invalid or unsupported ranges.
        If the map crosses the equator, or if map is within 20 degrees of the
        equator, uses an equatorial mercator projection centered at the central
        longitude. Otherwise, uses a Lambert Conic Conformal (1SP) projection
        for the map. Global maps outside of +/- 70 degrees latitude are not
        supported.
        """
        self._make_projected(min_lon, min_lat, max_lon, max_lat)


    def new_box_resolution(self, ip_jo, res, min_x, min_y, max_x, max_y, min_res, max_res, diag_res):
        """
        Determine a data resolution in a new coordinate system

        :param ip_jo:     New `GXIPJ <geosoft.gxapi.GXIPJ>`
        :param res:       Data resolution in original `GXIPJ <geosoft.gxapi.GXIPJ>`
        :param min_x:     X minimum of bounding box in new `GXIPJ <geosoft.gxapi.GXIPJ>`
        :param min_y:     Y minimum
        :param max_x:     X maximum
        :param max_y:     Y maximum
        :param min_res:   Minimum data resolution in new `GXIPJ <geosoft.gxapi.GXIPJ>`,
        :param max_res:   Maximum data resolution in new `GXIPJ <geosoft.gxapi.GXIPJ>`
        :param diag_res:  Diagonal data resolution in new `GXIPJ <geosoft.gxapi.GXIPJ>`
        :type  ip_jo:     GXIPJ
        :type  res:       float
        :type  min_x:     float
        :type  min_y:     float
        :type  max_x:     float
        :type  max_y:     float
        :type  min_res:   float_ref
        :type  max_res:   float_ref
        :type  diag_res:  float_ref

        .. versionadded:: 5.1.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** if there are any problems reprojecting, new resolutions will be dummy.
        The conversion to new resolution is based on measurements along the four
        edges and two diagonals.
        """
        min_res.value, max_res.value, diag_res.value = self._new_box_resolution(ip_jo, res, min_x, min_y, max_x, max_y, min_res.value, max_res.value, diag_res.value)


    def read(self, type, str1, str2, str3):
        """
        Read and define an `GXIPJ <geosoft.gxapi.GXIPJ>` from a standard file.

        :param type:  :ref:`IPJ_TYPE`
        :param str1:  String 1
        :param str2:  String 2
        :param str3:  String 3
        :type  type:  int
        :type  str1:  str
        :type  str2:  str
        :type  str3:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._read(type, str1.encode(), str2.encode(), str3.encode())


    def get_method_parm(self, parm):
        """
        Get projection method parameter

        :param parm:  :ref:`IPJ_CSP`
        :type  parm:  int

        :returns:     Parameter setting, `rDUMMY <geosoft.gxapi.rDUMMY>` if dot used
        :rtype:       float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        ret_val = self._get_method_parm(parm)
        return ret_val


    def get_north_azimuth(self, x, y):
        """
        Return the azimuth of geographic North at a point.

        :param x:  Input X location
        :param y:  Input Y location
        :type  x:  float
        :type  y:  float

        :returns:  Azimuth (degrees CW) of geographic north from grid north at a location.
        :rtype:    float

        .. versionadded:: 7.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If the `GXIPJ <geosoft.gxapi.GXIPJ>` is not a projected coordinate system
        then the returned azimuth is `GS_R8DM <geosoft.gxapi.GS_R8DM>`;
        """
        ret_val = self._get_north_azimuth(x, y)
        return ret_val


    @classmethod
    def unit_scale(cls, name, default):
        """
        Get a unit scale (m/unit) given a name

        :param name:     Unit name, abbreviation or full name
        :param default:  Default to return if name not found
        :type  name:     str
        :type  default:  float

        :returns:        Scale factor m/unit
        :rtype:          float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If name cannot be found, returns default.
        """
        ret_val = gxapi_cy.WrapIPJ._unit_scale(GXContext._get_tls_geo(), name.encode(), default)
        return ret_val


    def serial(self, bf):
        """
        Serialize `GXIPJ <geosoft.gxapi.GXIPJ>` to a `GXBF <geosoft.gxapi.GXBF>`.

        :type  bf:  GXBF

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._serial(bf)


    def serial_fgdcxml(self, file):
        """
        Write the `GXIPJ <geosoft.gxapi.GXIPJ>` as a FDGC MetaData XML object

        :param file:  Name of file to export to
        :type  file:  str

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._serial_fgdcxml(file.encode())


    def serial_isoxml(self, file):
        """
        Write the `GXIPJ <geosoft.gxapi.GXIPJ>` as a ISO MetaData XML object

        :param file:  Name of file to export to
        :type  file:  str

        .. 
versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._serial_isoxml(file.encode())


    def serial_xml(self, file):
        """
        Write the `GXIPJ <geosoft.gxapi.GXIPJ>` as a Geosoft MetaData XML object

        :param file:  Name of file to export to
        :type  file:  str

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._serial_xml(file.encode())


    def set_3d_inverted(self, inverted):
        """
        Set whether a view is inverted (must be 3D already)

        :param inverted:  Inverted (0 or 1)
        :type  inverted:  int

        .. versionadded:: 6.3.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_3d_inverted(inverted)


    def set_3d_inverted_angles(self, inverted):
        """
        Set whether the angles in this view are inverted (must be 3D already)

        :param inverted:  Inverted (0 or 1)
        :type  inverted:  int

        .. versionadded:: 6.3.1

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_3d_inverted_angles(inverted)


    def set_3d_view(self, x, y, z, rx, ry, rz, sx, sy, str_val):
        """
        Set 3D orientation parameters

        :param x:        X location of view origin
        :param y:        Y location of view origin
        :param z:        Z location of view origin
        :param rx:       Rotation in X
        :param ry:       Rotation in Y
        :param rz:       Rotation in Z
        :param sx:       Scaling in X
        :param sy:       Scaling in Y
        :param str_val:  Scaling in Z
        :type  x:        float
        :type  y:        float
        :type  z:        float
        :type  rx:       float
        :type  ry:       float
        :type  rz:       float
        :type  sx:       float
        :type  sy:       float
        :type  str_val:  float

        .. versionadded:: 6.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Sets up translation, scaling and rotation in all three directions
        for 3D objects.
        """
        self._set_3d_view(x, y, z, rx, ry, rz, sx, sy, str_val)


    def set_3d_view_ex(self, x, y, z, rx, ry, rz, sx, sy, str_val, rotate, flags):
        """
        Set 3D orientation parameters with new flags

        :param x:        X location of view origin
        :param y:        Y location of view origin
        :param z:        Z location of view origin
        :param rx:       Rotation in X
        :param ry:       Rotation in Y
        :param rz:       Rotation in Z
        :param sx:       Scaling in X
        :param sy:       Scaling in Y
        :param str_val:  Scaling in Z
        :param rotate:   :ref:`IPJ_3D_ROTATE`
        :param flags:    :ref:`IPJ_3D_FLAG`
        :type  x:        float
        :type  y:        float
        :type  z:        float
        :type  rx:       float
        :type  ry:       float
        :type  rz:       float
        :type  sx:       float
        :type  sy:       float
        :type  str_val:  float
        :type  rotate:   int
        :type  flags:    int

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Sets up translation, scaling and rotation in all three directions
        for 3D objects.
        """
        self._set_3d_view_ex(x, y, z, rx, ry, rz, sx, sy, str_val, rotate, flags)


    def set_3d_view_from_axes(self, x, y, z, x1, x2, x3, y1, y2, y3, sx, sy, str_val):
        """
        Set 3D orientation parameters

        :param x:        X location of view origin
        :param y:        Y location of view origin
        :param z:        Z location of view origin
        :param x1:       X axis X component
        :param x2:       X axis Y component
        :param x3:       X axis Z component
        :param y1:       Y axis X component
        :param y2:       Y axis Y component
        :param y3:       Y axis Z component
        :param sx:       Scaling in X
        :param sy:       Scaling in Y
        :param str_val:  Scaling in Z
        :type  x:        float
        :type  y:        float
        :type  z:        float
        :type  x1:       float
        :type  x2:       float
        :type  x3:       float
        :type  y1:       float
        :type  y2:       float
        :type  y3:       float
        :type  sx:       float
        :type  sy:       float
        :type  str_val:  float

        .. versionadded:: 9.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Sets up translation, scaling and rotation in all three directions
        for 3D objects, based on input origin and X and Y axis vectors.
        """
        self._set_3d_view_from_axes(x, y, z, x1, x2, x3, y1, y2, y3, sx, sy, str_val)


    def set_crooked_section_view(self, dist_vv, xvv, yvv, log_z):
        """
        Set up the crooked section view.

        :param dist_vv:  Section X locations (e.g. distance along the curve)
        :param xvv:      True X
        :param yvv:      True Y
        :param log_z:    Use logarithmic Y-axis (usually for data profiles) 0:No, 1:Yes
        :type  dist_vv:  GXVV
        :type  xvv:      GXVV
        :type  yvv:      GXVV
        :type  log_z:    int

        .. versionadded:: 7.2

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** A non-plane section. It is a vertical section which curves along
        a path in (X, Y).
        """
        self._set_crooked_section_view(dist_vv, xvv, yvv, log_z)


    def set_depth_section_view(self, depth):
        """
        Set depth section orientation parameters

        :param depth:  View Y value for Depth = 0.0.
        :type  depth:  float

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_depth_section_view(depth)


    def set_esri(self, esri):
        """
        Set coordinate system from an ESRI prj coordinate string

        :param esri:  ESRI prj format projection string
        :type  esri:  str

        .. versionadded:: 5.1.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If the projection is not supported in Geosoft, the
        `GXIPJ <geosoft.gxapi.GXIPJ>` will be unknown.
        """
        self._set_esri(esri.encode())


    def set_gxf(self, str1, str2, str3, str4, str5):
        """
        Set coordinate system from GXF style strings.

        
:param str1:  "projection name" or PCS_NAME from ipj_pcs.csv (datum / projection) or EPSG coordinate system code number or "<file.prj>" projection file name or "<file.wrp>" warp file name
        :param str2:  "datum name"[, major axis, ellipticity, prime meridian] or DATUM from datum.csv or EPSG datum code number
        :param str3:  "method name", parameters (P1 through P8) or "projection name"[,"method name","Units",P1,P2...] or TRANSFORM from transform.csv or EPSG transform method code number
        :param str4:  "unit name", conversion to metres or UNIT_LENGTH from units.csv
        :param str5:  "local transform name"[,dX,dY,dZ,rX,rY,rZ,Scale] or DATUM_TRF from datumtrf.csv or AREA_OF_USE from ldatum.csv or EPSG local datum transform code number
        :type  str1:  str
        :type  str2:  str
        :type  str3:  str
        :type  str4:  str
        :type  str5:  str

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Simplest Usage:

        The coordinate system can be resolved from the "coordinate system name"
        if the name is specified using an EPSG number or naming convention such as:

        "datum / projection"  (example: "Arc 1960 / UTM zone 37S")

        Where:

        "datum" is the EPSG datum name (eg. NAD83).  All supported datums are
        listed in ...user/csv/datum.csv.

        "projection" is the EPSG coordinate system map projection.
        datum name (eg. "UTM zone 10N").  All supported coordinate system
        projections are listed in ...user/csv/transform.csv.

        All EPSG known combined coordinate systems of the earth are
        listed in ...user/csv/ipj_pcs.csv.

        To define a geographic (longitude, latitude) coordinate system, specify
        the datum name alone (ie "Arc 1960").  EPSG numbers can also be used, so
        in the example above the name can be "21037".

        The coordinate system may also be oriented arbitrarily in 3D relative to
        the base coordinate system by specifying the orientation as a set of
        6 comma-separated values between angled brackets after the coordinate
        system name, e.g:

        ::

            "datum / projection"<oX,oY,oZ,rX,rY,rZ>
            21037<oX,oY,oZ,rX,rY,rZ>

        where:

        oX,oY,oZ    is the location of the local origin on the CS

        rX,rY,rZ    are rotations in degrees azimuth (clockwise) of
        the local axis frame around the X, Y and Z axis
        respectively.  A simple plane rotation will only have
        a rotation around Z.  For example:

        ::

            "Arc 1960 / UTM zone 37S"<525000,2500000,0,0,0,15>

        defines a local system with origin at (525000,2500000)
        with a rotation of 15 degrees azimuth.

        Orientation parameters not defined will default to align with the
        base CS.  Note that although allowed, it does not make sense to have
        an orientation on a geographic coordinate system (long,lat).

        Complete usage:

        A coordinate system can also be fully described by providing an
        additional four strings that define the datum, map projection,
        length units and preferred local datum transform.  Refer to GXF
        revision 3 for further detail:
        http://www.geosoft.com/resources/goto/GXF-Grid-eXchange-File

        Note that coordinate system reference tables are maintained in csv
        files located in the .../user/csv folder found with the Geosoft
        installation files, which will usually be located here:
        C:\\Program Files (x86)\\Geosoft\\Oasis montaj\\user\\csv

        The "datum" string can use a datum name defined in the "datum.csv"
        file, or the local datum name from datumtrf.csv, or the local
        datum description from ldatum.csv.

        For a non-EPSG datum, you can define your own datum parameters in the
        Datum string field as follows:

        ::

            "*YourDatumName",major_axis,flattening(or eccentricity)[,prime_meridian]

        where

        The * before "YourDatumName" indicates this is a non-EPSG name.

        major_axis is in metres.

        flattening less than 0 is interpreted as eccentricity (0 indicates a
        sphere).

        prime_meridian is optional, specified in degrees of longitude relative
        to Greenwich.

        The "Projection" can contain a projection system defined in the
        "transform.csv" file, or the name of a projection type followed by
        projection parameters.  Geographic coordinates systems (long/lat only)
        must leave "projection" blank.

        Projection names not defined in "transform.csv" can be defined in the
        "projection" string as follows:

        ::

            method,length_units,P1,P2,...

        where:

        method is a method from the table "transform_parameters.csv".

        length_units is a "Unit_length" from units.csv.

        P1 through P8 (or fewer) are the projection parameters for the method
        as defined in "transform_parameters.csv", and in the order defined.
        Parameters that are blank in "transform_parameters.csv" are omitted
        from the list so that each method will have a minimum list of
        parameters.

        Angular parameters must always be degrees, and may be defined in a
        decimal degree format, or "DEG.MM.SS.ssss".

        Distance parameters (False Northing and False Easting) must be
        defined in the "length_units" (string 4).

        Examples:

        ::

            Geographic long,lat on datum "Arc 1960":
            "4210","","","",""
            "Arc 1960","","","",""
            "","Arc 1960","","",""

            Projected Coordinate System, UTM zone 37S
            "21037","","","",""
            "","4210","16137","",""
            ""Arc 1960 / UTM zone 37S"","","","",""
            "",""Arc 1960"","UTM zone 37S","",""
            "",""Arc 1960"","UTM zone 37S","m",""
            "",""Arc 1960"","UTM zone 37S","m,1.0",""
            "",""Arc 1960"","UTM zone 37S","m,1.0","");
            "",""Arc 1960"","UTM zone 37S","m","Arc 1960 to WGS 84 (1)"

            Locally oriented coordinate system (origin at 525000,2500000, rotated 15 deg):
            "21037<525000,2500000,0,0,0,15>","","","",""
            "<525000,2500000,0,0,0,15>","4210","16137","",""
            ""Arc 1960 / UTM zone 37S"<525000,2500000,0,0,0,15>","","","",""
        """
        self._set_gxf(str1.encode(), str2.encode(), str3.encode(), str4.encode(), str5.encode())


    def set_method_parm(self, parm, parm_value):
        """
        Set projection method parameter

        :param parm:        :ref:`IPJ_CSP`
        :param parm_value:  Parameter value
        :type  parm:        int
        :type  parm_value:  float

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If parameter is not valid, nothing happens.
        """
        self._set_method_parm(parm, parm_value)


    def set_mi_coord_sys(self, coord, units):
        """
        Set coordinate system from a MapInfo coordsys command

        :param coord:  MapInfo Coordinate System
        :param units:  MapInfo Units
        :type  coord:  str
        :type  units:  str

        .. versionadded:: 5.1.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_mi_coord_sys(coord.encode(), units.encode())


    def set_normal_section_view(self, x, y, z, azimuth, swing):
        """
        Set normal section orientation parameters

        :param x:        X location of view origin
        :param y:        Y location of view origin
        :param z:        Z location of view origin
        :param azimuth:  Section azimuth - degrees CCW from north
        :param swing:    Section swing -90 < swing < 90.
        
:type  x:        float
        :type  y:        float
        :type  z:        float
        :type  azimuth:  float
        :type  swing:    float

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This section is the type where values are projected
        normal to the section, and the "Y" values in a grid do not
        necessarily correspond to the elevations for a swung section.
        """
        self._set_normal_section_view(x, y, z, azimuth, swing)


    def set_plan_view(self, x, y, z, rot):
        """
        Set plan orientation parameters.

        :param x:    X location of view rotation point
        :param y:    Y location of view rotation point
        :param z:    Z location of view plane in 3D
        :param rot:  Rotation CCW from normal XY coords about the rotation point
        :type  x:    float
        :type  y:    float
        :type  z:    float
        :type  rot:  float

        .. versionadded:: 5.1.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This sets up the orientation of an `GXIPJ <geosoft.gxapi.GXIPJ>` for plan view plots,
        for instance in drill hole section map (top plan view).
        These differ from regular plan map views in that the elevation
        of the view plane is set, and the view may be rotated.

        In addition, when viewed in a map, a view with this `GXIPJ <geosoft.gxapi.GXIPJ>` will give
        a status bar location (X, Y, Z) of the actual location in space, as
        opposed to just the X, Y of the view plane itself.

        When a rotation azimuth angle (CW) is specified, the input (X, Y)
        location is the point about which the coordinate system is rotated.
        For instance, you can rotate a grid about its corner (XC, YC) by
        giving it a Plan View orientation (XC, YC, ZElevation, Rot).

        If there is no rotation, the input X, Y locations are irrelevant, but
        they will show up in the output string for the orientation, so it
        makes sense to default them to (0, 0).
        """
        self._set_plan_view(x, y, z, rot)


    def set_section_view(self, x, y, z, azimuth, swing):
        """
        Set section orientation parameters

        :param x:        X location of view origin
        :param y:        Y location of view origin
        :param z:        Z location of view origin
        :param azimuth:  Section azimuth - degrees CCW from north
        :param swing:    Section swing -90 < swing < 90.
        :type  x:        float
        :type  y:        float
        :type  z:        float
        :type  azimuth:  float
        :type  swing:    float

        .. versionadded:: 5.1.6

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** This sets up the orientation of an `GXIPJ <geosoft.gxapi.GXIPJ>` for section view plots,
        for instance in Wholeplot.

        In addition, when viewed in a map, a view with this `GXIPJ <geosoft.gxapi.GXIPJ>` will give
        a status bar location (X, Y, Z) of the actual location in space, as
        opposed to just the X, Y of the view plane itself.

        Swung sections are tricky because they are set up for section plots
        in such a way that the vertical axis remains "true"; points are
        projected horizontally to the viewing plane, independent of the swing
        angle.  In other words, all locations in 3D space viewed using this
        projection will plot on the same horizontal line in the map view.
        This function is NOT suitable for simply creating an orientation for
        a dipping grid or view.
        """
        self._set_section_view(x, y, z, azimuth, swing)


    def set_wms_coord_sys(self, coord, min_x, min_y, max_x, max_y):
        """
        Set coordinate system from a WMS coordsys string.

        :param coord:  WMS style coordinate string
        :param min_x:  Minimum X bounding box
        :param min_y:  Minimum Y
        :param max_x:  Maximum X
        :param max_y:  Maximum Y
        :type  coord:  str
        :type  min_x:  float
        :type  min_y:  float
        :type  max_x:  float
        :type  max_y:  float

        .. versionadded:: 5.1.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** WMS coordinate strings supported:

        EPSG:code
        where "code" is the EPSG code number
        "EPSG:4326" is geographic "WGS 84" (see datum.csv)
        "EPSG:25834" is projected "ETRS89 / UTM zone 34N" (see ipj_pcs.csv)

        The bounding box for EPSG systems must be defined in the EPSG coordinate
        system.  If a bounding box is provided, it will not be changed.

        AUTO:wm_id,epsg_units,lon,lat (see OGC documentation)
        for "AUTO" coordinates, the "epsg_units" is the units of the
        bounding box.  This procedure will transform the supplied
        bounding box from these units to the units of the projection.
        Normally, this is from long/lat (9102) to metres (9001).
        """
        self._set_wms_coord_sys(coord.encode(), min_x, min_y, max_x, max_y)


    def set_xml(self, str_val):
        """
        Set an `GXIPJ <geosoft.gxapi.GXIPJ>` from a Geosoft Metadata XML string

        :param str_val:  XML string to set
        :type  str_val:  str

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_xml(str_val.encode())


    def set_from_binary_as_string(self, str_val):
        """
        Set `GXIPJ <geosoft.gxapi.GXIPJ>` from binary-as-string

        :param str_val:  Binary as string
        :type  str_val:  str

        .. versionadded:: 9.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_from_binary_as_string(str_val.encode())


    def get_from_binary_as_string(self, str_val):
        """
        Get `GXIPJ <geosoft.gxapi.GXIPJ>` from binary-as-string

        :param str_val:  Binary as string returned
        :type  str_val:  str_ref

        .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ str_val.value = self._get_from_binary_as_string(str_val.value.encode()) def get_3d_matrix_orientation(self, v00, v01, v02, v03, v10, v11, v12, v13, v20, v21, v22, v23, v30, v31, v32, v33): """ Gets the coefficients of a 3D matrix orientation. :param v00: Row 0 Element 0 :param v01: Row 0 Element 1 :param v02: Row 0 Element 2 :param v03: Row 0 Element 3 :param v10: Row 1 Element 0 :param v11: Row 1 Element 1 :param v12: Row 1 Element 2 :param v13: Row 1 Element 3 :param v20: Row 2 Element 0 :param v21: Row 2 Element 1 :param v22: Row 2 Element 2 :param v23: Row 2 Element 3 :param v30: Row 3 Element 0 :param v31: Row 3 Element 1 :param v32: Row 3 Element 2 :param v33: Row 3 Element 3 :type v00: float_ref :type v01: float_ref :type v02: float_ref :type v03: float_ref :type v10: float_ref :type v11: float_ref :type v12: float_ref :type v13: float_ref :type v20: float_ref :type v21: float_ref :type v22: float_ref :type v23: float_ref :type v30: float_ref :type v31: float_ref :type v32: float_ref :type v33: float_ref .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ v00.value, v01.value, v02.value, v03.value, v10.value, v11.value, v12.value, v13.value, v20.value, v21.value, v22.value, v23.value, v30.value, v31.value, v32.value, v33.value = self._get_3d_matrix_orientation(v00.value, v01.value, v02.value, v03.value, v10.value, v11.value, v12.value, v13.value, v20.value, v21.value, v22.value, v23.value, v30.value, v31.value, v32.value, v33.value) def set_3d_matrix_orientation(self, v00, v01, v02, v03, v10, v11, v12, v13, v20, v21, v22, v23, v30, v31, v32, v33): """ Apply a 3D orientation directly using matrix coefficients. 
:param v00: Row 0 Element 0 :param v01: Row 0 Element 1 :param v02: Row 0 Element 2 :param v03: Row 0 Element 3 :param v10: Row 1 Element 0 :param v11: Row 1 Element 1 :param v12: Row 1 Element 2 :param v13: Row 1 Element 3 :param v20: Row 2 Element 0 :param v21: Row 2 Element 1 :param v22: Row 2 Element 2 :param v23: Row 2 Element 3 :param v30: Row 3 Element 0 :param v31: Row 3 Element 1 :param v32: Row 3 Element 2 :param v33: Row 3 Element 3 :type v00: float :type v01: float :type v02: float :type v03: float :type v10: float :type v11: float :type v12: float :type v13: float :type v20: float :type v21: float :type v22: float :type v23: float :type v30: float :type v31: float :type v32: float :type v33: float .. versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_3d_matrix_orientation(v00, v01, v02, v03, v10, v11, v12, v13, v20, v21, v22, v23, v30, v31, v32, v33) def reproject_section_grid(self, output_ipj, x0, y0, dx, dy, rot): """ Reproject a section grid :param output_ipj: Reprojected `GXIPJ <geosoft.gxapi.GXIPJ>` on input (need not include an orientation). On output contains the same type of orientation as the initial `GXIPJ <geosoft.gxapi.GXIPJ>`, adjusted to be in the same location. :param x0: X origin of grid (input initial value, output new value) :param y0: Y origin of grid (input initial value, output new value) :param dx: X cell size of grid (input initial value, output new value) :param dy: Y cell size of grid (input initial value, output new value) :param rot: Grid rotation (degrees CCW) (input initial value, output new value) :type output_ipj: GXIPJ :type x0: float_ref :type y0: float_ref :type dx: float_ref :type dy: float_ref :type rot: float_ref .. 
versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Reproject a section grid to a new `GXIPJ <geosoft.gxapi.GXIPJ>`, adjusting its orientation and registration so that it remains in the same location. """ x0.value, y0.value, dx.value, dy.value, rot.value = self._reproject_section_grid(output_ipj, x0.value, y0.value, dx.value, dy.value, rot.value) def get_authority_id(self, authority): """ Get Authority ID (e.g. EPSG, ESRI) for coordinate system or `iDUMMY <geosoft.gxapi.iDUMMY>` if unknown. :param authority: Authority ID (e.g. EPSG and ESRI) :type authority: str_ref :returns: :ref:`IPJ_ORIENT` :rtype: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, authority.value = self._get_authority_id(authority.value.encode()) return ret_val def get_epsgid_for_datum(self): """ Get EPSG ID for datum of coordinate system or `iDUMMY <geosoft.gxapi.iDUMMY>` if unknown. :returns: :ref:`IPJ_ORIENT` :rtype: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_epsgid_for_datum() return ret_val def add_as_favourite_coordinate_system(self): """ Add as favourite coordinate system to Settings. .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add_as_favourite_coordinate_system() @classmethod def get_number_of_favourite_coordinate_systems(cls): """ Get number of favourite coordinate systems in Settings. :rtype: int .. 
versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIPJ._get_number_of_favourite_coordinate_systems(GXContext._get_tls_geo()) return ret_val @classmethod def get_favourite_coordinate_system(cls, index): """ Get a favourite coordinate system from Settings. :param index: Index of item. :type index: int :rtype: GXIPJ .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapIPJ._get_favourite_coordinate_system(GXContext._get_tls_geo(), index) return GXIPJ(ret_val) @classmethod def remove_favourite_coordinate_system(cls, index): """ Remove favourite coordinate system from Settings. :param index: Index of item. :type index: int .. versionadded:: 9.10 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapIPJ._remove_favourite_coordinate_system(GXContext._get_tls_geo(), index) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXARCDH.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXARCDH(gxapi_cy.WrapARCDH): """ GXARCDH class. This library is not a class. It contains various utilities used in the Target extension for ArcGIS. 
""" def __init__(self, handle=0): super(GXARCDH, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXARCDH <geosoft.gxapi.GXARCDH>` :returns: A null `GXARCDH <geosoft.gxapi.GXARCDH>` :rtype: GXARCDH """ return GXARCDH() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def close_project(cls): """ Closes the current `GXDH <geosoft.gxapi.GXDH>` project in the Target extension .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapARCDH._close_project(GXContext._get_tls_geo()) @classmethod def set_project(cls, path, project): """ Sets the current `GXDH <geosoft.gxapi.GXDH>` project in the Target extension :param path: Path String :param project: Project Name :type path: str :type project: str .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapARCDH._set_project(GXContext._get_tls_geo(), path.encode(), project.encode()) @classmethod def set_string_file_gdb(cls, string_file_gdb): """ Sets the current Geostring File Geodatabase in the Target extension :param string_file_gdb: File Geodatabase :type string_file_gdb: str .. versionadded:: 8.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapARCDH._set_string_file_gdb(GXContext._get_tls_geo(), string_file_gdb.encode()) @classmethod def stop_editing_string_file_gdb(cls): """ Stops editing session for current string fGDB .. 
versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapARCDH._stop_editing_string_file_gdb(GXContext._get_tls_geo()) @classmethod def has_string_file_gdb_edits(cls): """ Is a Geostring File Geodatabase loaded and contains edits? :rtype: int .. versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapARCDH._has_string_file_gdb_edits(GXContext._get_tls_geo()) return ret_val @classmethod def geostrings_extension_available(cls): """ Verifies if the geostrings extension in TfA is available. Return 1 if true, 0 otherwise :rtype: int .. versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapARCDH._geostrings_extension_available(GXContext._get_tls_geo()) return ret_val @classmethod def get_current_string_file_gdb(cls, name): """ Gets the current Geostring File Geodatabase. :param name: Name returned :type name: str_ref .. versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ name.value = gxapi_cy.WrapARCDH._get_current_string_file_gdb(GXContext._get_tls_geo(), name.value.encode()) @classmethod def is_valid_fgdb_file_name(cls, fgdb): """ Is this a valid FGDB filename? :param fgdb: FGDB filename :type fgdb: str :rtype: int .. 
versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapARCDH._is_valid_fgdb_file_name(GXContext._get_tls_geo(), fgdb.encode()) return ret_val @classmethod def is_valid_feature_class_name(cls, feature_class_name): """ Is this a valid featureclass name? :param feature_class_name: Featureclass name :type feature_class_name: str :rtype: int .. versionadded:: 8.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapARCDH._is_valid_feature_class_name(GXContext._get_tls_geo(), feature_class_name.encode()) return ret_val @classmethod def s_prompt_for_esri_symbol(cls, hwnd, h_wnd, input_xml_string, xml, fill_color, edge_color): """ Prompt the user to select an ESRI symbol and return it as an XML string. The output string will be empty if the user cancels the dialog. :param hwnd: Window handle :param h_wnd: Initial symbol that you want displayed when the dialog is launched (use "" if none) :param input_xml_string: (This parameter is ignored if an initial symbol was specified) Initial symbol type that you want displayed when the dialog is launched (0 for Fill, 1 for Line) :param xml: Returned XML string representing the symbol that was chosen by the user :param fill_color: RGBA representation of fill color to be set :param edge_color: RGBA representation of edge color to be set :type hwnd: int :type h_wnd: str :type input_xml_string: int :type xml: str_ref :type fill_color: int_ref :type edge_color: int_ref .. 
versionadded:: 8.2 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ xml.value, fill_color.value, edge_color.value = gxapi_cy.WrapARCDH._s_prompt_for_esri_symbol(GXContext._get_tls_geo(), hwnd, h_wnd.encode(), input_xml_string, xml.value.encode(), fill_color.value, edge_color.value) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/vv.py """ Geosoft vector. :Classes: =============== ========================= :class:`GXvv` data vector =============== ========================= VA and VV classes are related based on a key called a *fiducial*, which has a start value and increment between values. The :meth:`refid` method can be used to resample vector data to the same fiducial so that vector-to-vector operations can be performed. .. seealso:: :mod:`geosoft.gxpy.va`, :mod:`geosoft.gxapi.GXVV`, :mod:`geosoft.gxapi.GXVA` .. note:: Regression tests provide usage examples: `vv tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_vv.py>`_ """ from collections.abc import Sequence import geosoft import numpy as np import geosoft.gxapi as gxapi from . import utility as gxu __version__ = geosoft.__version__ MAX_VV_BYTES = 4096 #: maximum bytes per element in VV def _t(s): return geosoft.gxpy.system.translate(s) class VVException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.vv`. .. versionadded:: 9.1 """ pass def np_from_vvset(vvset, axis=1): """ Return a 2d numpy array from a set of `GXvv` instances. :param vvset: (vv1, vv2, ...) 
set of `geosoft.gxpy.vv.GXvv` instances
    :param axis:  axis for the vv, default is 1, such that each vv is a column,
                  `axis=0` for vv to be rows
    :return:      numpy array shaped (max_vv_length, number_of_vv) for `axis=1`, or
                  (number_of_vv, max_vv_length) for `axis=0`.

    .. versionadded:: 9.3.1
    """

    nvv = len(vvset)
    # result length is the longest vv in the set; shorter vv leave trailing
    # elements of their column uninitialized (np.empty) — caller beware.
    length = 0
    for vv in vvset:
        if len(vv) > length:
            length = len(vv)
    # dtype of the result follows the first vv in the set
    npd = np.empty((length, nvv), dtype=vvset[0].dtype)
    for i in range(nvv):
        npd[:, i] = vvset[i].np
    if axis == 1:
        return npd
    else:
        return npd.T


def vvset_from_np(npd, axis=1):
    """
    Return a set of `GXvv` instances from a 2d numpy array.

    :param npd:   numpy data array of dimension 2. If the array has higher dimensions it will
                  first be reshaped to a 2-dimension array based on the last axis.
    :param axis:  axis for the vv, default is 1, such that each column becomes a vv,
                  `axis=0` for rows to be vv
    :return:      [vv0, vv1, vv2, ...] `geosoft.gxpy.vv.GXvv` instances for each column or
                  row (`axis=0`)

    For example::

        npd = np.array([[1, 2, 3], [4, 5, 6]])

    returns (vv([1, 4]), vv([2, 5]), vv([3, 6]))

    .. versionadded:: 9.3.1
    """

    if npd.ndim == 1:
        # a 1-d array yields a single vv
        vv = [GXvv(npd)]
    else:
        if npd.ndim > 2:
            # collapse higher dimensions, keeping the last axis
            npd = npd.reshape((-1, npd.shape[-1]))
        if axis == 0:
            # rows become vv: transpose so the column loop below applies
            npd = npd.T
        vv = []
        for i in range(npd.shape[1]):
            vv.append(GXvv(npd[:, i]))
    return tuple(vv)


class GXvv(Sequence):
    """
    VV class wrapper.

    :param array:   array-like, None to create an empty VV. Can have 2 dimensions for float32
                    or float64 data, in which case the second dimension can be 2 or 3 to use
                    Geosoft 2D and 3D dimensioned types. This can also be another `GXvv` instance,
                    in which case a copy of the data is made and the dtype, dim, fid and
                    unit_of_measurement will default to the source instance.
    :param dtype:   numpy data type. For unicode strings 'U#', where # is a string length. If not
                    specified the type is taken from first element in array, or if no array the
                    default is 'float'.
    :param dim:     dimension can be 1 (default), 2 (2D) or 3 (3D).
Ignored if array is defined as the array dimensions will be used. :param fid: (start, increment) fiducial :param unit_of_measure: unit of measure for the contained data. :param len: length of VV :Properties: ``vv`` :class:`geosoft.gxapi.GXvv` instance ``fid`` (start, increment) fiducial ``length`` number of elements in the VV ``gxtype`` GX data type ``dtype`` numpy data type ``dim`` dimension .. versionadded:: 9.1 .. versionchanged:: 9.2 support construction directly from arrays .. versionchanged:: 9.3 added unit_of_measure .. versionchanged:: 9.3.1 added string support in __getitem__, and creates from a source `GVvv` instance. .. versionchanged:: 9.6 Added length parameter. """ def __enter__(self): return self def __exit__(self, _type, _value, _traceback): self.__del__() def __del__(self): if hasattr(self, '_gxvv'): self._gxvv = None def __eq__(self, other): return np.array_equal(self.np, other.np) \ and self.fid == other.fid \ and self.dim == other.dim \ and self.unit_of_measure == other.unit_of_measure def __init__(self, array=None, dtype=None, fid=None, unit_of_measure=None, dim=None, len=0): if array is not None: if isinstance(array, GXvv): if fid is None: fid = array.fid if unit_of_measure is None: unit_of_measure = array.unit_of_measure if dtype is None: dtype = array.dtype if dim is None: dim = array.dim array = array.np else: if not isinstance(array, np.ndarray): array = np.array(array) if array.ndim == 2: dim = array.shape[1] else: dim = 1 if dtype is None: dtype = array.dtype if fid is None: fid = (0.0, 1.0) if unit_of_measure is None: unit_of_measure = '' if dim is None: dim = 1 elif dim not in (1, 2, 3): raise VVException(_t('dimension (array, or dim=) must be 1, 2 or 3')) self._dim = dim if dtype is None: dtype = np.float64 self._gxtype = max(-MAX_VV_BYTES, gxu.gx_dtype(dtype)) # Since we are using UTF-8 internally characters can take anywhere between 1 and 4 bytes. 
# The gx_dtype method and the gxapi wrappers accounts for that by multiplying the dtype number accordingly. # Specifying a numpy dtype to instantiate VV will ensure the internal space is enough to allocate up to # that 4 times the Unicode characters, however any Numpy arrays should still used the passed dtype as specified if dtype is not None and isinstance(dtype, np.dtype) and dtype.type is np.str_: self._dtype = dtype elif type(dtype) is str: self._dtype = np.dtype(dtype) else: self._dtype = gxu.dtype_gx(self._gxtype) self._is_float = self._is_int = self._is_string = False if gxu.is_float(self._gxtype): self._is_float = True elif gxu.is_int(self._gxtype): self._is_int = True else: self._is_string = True if not self._is_float and self._dim != 1: raise VVException(_t('2 or 3 dimensioned data must be float32 or float64')) if self._dim != 1: self._gxvv = gxapi.GXVV.create_ext(gxu.gx_dtype_dimension(self._dtype, self._dim), len) else: self._gxvv = gxapi.GXVV.create_ext(self._gxtype, len) self.fid = fid self._next = 0 self._unit_of_measure = unit_of_measure if array is not None: self.set_data(array, fid) def __len__(self): return self._gxvv.length() def __iter__(self): return self def __next__(self): if self._next >= self.length: self._next = 0 raise StopIteration else: i = self._next self._next += 1 return self.np[i], self.fid[0] + self.fid[1] * i def __getitem__(self, item): start, incr = self.fid if self._is_float: v = float(self.np[item]) elif self._is_int: v = int(self.np[item]) else: v = str(self.np[item]) return v, start + incr * item def _set_data_np(self, npd, start=0): """set to data in a numpy array""" if not npd.flags['C_CONTIGUOUS']: npd = np.ascontiguousarray(npd) self.gxvv.set_data(start, npd.shape[0], npd.data.tobytes(), gxu.gx_dtype_dimension(npd.dtype, self._dim)) def _get_data_np(self, start=0, n=None, dtype=None): """return data in a numpy array""" if n is None: n = self.length - start if self._dim == 1: sh = (n,) else: sh = (n, self._dim) bytearr = 
np.empty(sh, dtype=dtype).tobytes() self.gxvv.get_data(start, n, bytearr, gxu.gx_dtype_dimension(dtype, self._dim)) npd = np.frombuffer(bytearr, dtype=dtype).reshape(sh) return np.array(npd) @property def unit_of_measure(self): """ data unit of measurement""" return self._unit_of_measure @unit_of_measure.setter def unit_of_measure(self, uom): self._unit_of_measure = str(uom) @property def gxvv(self): """:class:`geosoft.gxapi.GXVV` instance""" return self._gxvv @property def fid(self): """ fid tuple (start,increment), can be set .. versionadded:: 9.1 """ return self._gxvv.get_fid_start(), self._gxvv.get_fid_incr() @fid.setter def fid(self, fid): self._gxvv.set_fid_start(fid[0]) self._gxvv.set_fid_incr(fid[1]) @property def length(self): """ number of elements in the VV, can be set .. versionadded:: 9.1 .. versionchanged:: 9.3 can be set """ return self.__len__() @length.setter def length(self, length): self.refid(self.fid, length) @property def gxtype(self): """ GX data type .. versionadded:: 9.1 """ return self._gxtype @property def dtype(self): """ numpy data type .. versionadded:: 9.1 """ return self._dtype @property def is_float(self): """ True if a base float type, 32 or 64-bit""" return self._is_float @property def is_float64(self): """ True if a base 64-bit float .. versionadded:: 9.3.1 """ if self.dtype == np.float64: return True return False @property def is_int(self): """ True if a base integer type""" return self._is_int @property def is_string(self): """ True if a base string type""" return self._is_string @property def dim(self): """Dimension of elements in the array, 1, 2 or 3.""" return self._dim @property def np(self): """ Numpy array of VV data, in the data type of the VV. Use :meth:`get_data` to get a numpy array in another dtype. Note that changing the data in the numpy array does NOT change the data in the VV. Use `set_data` to change data in the VV. .. 
versionadded:: 9.2 """ return self.get_data()[0] def get_data(self, dtype=None, start=0, n=None, float_dummies_to_nan=True): """ Return vv data in a numpy array :param start: index of first value, must be >=0 :param n: number of values wanted :param dtype: numpy data type wanted :returns: (data, (fid_start, fid_incr)) .. versionadded:: 9.1 """ if dtype is None: dtype = self._dtype else: dtype = np.dtype(dtype) if n is None: n = self.length - start else: n = min((self.length - start), n) if (n < 0) or (start < 0) or ((start >= self.length) and self.length > 0): raise VVException(_t('Cannot get (start,n) ({},{}) from vv of length {}').format(start, n, self.length)) if (n == 0) or (self.length == 0): npd = np.array([], dtype=dtype) else: # strings wanted if dtype.type is np.str_: sr = gxapi.str_ref() npd = np.empty((n,), dtype=dtype) for i in range(start, start + n): self._gxvv.get_string(i, sr) npd[i - start] = sr.value # numeric wanted else: # strings to numeric if self._gxtype < 0: if np.issubclass_(dtype.type, np.integer): vvd = gxapi.GXVV.create_ext(gxapi.GS_LONG, n) else: vvd = gxapi.GXVV.create_ext(gxapi.GS_DOUBLE, n) vvd.copy(self._gxvv) # this will do the conversion npd = vvd.get_data_np(start, n, dtype) # numeric to numeric else: npd = self._get_data_np(start, n, dtype) if float_dummies_to_nan: if npd.dtype == np.float32 or npd.dtype == np.float64: npd[npd == gxu.gx_dummy(npd.dtype)] = np.nan fid = self.fid start = fid[0] + start * fid[1] return npd, (start, fid[1]) def set_data(self, data, fid=None): """ Set vv data from an iterable, which can be another `GXvv` instance. If the data is float type numpy.nan \ are used to indicate dummy values. :param data: data array of `GXvv` instance, will be reshapped to VV dimension :param fid: fid tuple (start,increment), default does not change current fid .. versionadded:: 9.1 .. versionchanged:: 9.3 default fid leaves fid unchanged .. versionchanged:: 9.3.1 now accepts `GXvv` instance as the source data. 
""" if isinstance(data, GXvv): data = data.np elif not isinstance(data, np.ndarray): data = np.array(data) if data.size == 0: self.length = 0 if fid: self.fid = fid return if self.dim == 1: data = data.flatten() else: data = data.reshape((-1, self.dim)) if data.size > gxapi.iMAX: raise VVException(_t('data length {}, max allowed is {})').format(np.size(data), gxapi.iMAX)) # numerical data if self._gxtype >= 0: # strings if gxu.gx_dtype(data.dtype) < 0: i = 0 for s in data: self._gxvv.set_double(i, gxu.rdecode(s)) i += 1 else: if data.dtype == np.float32 or data.dtype == np.float64: if np.isnan(data).any(): data = data.copy() data[np.isnan(data)] = gxu.gx_dummy(data.dtype) self._set_data_np(data) # strings else: i = 0 for d in data: self._gxvv.set_string(i, str(d)) i += 1 self._gxvv.set_len(data.shape[0]) if fid: self.fid = fid def refid(self, fid, length=None): """ Resample VV to a new fiducial and length :param fid: (start, incr) :param length: length, if not specified the length is calculated to the end of the data. .. versionadded:: 9.1 .. versionchanged:: 9.3.1 added default length calculation """ if fid[1] <= 0.: raise VVException(_t('fid increment must be greater than 0.')) if length is None: end_fid = self.fid[0] + self.fid[1] * (self.length - 1) length = (((end_fid - fid[0]) + fid[1] * 0.5) // fid[1]) + 1 if length < 0: length = 0 self._gxvv.re_fid(fid[0], fid[1], int(length)) self.fid = fid def list(self): """ Return the content of the VV as a list. .. versionadded:: 9.2 """ return [v[0] for v in self] def fill(self, value): """ Fill a vv with a constant value. :param value: value to fill .. versionadded:: 9.3.1 """ if self.is_float: self.gxvv.fill_double(float(value)) if self.is_int: self.gxvv.fill_int(int(value)) else: self.gxvv.fill_string(str(value)) def min_max(self): """ Return the minimum and maximum values as doubles. Strings are converted if possible. :return: (minimum, maximum), or if all dummy, (None, None) .. 
versionadded:: 9.3.1 """ rmin = gxapi.float_ref() rmax = gxapi.float_ref() self._gxvv.range_double(rmin, rmax) if rmin.value == gxapi.rDUMMY: return (None, None) return rmin.value, rmax.value <file_sep>/docs/GXFFT.rst .. _GXFFT: GXFFT class ================================== .. autoclass:: geosoft.gxapi.GXFFT :members: .. _FFT_DETREND: FFT_DETREND constants ----------------------------------------------------------------------- Detrending option .. autodata:: geosoft.gxapi.FFT_DETREND_NONE :annotation: .. autoattribute:: geosoft.gxapi.FFT_DETREND_NONE .. autodata:: geosoft.gxapi.FFT_DETREND_ENDS :annotation: .. autoattribute:: geosoft.gxapi.FFT_DETREND_ENDS .. autodata:: geosoft.gxapi.FFT_DETREND_ALL :annotation: .. autoattribute:: geosoft.gxapi.FFT_DETREND_ALL .. autodata:: geosoft.gxapi.FFT_DETREND_MEAN :annotation: .. autoattribute:: geosoft.gxapi.FFT_DETREND_MEAN .. _FFT_WHITE_NOISE: FFT_WHITE_NOISE constants ----------------------------------------------------------------------- Add white noise option .. autodata:: geosoft.gxapi.FFT_WHITE_NOISE_ADD :annotation: .. autoattribute:: geosoft.gxapi.FFT_WHITE_NOISE_ADD .. autodata:: geosoft.gxapi.FFT_WHITE_NOISE_SET :annotation: .. autoattribute:: geosoft.gxapi.FFT_WHITE_NOISE_SET <file_sep>/geosoft/gxapi/GXVV.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block from . import gxapi_cy_extend import numpy as np ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVV(gxapi_cy.WrapVV): """ GXVV class. The `GXVV <geosoft.gxapi.GXVV>` class stores very long vector (array) data (such as channel data from an OASIS database) in memory and performs specific actions on the data. 
This set of functions is similar to the `GXVM <geosoft.gxapi.GXVM>` functions except that you cannot access data directly and therefore you cannot use a `GXVV <geosoft.gxapi.GXVV>` to pass data to an external (non-Geosoft) Dynamic Link Library (DLL) object function. If you want to pass data to a DLL, you must move a subset of the data stored in memory to a small vector object and then use the `GXGEO.get_ptr_vm <geosoft.gxapi.GXGEO.get_ptr_vm>` function to pass a pointer to the data on to the external function. See `GXVVU <geosoft.gxapi.GXVVU>` for more utility methods. """ def __init__(self, handle=0): super(GXVV, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVV <geosoft.gxapi.GXVV>` :returns: A null `GXVV <geosoft.gxapi.GXVV>` :rtype: GXVV """ return GXVV() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def get_data(self, start, elements, data, gs_type): """ Copy data from user memory to a `GXVV <geosoft.gxapi.GXVV>` :param start: Start Location :param elements: Number of Elements :param data: Data buffer copy data into from `GXVV <geosoft.gxapi.GXVV>` :param gs_type: :ref:`GS_TYPES` :type start: int :type elements: int :type data: bytearray :type gs_type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_data(start, elements, data, gs_type) def set_data(self, start, elements, data, gs_type): """ Copy data from user memory to a `GXVV <geosoft.gxapi.GXVV>` :param start: Start Location :param elements: Number of Elements :param data: Data buffer to copy into into `GXVV <geosoft.gxapi.GXVV>` :param gs_type: :ref:`GS_TYPES` :type start: int :type elements: int :type data: bytearray :type gs_type: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_data(start, elements, data, gs_type) def copy(self, vv_s): """ Copy one `GXVV <geosoft.gxapi.GXVV>` to another. :param vv_s: Source :type vv_s: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._copy(vv_s) def copy2(self, dest, vv_s, source, n): """ Copy part of a vector into part of another vector. :param dest: Destination start element :param vv_s: Source `GXVV <geosoft.gxapi.GXVV>` (can be the same as Destination) :param source: Source start element :param n: Number of points :type dest: int :type vv_s: GXVV :type source: int :type n: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** 1. Unlike Copy_VV destination `GXVV <geosoft.gxapi.GXVV>` is not reallocated, nor is the length changed. The caller must make any desired changes. 2. All `GXVV <geosoft.gxapi.GXVV>` types are supported and will be converted using Convert_GS if necessary. """ self._copy2(dest, vv_s, source, n) def log(self, log_base, log_mode, log_min): """ Apply log to the vv. :param log_base: :ref:`VV_LOG_BASE` :param log_mode: :ref:`VV_LOGMODE` :param log_min: Minimum value for :ref:`VV_LOG_NEGATIVE` :type log_base: int :type log_mode: int :type log_min: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Minimum value will be defaulted to 1.0 if it is 0.0 or less than 0.0 """ self._log(log_base, log_mode, log_min) def log_linear(self, log_min): """ Take the log10 or original value of a `GXVV <geosoft.gxapi.GXVV>`. :param log_min: Minimum value :type log_min: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the data is in the range +/- minimum value, it is left alone. Otherwise, the result is calculated as :: d = dMin * (log10(fabs(d)/dMin)+1.0) Sign is reapplied to d. Minimum value will be defaulted to 1.0 if it is negative or 0. """ self._log_linear(log_min) def mask(self, vv_m): """ Mask one `GXVV <geosoft.gxapi.GXVV>` against another. :param vv_m: Mask `GXVV <geosoft.gxapi.GXVV>` :type vv_m: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All elements in the mask `GXVV <geosoft.gxapi.GXVV>` that are dummies will replace the value in the original `GXVV <geosoft.gxapi.GXVV>` with a dummy. The modified `GXVV <geosoft.gxapi.GXVV>` will always be the same length as the mask `GXVV <geosoft.gxapi.GXVV>` after this call. If the mask is longer than the target, the target will be lengthenned with dummies. """ self._mask(vv_m) def reverse(self): """ Reverses the order of the data in a `GXVV <geosoft.gxapi.GXVV>`. .. versionadded:: 5.1.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._reverse() def serial(self, bf): """ Serialize :type bf: GXBF .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._serial(bf) def trans(self, base, mult): """ Translate (`GXVV <geosoft.gxapi.GXVV>` + base ) * mult :param base: Base value :param mult: Mult value :type base: float :type mult: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** All `GXVV <geosoft.gxapi.GXVV>` types now supported. 
""" self._trans(base, mult) def abs(self): """ Take the absolute value of values in a `GXVV <geosoft.gxapi.GXVV>`. .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._abs() def add(self, vv_b, vv_c): """ Add two VVs: VV_A + VV_B = VV_C :param vv_b: `GXVV <geosoft.gxapi.GXVV>` B :param vv_c: `GXVV <geosoft.gxapi.GXVV>` C (returned), C = A + B :type vv_b: GXVV :type vv_c: GXVV .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._add(vv_b, vv_c) def add2(self, f1, vv_b, f2, vv_c): """ Add two VVs with linear factors: VV_A*f1 + VV_B*f2 = VV_C :param f1: Multiplier f1 for A :param vv_b: `GXVV <geosoft.gxapi.GXVV>` B :param f2: Multiplier f2 for B :param vv_c: `GXVV <geosoft.gxapi.GXVV>` C (returned), C = A*f1 + B*f2 :type f1: float :type vv_b: GXVV :type f2: float :type vv_c: GXVV .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The multipliers must be defined and within the `GS_R8MN <geosoft.gxapi.GS_R8MN>` `GS_R8MX <geosoft.gxapi.GS_R8MX>` range. """ self._add2(f1, vv_b, f2, vv_c) def append(self, vv_a): """ Appends `GXVV <geosoft.gxapi.GXVV>`'s :param vv_a: `GXVV <geosoft.gxapi.GXVV>` to append :type vv_a: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._append(vv_a) def crc(self, pul_crc): """ Compute the CRC value of a `GXVV <geosoft.gxapi.GXVV>`. :param pul_crc: Previous CRC `CRC_INIT_VALUE <geosoft.gxapi.CRC_INIT_VALUE>` :type pul_crc: int :returns: CRC Value :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._crc(pul_crc) return ret_val def crc_inexact(self, pul_crc, float_bits, double_bits): """ Compute the CRC value of a `GXVV <geosoft.gxapi.GXVV>` and allows you to specify number of bits of floats/doubles to drop so that the CRC will be same even of this are changed. :param pul_crc: Previous CRC `CRC_INIT_VALUE <geosoft.gxapi.CRC_INIT_VALUE>` :param float_bits: :ref:`VV_FLOAT_CRC_BITS` :param double_bits: :ref:`VV_DOUBLE_CRC_BITS` :type pul_crc: int :type float_bits: int :type double_bits: int :returns: CRC Value :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Very useful for testing where the last bits of accuracy are not as important. """ ret_val = self._crc_inexact(pul_crc, float_bits, double_bits) return ret_val @classmethod def create(cls, type, elements): """ Create a `GXVV <geosoft.gxapi.GXVV>`. :param type: :ref:`GEO_VAR` :param elements: Maximum number of elements in the `GXVV <geosoft.gxapi.GXVV>`, >= 0 :type type: int :type elements: int :returns: `GXVV <geosoft.gxapi.GXVV>` Object :rtype: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** To set the fiducial start and increment for the data in the `GXVV <geosoft.gxapi.GXVV>` you need to call `set_fid_start <geosoft.gxapi.GXVV.set_fid_start>` and `set_fid_incr <geosoft.gxapi.GXVV.set_fid_incr>`. If you are basing the `GXVV <geosoft.gxapi.GXVV>` data on fiducial information from a different `GXVV <geosoft.gxapi.GXVV>`, call GetFidStart_VV and GetFidIncr_VV to obtain that `GXVV <geosoft.gxapi.GXVV>`'s fiducial information. 
Do this prior to setting the new `GXVV <geosoft.gxapi.GXVV>`'s fiducial start and increment. If you do not know the required length for a `GXVV <geosoft.gxapi.GXVV>`, use 0 and the `GXVV <geosoft.gxapi.GXVV>` length will be adjusted as needed. This is a bit less efficient than setting the length when you know it. """ ret_val = gxapi_cy.WrapVV._create(GXContext._get_tls_geo(), type, elements) return GXVV(ret_val) @classmethod def create_ext(cls, type, elements): """ Create a `GXVV <geosoft.gxapi.GXVV>`, using one of the :ref:`GS_TYPES` special data types. :param type: :ref:`GS_TYPES` :param elements: Maximum number of elements in the `GXVV <geosoft.gxapi.GXVV>`, >= 0 :type type: int :type elements: int :returns: `GXVV <geosoft.gxapi.GXVV>` Object :rtype: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See `create <geosoft.gxapi.GXVV.create>` Do not use data type flags: `GS_INT <geosoft.gxapi.GS_INT>` or `GS_REAL <geosoft.gxapi.GS_REAL>`, this will result in a respective data type of unsigned byte or short for the `GXVV <geosoft.gxapi.GXVV>`. """ ret_val = gxapi_cy.WrapVV._create_ext(GXContext._get_tls_geo(), type, elements) return GXVV(ret_val) @classmethod def create_s(cls, bf): """ Create a `GXVV <geosoft.gxapi.GXVV>` from serialized source. :type bf: GXBF :returns: `GXVV <geosoft.gxapi.GXVV>` Object :rtype: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapVV._create_s(GXContext._get_tls_geo(), bf) return GXVV(ret_val) def diff(self, n): """ Calculate differences. :param n: Number of differences :type n: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Differences with dummies result in dummies. 
An even number of differences locates data accurately. An odd number of differences locates result 1/2 element lower in the `GXVV <geosoft.gxapi.GXVV>`. """ self._diff(n) def divide(self, vv_y, vv_z): """ Divide one `GXVV <geosoft.gxapi.GXVV>` by another: VV_A / VV_B = VV_C :param vv_y: `GXVV <geosoft.gxapi.GXVV>` B :param vv_z: `GXVV <geosoft.gxapi.GXVV>` C (returned), C = A / B :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._divide(vv_y, vv_z) def fid_norm(self, vv2): """ Re-sample a pair of `GXVV <geosoft.gxapi.GXVV>`'s to match each other. :param vv2: `GXVV <geosoft.gxapi.GXVV>` to resample :type vv2: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Both `GXVV <geosoft.gxapi.GXVV>`'s will return with the same start fid and fid increment. The smaller start fid and fid increment will be used. """ self._fid_norm(vv2) def fill_int(self, value): """ Fill a `GXVV <geosoft.gxapi.GXVV>` with an int value. :param value: Value to fill with :type value: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._fill_int(value) def fill_double(self, val): """ Fill a `GXVV <geosoft.gxapi.GXVV>` with a real value. :param val: Value to fill with :type val: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._fill_double(val) def fill_string(self, str_val): """ Fill a `GXVV <geosoft.gxapi.GXVV>` with a string value. :param str_val: String :type str_val: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._fill_string(str_val.encode()) def count_dummies(self, start, elem): """ Count the number of dummies in a `GXVV <geosoft.gxapi.GXVV>` :param start: Starting point in `GXVV <geosoft.gxapi.GXVV>` (0 for all) :param elem: Number of elements to process (-1 for all) :type start: int :type elem: int :returns: The count :rtype: int .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._count_dummies(start, elem) return ret_val def find_dum(self, start, end, yn, order): """ Finds the first dummy or non-dummy value in a `GXVV <geosoft.gxapi.GXVV>` :param start: Lowest element in `GXVV <geosoft.gxapi.GXVV>` element to search :param end: Highest element in `GXVV <geosoft.gxapi.GXVV>` to search :param yn: 0 = find first dummy / 1 = find first non-dummy :param order: 0 = use increasing order / 1 = use decreasing order :type start: int :type end: int :type yn: int :type order: int :returns: The index of the first dummy or non-dummy value. -1 if not found, 0 if the length of the `GXVV <geosoft.gxapi.GXVV>` is 0. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If a decreasing order search is performed, it will start at the highest element specified. (Conversely, an increasing order starts at the lowest element specified.) """ ret_val = self._find_dum(start, end, yn, order) return ret_val def get_fid_expansion(self): """ Gets the Fiducial expansion from a `GXVV <geosoft.gxapi.GXVV>` :returns: Number of expanions for this `GXVV <geosoft.gxapi.GXVV>` (see `re_fid_vv <geosoft.gxapi.GXVV.re_fid_vv>`) :rtype: int .. 
versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_fid_expansion() return ret_val def get_int(self, element): """ Get an integer element from a `GXVV <geosoft.gxapi.GXVV>`. :param element: Element wanted :type element: int :returns: Element wanted, or `iDUMMY <geosoft.gxapi.iDUMMY>` if the value is dummy or outside of the range of data. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_int(element) return ret_val def get_string(self, element, str_val): """ Get a string element from a `GXVV <geosoft.gxapi.GXVV>`. :param element: Element wanted :param str_val: String in which to place element :type element: int :type str_val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns Element wanted, or blank string if the value is dummy or outside of the range of data. Type conversions are performed if necessary. Dummy values are converted to "*" string. """ str_val.value = self._get_string(element, str_val.value.encode()) def get_ext_type(self): """ Return the internal data type of this VV :returns: The internal VV data type :ref:`GS_TYPES` :rtype: int .. versionadded:: 9.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_ext_type() return ret_val def index_max(self, max): """ Get the index where the maximum value occurs. :param max: Maximum value (`rDUMMY <geosoft.gxapi.rDUMMY>` if all dummies or no data) :type max: float_ref :returns: Index of the maximum value, `iDUMMY <geosoft.gxapi.iDUMMY>` if no valid data. :rtype: int .. 
versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If more than one value has the same maximum value, the index of the first is returned. """ ret_val, max.value = self._index_max(max.value) return ret_val def length(self): """ Returns current `GXVV <geosoft.gxapi.GXVV>` length. :returns: # of elements in the `GXVV <geosoft.gxapi.GXVV>`. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._length() return ret_val def index_insert(self, vv_d, vv_i): """ Insert items into a `GXVV <geosoft.gxapi.GXVV>` using an index `GXVV <geosoft.gxapi.GXVV>`. :param vv_d: Data items to insert (must be same type as output data `GXVV <geosoft.gxapi.GXVV>`) :param vv_i: Index `GXVV <geosoft.gxapi.GXVV>` (must be type INT) :type vv_d: GXVV :type vv_i: GXVV .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The items in the input data `GXVV <geosoft.gxapi.GXVV>` are inserted into the output `GXVV <geosoft.gxapi.GXVV>` using the indices in the index `GXVV <geosoft.gxapi.GXVV>`. Values not referenced are not altered, so the output `GXVV <geosoft.gxapi.GXVV>` should be pre-initialized. The output `GXVV <geosoft.gxapi.GXVV>` length will NOT be changed, and index values referencing beyond the end of the output `GXVV <geosoft.gxapi.GXVV>` data will return an error. This function is useful when working with channel data that include dummies, but where the dummies must be removed before processing. Create and initialize an index (0, 1, 2...) `GXVV <geosoft.gxapi.GXVV>`, using the `init_index <geosoft.gxapi.GXVV.init_index>` function, and when you remove the dummies, remove the corresponding index values as well. 
After processing, init a `GXVV <geosoft.gxapi.GXVV>` to dummies, then use `index_insert <geosoft.gxapi.GXVV.index_insert>` to put the processed values at the correct locations in the data `GXVV <geosoft.gxapi.GXVV>` before you write it back to the channel. """ self._index_insert(vv_d, vv_i) def index_order(self, vv_d): """ Reorder a `GXVV <geosoft.gxapi.GXVV>`. :param vv_d: `GXVV <geosoft.gxapi.GXVV>` to order :type vv_d: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Given an index `GXVV <geosoft.gxapi.GXVV>` (of type INT), this method reorders a `GXVV <geosoft.gxapi.GXVV>`. Please make sure that the index holds valid information. """ self._index_order(vv_d) def init_index(self, n): """ Initialize an index `GXVV <geosoft.gxapi.GXVV>` to values 0, 1, 2, etc... :param n: Final length of `GXVV <geosoft.gxapi.GXVV>` (-1 to use current length). :type n: int .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Populates a `GXVV <geosoft.gxapi.GXVV>` with the values 0, 1, 2, 3, 4 etc., to be used for various indexing functions, such as `index_insert <geosoft.gxapi.GXVV.index_insert>` or `index_order <geosoft.gxapi.GXVV.index_order>`. """ self._init_index(n) def inv_log(self, log_base, log_mode, log_min): """ Inverse of the `log <geosoft.gxapi.GXVV.log>` function. :param log_base: :ref:`VV_LOG_BASE` :param log_mode: :ref:`VV_LOGMODE` :param log_min: Minimum value for :ref:`VV_LOG_NEGATIVE` :type log_base: int :type log_mode: int :type log_min: float .. versionadded:: 7.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the inverse function for `log <geosoft.gxapi.GXVV.log>`, with the same inputs. 
""" self._inv_log(log_base, log_mode, log_min) def order(self, rep): """ Identifies the data size order of the elements. :param rep: Returned: Do any values repeat (0: No, 1: Yes)? :type rep: int_ref :returns: :ref:`VV_ORDER` :rtype: int .. versionadded:: 6.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val, rep.value = self._order(rep.value) return ret_val def lines_to_xy(self, vv_x, vv_y): """ Convert a 2D Line segment `GXVV <geosoft.gxapi.GXVV>` into X and Y VVs. :param vv_x: Output `GXVV <geosoft.gxapi.GXVV>` with X locations (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :param vv_y: Output `GXVV <geosoft.gxapi.GXVV>` with Y locations (`GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`) :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 8.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Some GX functions (such as `GXTIN.get_voronoi_edges <geosoft.gxapi.GXTIN.get_voronoi_edges>`) return a special `GXVV <geosoft.gxapi.GXVV>` where each element contains the start and end points of lines, (X_1, Y_1) and (X_2, Y_2). This GX dumps the individual X and Y values into individual X and Y VVs of type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>` (REAL). N lines produces 2*N X and Y values. """ self._lines_to_xy(vv_x, vv_y) def lookup_index(self, vvi, vvr): """ Lookup a `GXVV <geosoft.gxapi.GXVV>` from another `GXVV <geosoft.gxapi.GXVV>` using an index `GXVV <geosoft.gxapi.GXVV>`. :param vvi: Index `GXVV <geosoft.gxapi.GXVV>` of REAL :param vvr: Result `GXVV <geosoft.gxapi.GXVV>` (same type as Data `GXVV <geosoft.gxapi.GXVV>`) :type vvi: GXVV :type vvr: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This method assigns index values of 0.0, 1.0, 2.0 etc. 
to the individual values in the input Data `GXVV <geosoft.gxapi.GXVV>`, and uses linear interpolation to calculate the values of Result `GXVV <geosoft.gxapi.GXVV>` at the input indices contained in the Index `GXVV <geosoft.gxapi.GXVV>`. If the input Data `GXVV <geosoft.gxapi.GXVV>` is string type, then only values at the integral index values are returned. See also `setup_index <geosoft.gxapi.GXVV.setup_index>` for an example of how this can be implemented. """ self._lookup_index(vvi, vvr) def make_mem_based(self): """ Make this `GXVV <geosoft.gxapi.GXVV>` use regular instead of virtual memory. .. versionadded:: 5.1.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function should be called immediately aftter `create <geosoft.gxapi.GXVV.create>`. Normal VVs are optimised to prevent thrashing, and to efficiently support many extremely large VVs, although there is a small performance penalty. This function is intended for `GXVV <geosoft.gxapi.GXVV>`'s that you know can be handled by the operating system virtual memory manager, and will be used heavily. By using a memory based `GXVV <geosoft.gxapi.GXVV>`, you can achieve some performance improvements provided your application does not cause the memory manager to "thrash". External programs that use the GX API may prefer to use memory-based `GXVV <geosoft.gxapi.GXVV>`'s because you can get direct access to the `GXVV <geosoft.gxapi.GXVV>` through the `GXGEO.get_ptr_vv <geosoft.gxapi.GXGEO.get_ptr_vv>` function (see gx_extern.h). .. seealso:: `GXGEO.get_ptr_vv <geosoft.gxapi.GXGEO.get_ptr_vv>` in gx_extern.h """ self._make_mem_based() def mask_and(self, vv_y, vv_z): """ Create mask from logical AND of two VVs. :param vv_y: `GXVV <geosoft.gxapi.GXVV>` B :param vv_z: `GXVV <geosoft.gxapi.GXVV>` C (returned) :type vv_y: GXVV :type vv_z: GXVV .. 
versionadded:: 5.1.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If both values are non-dummies, then result is 1, else dummy. """ self._mask_and(vv_y, vv_z) def mask_or(self, vv_y, vv_z): """ Create mask from logical OR of two VVs. :param vv_y: `GXVV <geosoft.gxapi.GXVV>` B :param vv_z: `GXVV <geosoft.gxapi.GXVV>` C (returned) :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.1.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If either values is non-dummy, then result is 1, else dummy. """ self._mask_or(vv_y, vv_z) def mask_str(self, vv_m, str_val): """ Mask one `GXVV <geosoft.gxapi.GXVV>` against another using a string. :param vv_m: Mask `GXVV <geosoft.gxapi.GXVV>` :param str_val: String to compare :type vv_m: GXVV :type str_val: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** All elements in the mask `GXVV <geosoft.gxapi.GXVV>` that are same as string will replace the original `GXVV <geosoft.gxapi.GXVV>` with a 1. The modified `GXVV <geosoft.gxapi.GXVV>` will always be expanded to the MaskVV size but not shortened after this call. If the mask is longer than the target, the target will be lengthenned with dummies before applying the mask. """ self._mask_str(vv_m, str_val.encode()) def multiply(self, vv_y, vv_z): """ Multiply two VVs: VV_A * VV_B = VV_C :param vv_y: `GXVV <geosoft.gxapi.GXVV>` B :param vv_z: `GXVV <geosoft.gxapi.GXVV>` C (returned), C = A * B :type vv_y: GXVV :type vv_z: GXVV .. 
versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._multiply(vv_y, vv_z) def amplitude_3d(self, v_vx, v_vy, v_vz): """ Calculate the 3D length for XYZ component VVs :param v_vx: X component `GXVV <geosoft.gxapi.GXVV>` :param v_vy: Y component `GXVV <geosoft.gxapi.GXVV>` :param v_vz: Z component `GXVV <geosoft.gxapi.GXVV>` :type v_vx: GXVV :type v_vy: GXVV :type v_vz: GXVV .. versionadded:: 8.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._amplitude_3d(v_vx, v_vy, v_vz) def polygon_mask(self, yvv, rvv, pply, dummy): """ Mask a `GXVV <geosoft.gxapi.GXVV>` using XY data and a polygon :param yvv: Y `GXVV <geosoft.gxapi.GXVV>` :param rvv: `GXVV <geosoft.gxapi.GXVV>` to be masked :param pply: Polygon Object :param dummy: :ref:`VV_MASK` :type yvv: GXVV :type rvv: GXVV :type pply: GXPLY :type dummy: int .. versionadded:: 5.1.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The VVs has to be the same length """ self._polygon_mask(yvv, rvv, pply, dummy) @classmethod def project(cls, pj, vv_x, vv_y): """ This method projects an X and Y `GXVV <geosoft.gxapi.GXVV>`. :param vv_x: X :param vv_y: Y :type pj: GXPJ :type vv_x: GXVV :type vv_y: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This function is equivalent to `GXPJ.convert_vv <geosoft.gxapi.GXPJ.convert_vv>`. """ gxapi_cy.WrapVV._project(GXContext._get_tls_geo(), pj, vv_x, vv_y) @classmethod def project_3d(cls, pj, vv_x, vv_y, vv_z): """ This method projects an X,Y,Z `GXVV <geosoft.gxapi.GXVV>`. 
:param vv_x: X :param vv_y: Y :param vv_z: Z :type pj: GXPJ :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This function is equivalent to `GXPJ.convert_vv3 <geosoft.gxapi.GXPJ.convert_vv3>`. """ gxapi_cy.WrapVV._project_3d(GXContext._get_tls_geo(), pj, vv_x, vv_y, vv_z) def range_double(self, min, max): """ Get the min. and max. values of a `GXVV <geosoft.gxapi.GXVV>` while ignoring dummies. :param min: Minimum value - returned :param max: Maximum value - returned :type min: float_ref :type max: float_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Minimum and maximum become `GS_R8DM <geosoft.gxapi.GS_R8DM>` if entire `GXVV <geosoft.gxapi.GXVV>` is dummy. """ min.value, max.value = self._range_double(min.value, max.value) def re_fid(self, start, incr, length): """ Re-sample a `GXVV <geosoft.gxapi.GXVV>` to a new fid start/icrement :param start: New fid start :param incr: New fid increment :param length: New length :type start: float :type incr: float :type length: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._re_fid(start, incr, length) def re_fid_vv(self, vv_m): """ Re-sample a `GXVV <geosoft.gxapi.GXVV>` to match another `GXVV <geosoft.gxapi.GXVV>`. :param vv_m: `GXVV <geosoft.gxapi.GXVV>` model (fid increment and start) :type vv_m: GXVV .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This method will honor the `GXVV <geosoft.gxapi.GXVV>` FID Expansion and will expand/contract `GXVV <geosoft.gxapi.GXVV>`'s based on this flag if it is used. 
""" self._re_fid_vv(vv_m) def re_sample(self, c_start, c_incr, n_start, n_incr, length, unused): """ Resamples a `GXVV <geosoft.gxapi.GXVV>` from one fid/incr to another fid/incr. :param c_start: Current start fid :param c_incr: Current increment :param n_start: New fid start :param n_incr: New fid increment :param length: New length :param unused: This parameter is not used :type c_start: float :type c_incr: float :type n_start: float :type n_incr: float :type length: int :type unused: int .. versionadded:: 5.1.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._re_sample(c_start, c_incr, n_start, n_incr, length, unused) def get_fid_incr(self): """ Gets the Fiducial increment from a `GXVV <geosoft.gxapi.GXVV>` :returns: Fiducial increment of the `GXVV <geosoft.gxapi.GXVV>`. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_fid_incr() return ret_val def get_fid_start(self): """ Gets the Fiducial start from a `GXVV <geosoft.gxapi.GXVV>` :returns: Fiducial start of the `GXVV <geosoft.gxapi.GXVV>`. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_fid_start() return ret_val def get_double(self, element): """ Get a real element from a `GXVV <geosoft.gxapi.GXVV>`. :param element: Element wanted :type element: int :returns: Element wanted, or `rDUMMY <geosoft.gxapi.rDUMMY>` if the value is dummy or outside of the range of data. :rtype: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Type conversions are performed if necessary. Dummy values are converted to "*" string. 
""" ret_val = self._get_double(element) return ret_val def sum(self): """ Calculate the sum of the values in a `GXVV <geosoft.gxapi.GXVV>`. :returns: The sum of the elements. :rtype: float .. versionadded:: 7.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Dummy value is treated as Zero(0) """ ret_val = self._sum() return ret_val def weighted_mean(self, vv_weights): """ Calculate the weighted average of the values. :param vv_weights: `GXVV <geosoft.gxapi.GXVV>` of weights :type vv_weights: GXVV :returns: The weighted average of the values. :rtype: float .. versionadded:: 7.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Dummy values are ignored. """ ret_val = self._weighted_mean(vv_weights) return ret_val def set_fid_expansion(self, expand): """ Sets the Fiducial expansion from a `GXVV <geosoft.gxapi.GXVV>` :param expand: Expansion setting (1 or greater) :type expand: int .. versionadded:: 6.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_fid_expansion(expand) def set_fid_incr(self, incr): """ Sets the Fiducial increment of a `GXVV <geosoft.gxapi.GXVV>` :param incr: New increment :type incr: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_fid_incr(incr) def set_fid_start(self, start): """ Sets the Fiducial start of a `GXVV <geosoft.gxapi.GXVV>` :param start: New start :type start: float .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_fid_start(start) def set_int(self, element, value): """ Set an integer element in a `GXVV <geosoft.gxapi.GXVV>`. :param element: Element to set :param value: Value to set :type element: int :type value: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. If the element is > current `GXVV <geosoft.gxapi.GXVV>` length, the `GXVV <geosoft.gxapi.GXVV>` length is increased. It is good practice to set the length ahead of time to the expected maximum value, as some `GXVV <geosoft.gxapi.GXVV>` processes rely on the current maximum length of the `GXVV <geosoft.gxapi.GXVV>` when you pass it in as an argument, and unexpected results may occur if the length is not what you expect it to be because of dynamic allocation at an earlier time. """ self._set_int(element, value) def set_int_n(self, start, n, value): """ Set N integer elements in a `GXVV <geosoft.gxapi.GXVV>`. :param start: Start element (>= 0) :param n: # elements to set (-1 sets all elements to end) :param value: Value to set :type start: int :type n: int :type value: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. If the element is > current `GXVV <geosoft.gxapi.GXVV>` length, the `GXVV <geosoft.gxapi.GXVV>` length is increased. """ self._set_int_n(start, n, value) def set_len(self, size): """ Set the length of a `GXVV <geosoft.gxapi.GXVV>`. :param size: New length (number of elements) :type size: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If increasing the `GXVV <geosoft.gxapi.GXVV>` size, new elements are set to dummies. It is good practice to set the length ahead of time to the expected maximum value, as some `GXVV <geosoft.gxapi.GXVV>` processes rely on the current maximum length of the `GXVV <geosoft.gxapi.GXVV>` when you pass it in as an argument, and unexpected results may occur if the length is not what you expect it to be because of dynamic allocation at an earlier time. """ self._set_len(size) def set_double(self, element, value): """ Set a real element in a `GXVV <geosoft.gxapi.GXVV>`. :param element: Element to set :param value: Value to set :type element: int :type value: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. If the element is > current `GXVV <geosoft.gxapi.GXVV>` length, the `GXVV <geosoft.gxapi.GXVV>` length is increased. It is good practice to set the length ahead of time to the expected maximum value, as some `GXVV <geosoft.gxapi.GXVV>` processes rely on the current maximum length of the `GXVV <geosoft.gxapi.GXVV>` when you pass it in as an argument, and unexpected results may occur if the length is not what you expect it to be because of dynamic allocation at an earlier time. """ self._set_double(element, value) def set_double_n(self, start, n, value): """ Set N real elements in a `GXVV <geosoft.gxapi.GXVV>`. :param start: Start element (>= 0) :param n: # elements to set (-1 sets all elements to end) :param value: Value to set :type start: int :type n: int :type value: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. 
If the element is > current `GXVV <geosoft.gxapi.GXVV>` length, the `GXVV <geosoft.gxapi.GXVV>` length is increased. """ self._set_double_n(start, n, value) def set_string(self, element, value): """ Set a string element in a `GXVV <geosoft.gxapi.GXVV>`. :param element: Element to set :param value: String to set :type element: int :type value: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. If the element is > current `GXVV <geosoft.gxapi.GXVV>` length, the `GXVV <geosoft.gxapi.GXVV>` length is increased. It is good practice to set the length ahead of time to the expected maximum value, as some `GXVV <geosoft.gxapi.GXVV>` processes rely on the current maximum length of the `GXVV <geosoft.gxapi.GXVV>` when you pass it in as an argument, and unexpected results may occur if the length is not what you expect it to be because of dynamic allocation at an earlier time. """ self._set_string(element, value.encode()) def set_string_n(self, start, n, value): """ Set N string elements in a `GXVV <geosoft.gxapi.GXVV>`. :param start: Start element (>= 0) :param n: # elements to set (-1 sets all elements to end) :param value: String to set :type start: int :type n: int :type value: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Element being set cannot be < 0. If the element is > current `GXVV <geosoft.gxapi.GXVV>` length, the `GXVV <geosoft.gxapi.GXVV>` length is increased. """ self._set_string_n(start, n, value.encode()) def setup_index(self, vvq, vvi, mode, space): """ Setup an index `GXVV <geosoft.gxapi.GXVV>` from VV1 to VV2. 
:param vvq: Query `GXVV <geosoft.gxapi.GXVV>` (same type as Data `GXVV <geosoft.gxapi.GXVV>`) :param vvi: `GXVV <geosoft.gxapi.GXVV>` index `GXVV <geosoft.gxapi.GXVV>` of type REAL :param mode: :ref:`VV_LOOKUP` :param space: Spacing for some modes :type vvq: GXVV :type vvi: GXVV :type mode: int :type space: float .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The input reference `GXVV <geosoft.gxapi.GXVV>` must be in ascending numerical order. If your reference data is NOT ordered, then use the `sort_index1 <geosoft.gxapi.GXVV.sort_index1>` function to create an order index, then sort both the reference and data VVs using this index `GXVV <geosoft.gxapi.GXVV>` before you call `setup_index <geosoft.gxapi.GXVV.setup_index>`. Example: You have a reference data set taken at specific times, ``hVVt``, ``hVVy`` and you want to calculate/estimate/interpolate the values ``hVVy2`` at a second set of times ``hVVt2`` Step 1: Create an index, ``hVVi``, type `GS_DOUBLE <geosoft.gxapi.GS_DOUBLE>`, and call `setup_index <geosoft.gxapi.GXVV.setup_index>`. with: ``hVVt2, hVVi, VV_LOOKUP_XXX, rSpacing`` Internally, this assigns index values of 0.0, 1.0, 2.0 etc. to the individual values in ``hVVt``, then, depending on the lookup method chosen, assigns fractional index values to the input values in ``hVVt2``. Step 2: To determine what the lookup values ``hVVy2`` should be at times ``hVVt2``, call the `lookup_index <geosoft.gxapi.GXVV.lookup_index>` function for hVVy with ``hVVi, hVVy2`` Internally, this assigns index values of 0.0, 1.0, 2.0 etc. to the individual values in ``hVVy``, and uses linear interpolation to calculate the values of ``hVVy2`` at the input indices contained in ``hVVi``. """ self._setup_index(vvq, vvi, mode, space) def sort(self, order): """ Sort a `GXVV <geosoft.gxapi.GXVV>`. :param order: :ref:`VV_SORT` :type order: int .. 
versionadded:: 5.1.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._sort(order) def sort_index(self, ivv): """ Sort index `GXVV <geosoft.gxapi.GXVV>` based on a data `GXVV <geosoft.gxapi.GXVV>`. :param ivv: Index `GXVV <geosoft.gxapi.GXVV>` of type INT :type ivv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Create an Index `GXVV <geosoft.gxapi.GXVV>` (of type `GS_LONG <geosoft.gxapi.GS_LONG>`) based on a data `GXVV <geosoft.gxapi.GXVV>`. This index vv can then be used by the IndexOrder method to order a group of `GXVV <geosoft.gxapi.GXVV>`'s. """ self._sort_index(ivv) def sort_index1(self, ivv, ord1): """ Sort index `GXVV <geosoft.gxapi.GXVV>` based on 1 data `GXVV <geosoft.gxapi.GXVV>` - set orders. :param ivv: Index `GXVV <geosoft.gxapi.GXVV>` of type INT :param ord1: :ref:`VV_SORT` :type ivv: GXVV :type ord1: int .. versionadded:: 5.0.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Create an Index `GXVV <geosoft.gxapi.GXVV>` (of type `GS_LONG <geosoft.gxapi.GS_LONG>`) based on a data `GXVV <geosoft.gxapi.GXVV>`. This index vv can then be used by the IndexOrder method to order a group of `GXVV <geosoft.gxapi.GXVV>`'s. The individual VVs may be ordered in ascending or descending order. If the primary `GXVV <geosoft.gxapi.GXVV>` values of two indices are the same, then the secondary `GXVV <geosoft.gxapi.GXVV>` values are compared. If the secondary values are the same, the ternary values are compared, etc. """ self._sort_index1(ivv, ord1) def sort_index2(self, d2_vv, ivv, ord1, ord2): """ Sort index `GXVV <geosoft.gxapi.GXVV>` based on 2 data VVs - set orders. 
:param d2_vv: Secondary Data `GXVV <geosoft.gxapi.GXVV>` :param ivv: Index `GXVV <geosoft.gxapi.GXVV>` of type INT :param ord1: Primary Sort order :ref:`VV_SORT` :param ord2: Secondary Sort order :ref:`VV_SORT` :type d2_vv: GXVV :type ivv: GXVV :type ord1: int :type ord2: int .. versionadded:: 5.0.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Create an Index `GXVV <geosoft.gxapi.GXVV>` (of type `GS_LONG <geosoft.gxapi.GS_LONG>`) based on a data `GXVV <geosoft.gxapi.GXVV>`. This index vv can then be used by the IndexOrder method to order a group of `GXVV <geosoft.gxapi.GXVV>`'s. The individual VVs may be ordered in ascending or descending order. If the primary `GXVV <geosoft.gxapi.GXVV>` values of two indices are the same, then the secondary `GXVV <geosoft.gxapi.GXVV>` values are compared. If the secondary values are the same, the ternary values are compared, etc """ self._sort_index2(d2_vv, ivv, ord1, ord2) def sort_index3(self, d2_vv, d3_vv, ivv, ord1, ord2, ord3): """ Sort index `GXVV <geosoft.gxapi.GXVV>` based on 3 data VVs - set orders. :param d2_vv: Secondary Data `GXVV <geosoft.gxapi.GXVV>` :param d3_vv: Ternary Data `GXVV <geosoft.gxapi.GXVV>` :param ivv: Index `GXVV <geosoft.gxapi.GXVV>` of type INT :param ord1: Primary Sort order :ref:`VV_SORT` :param ord2: Secondary sort order :ref:`VV_SORT` :param ord3: Third Sort order :ref:`VV_SORT` :type d2_vv: GXVV :type d3_vv: GXVV :type ivv: GXVV :type ord1: int :type ord2: int :type ord3: int .. versionadded:: 5.0.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Create an Index `GXVV <geosoft.gxapi.GXVV>` (of type `GS_LONG <geosoft.gxapi.GS_LONG>`) based on a data `GXVV <geosoft.gxapi.GXVV>`. This index vv can then be used by the IndexOrder method to order a group of `GXVV <geosoft.gxapi.GXVV>`'s. 
The individual VVs may be ordered in ascending or descending order. If the primary `GXVV <geosoft.gxapi.GXVV>` values of two indices are the same, then the secondary `GXVV <geosoft.gxapi.GXVV>` values are compared. If the secondary values are the same, the third values are compared, etc """ self._sort_index3(d2_vv, d3_vv, ivv, ord1, ord2, ord3) def sort_index4(self, d2_vv, d3_vv, d4_vv, ivv, ord1, ord2, ord3, ord4): """ Sort index `GXVV <geosoft.gxapi.GXVV>` based on 4 data VVs - set orders. :param d2_vv: Secondary Data `GXVV <geosoft.gxapi.GXVV>` :param d3_vv: Ternary Data `GXVV <geosoft.gxapi.GXVV>` :param d4_vv: Quaternary Data `GXVV <geosoft.gxapi.GXVV>` :param ivv: Index `GXVV <geosoft.gxapi.GXVV>` of type INT :param ord1: Primary Ssort order :ref:`VV_SORT` :param ord2: Secondary Sort order :ref:`VV_SORT` :param ord3: Third Sort order :ref:`VV_SORT` :param ord4: Fourth Sort order :ref:`VV_SORT` :type d2_vv: GXVV :type d3_vv: GXVV :type d4_vv: GXVV :type ivv: GXVV :type ord1: int :type ord2: int :type ord3: int :type ord4: int .. versionadded:: 5.0.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Create an Index `GXVV <geosoft.gxapi.GXVV>` (of type `GS_LONG <geosoft.gxapi.GS_LONG>`) based on a data `GXVV <geosoft.gxapi.GXVV>`. This index vv can then be used by the IndexOrder method to order a group of `GXVV <geosoft.gxapi.GXVV>`'s. The individual VVs may be ordered in ascending or descending order. If the primary `GXVV <geosoft.gxapi.GXVV>` values of two indices are the same, then the secondary `GXVV <geosoft.gxapi.GXVV>` values are compared. If the secondary values are the same, the third values are compared, etc """ self._sort_index4(d2_vv, d3_vv, d4_vv, ivv, ord1, ord2, ord3, ord4) @classmethod def statistics(cls, st, vv): """ Add a `GXVV <geosoft.gxapi.GXVV>` to a `GXST <geosoft.gxapi.GXST>`. 
:param st: `GXST <geosoft.gxapi.GXST>` Handle :param vv: `GXVV <geosoft.gxapi.GXVV>` to add to `GXST <geosoft.gxapi.GXST>` :type st: GXST :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVV._statistics(GXContext._get_tls_geo(), st, vv) def subtract(self, vv_y, vv_z): """ Subtract one `GXVV <geosoft.gxapi.GXVV>` from another: VV_A - VV_B = VV_C :param vv_y: `GXVV <geosoft.gxapi.GXVV>` B :param vv_z: `GXVV <geosoft.gxapi.GXVV>` C (returned), C = A - B :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._subtract(vv_y, vv_z) def swap(self): """ Swaps the bytes of the SHORT, USHORT, LONG, FLOAT and DOUBLE vv's. Other vv's are not affected by this method. This is used primarily with changing the order of bytes for other machine created data. .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._swap() def window(self, min, max, mode): """ Limit the elements of a vv to a range. :param min: Min Val :param max: Max Val :param mode: :ref:`VV_WINDOW` :type min: float :type max: float :type mode: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._window(min, max, mode) def write_xml(self, file, format, decimal): """ Write the `GXVV <geosoft.gxapi.GXVV>` data as an XML object with bytes and formating. :param file: XML file to create :param format: Format :param decimal: Significant digits/decimals :type file: str :type format: int :type decimal: int .. 
versionadded:: 8.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._write_xml(file.encode(), format, decimal) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block def get_data_np(self, start: int, num_elements: int, np_dtype: type(np.dtype)): from .GXNumpy import gs_from_np gs_type = gs_from_np(np_dtype) return np.asarray(self.get_data_array(start, num_elements, gs_type)) def set_data_np(self, start: int, np_array: type(np.ndarray)): from .GXNumpy import gs_from_np gs_type = gs_from_np(np_array.dtype) num_elements = np.prod(np_array.shape) if not np_array.flags['C_CONTIGUOUS']: np_array = np.ascontiguousarray(np_array) self._set_data(start, num_elements, np_array.data.tobytes(), gs_type) def get_data_array(self, start: int, num_elements: int, gs_type: int): return gxapi_cy_extend.GXMemMethods.get_data_array_vv(GXContext._internal_p(), self._internal_handle(), start, num_elements, gs_type) ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXVAU.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVAU(gxapi_cy.WrapVAU): """ GXVAU class. This is not a class. 
These are methods that work on data stored in `GXVA <geosoft.gxapi.GXVA>` objects """ def __init__(self, handle=0): super(GXVAU, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVAU <geosoft.gxapi.GXVAU>` :returns: A null `GXVAU <geosoft.gxapi.GXVAU>` :rtype: GXVAU """ return GXVAU() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def prune(cls, v_ap, vv_r, o): """ Prune values from a `GXVA <geosoft.gxapi.GXVA>` based on reference `GXVA <geosoft.gxapi.GXVA>` :param v_ap: `GXVA <geosoft.gxapi.GXVA>` to prune :param vv_r: Reference `GXVV <geosoft.gxapi.GXVV>` :param o: :ref:`VAU_PRUNE` :type v_ap: GXVA :type vv_r: GXVV :type o: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Pruning will shorten the `GXVA <geosoft.gxapi.GXVA>` by removing values that are either dummy or non-dummy in the reference `GXVA <geosoft.gxapi.GXVA>` """ gxapi_cy.WrapVAU._prune(GXContext._get_tls_geo(), v_ap, vv_r, o) @classmethod def total_vector(cls, xva, yva, zva, tva): """ Calculate total vector for X,Y and Z components :param xva: X Component object :param yva: Y Component object :param zva: Z Component object :param tva: Returned total vector `GXVA <geosoft.gxapi.GXVA>` object :type xva: GXVA :type yva: GXVA :type zva: GXVA :type tva: GXVA .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapVAU._total_vector(GXContext._get_tls_geo(), xva, yva, zva, tva) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXDCOL.rst .. _GXDCOL: GXDCOL class ================================== .. autoclass:: geosoft.gxapi.GXDCOL :members: .. _BRIGHT: BRIGHT constants ----------------------------------------------------------------------- Brightness type .. autodata:: geosoft.gxapi.BRIGHT_ALL :annotation: .. autoattribute:: geosoft.gxapi.BRIGHT_ALL .. autodata:: geosoft.gxapi.BRIGHT_LAYER :annotation: .. autoattribute:: geosoft.gxapi.BRIGHT_LAYER .. _BRIGHTNESS_TYPES: BRIGHTNESS_TYPES constants ----------------------------------------------------------------------- Detrending option .. autodata:: geosoft.gxapi.BRIGHTNESS_ALL :annotation: .. autoattribute:: geosoft.gxapi.BRIGHTNESS_ALL .. autodata:: geosoft.gxapi.BRIGHTNESS_ALL_AND_LAYERS :annotation: .. autoattribute:: geosoft.gxapi.BRIGHTNESS_ALL_AND_LAYERS .. _DCOL_TYPE: DCOL_TYPE constants ----------------------------------------------------------------------- Layer type .. autodata:: geosoft.gxapi.DCOL_TYPE_UNKNOWN :annotation: .. autoattribute:: geosoft.gxapi.DCOL_TYPE_UNKNOWN .. autodata:: geosoft.gxapi.DCOL_TYPE_GRID :annotation: .. autoattribute:: geosoft.gxapi.DCOL_TYPE_GRID .. autodata:: geosoft.gxapi.DCOL_TYPE_SYMBOLS :annotation: .. autoattribute:: geosoft.gxapi.DCOL_TYPE_SYMBOLS .. autodata:: geosoft.gxapi.DCOL_TYPE_VOXEL :annotation: .. autoattribute:: geosoft.gxapi.DCOL_TYPE_VOXEL .. autodata:: geosoft.gxapi.DCOL_TYPE_VECTOR_VOXEL :annotation: .. 
autoattribute:: geosoft.gxapi.DCOL_TYPE_VECTOR_VOXEL <file_sep>/geosoft/gxapi/GXPDF3D.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPDF3D(gxapi_cy.WrapPDF3D): """ GXPDF3D class. The `GXPDF3D <geosoft.gxapi.GXPDF3D>` class provides the ability to create 3D PDFs. """ def __init__(self, handle=0): super(GXPDF3D, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPDF3D <geosoft.gxapi.GXPDF3D>` :returns: A null `GXPDF3D <geosoft.gxapi.GXPDF3D>` :rtype: GXPDF3D """ return GXPDF3D() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def render(cls, mview, file_name, resolution, no_clipping): """ Render a voxel, voxsurf and/or gensurf to pdf :param mview: `GXMVIEW <geosoft.gxapi.GXMVIEW>` handle :param file_name: Filename :param resolution: Resolution :param no_clipping: Noclipping :type mview: GXMVIEW :type file_name: str :type resolution: int :type no_clipping: int .. 
versionadded:: 6.4.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapPDF3D._render(GXContext._get_tls_geo(), mview, file_name.encode(), resolution, no_clipping) @classmethod def render_to_page(cls, mview, file_name, page_number, resolution, no_clip): """ Render a voxel, voxsurf and/or gensurf to a specified page on a pdf :param mview: `GXMVIEW <geosoft.gxapi.GXMVIEW>` handle :param file_name: Filename :param page_number: Page number :param resolution: Resolution :param no_clip: Noclipping :type mview: GXMVIEW :type file_name: str :type page_number: int :type resolution: int :type no_clip: int .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapPDF3D._render_to_page(GXContext._get_tls_geo(), mview, file_name.encode(), page_number, resolution, no_clip) @classmethod def export_2d(cls, input_map, output_file, create_layersin_pdf, geospatial_pdf, open_pdf): """ Export a 2D map to a PDF file. :param input_map: Input map file :param output_file: Output PDF file :param create_layersin_pdf: Create layers in PDF :param geospatial_pdf: Geospatial PDF :param open_pdf: Open PDF after export :type input_map: str :type output_file: str :type create_layersin_pdf: int :type geospatial_pdf: int :type open_pdf: int .. 
versionadded:: 8.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapPDF3D._export_2d(GXContext._get_tls_geo(), input_map.encode(), output_file.encode(), create_layersin_pdf, geospatial_pdf, open_pdf) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXSTORAGEPROJECT.rst .. _GXSTORAGEPROJECT: GXSTORAGEPROJECT class ================================== .. autoclass:: geosoft.gxapi.GXSTORAGEPROJECT :members: <file_sep>/geosoft/gxapi/GXWA.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXWA(gxapi_cy.WrapWA): """ GXWA class. The `GXWA <geosoft.gxapi.GXWA>` class enables you to access and write data to ASCII files. """ def __init__(self, handle=0): super(GXWA, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXWA <geosoft.gxapi.GXWA>` :returns: A null `GXWA <geosoft.gxapi.GXWA>` :rtype: GXWA """ return GXWA() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def puts(self, str_val): """ Writes a string to the file. :param str_val: String to write :type str_val: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._puts(str_val.encode()) @classmethod def create(cls, file, append): """ Creates an ASCII file to write to. :param file: Name of the File :param append: :ref:`WA_OPEN` :type file: str :type append: int :returns: `GXWA <geosoft.gxapi.GXWA>` Handle :rtype: GXWA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** ANSI Encoding is assumed, See `create_ex <geosoft.gxapi.GXWA.create_ex>` to override this. """ ret_val = gxapi_cy.WrapWA._create(GXContext._get_tls_geo(), file.encode(), append) return GXWA(ret_val) @classmethod def create_ex(cls, file, append, encode): """ Creates an ASCII file to write to. :param file: Name of the File :param append: :ref:`WA_OPEN` :param encode: :ref:`WA_ENCODE` :type file: str :type append: int :type encode: int :returns: `GXWA <geosoft.gxapi.GXWA>` Handle :rtype: GXWA .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Before version 6.2. text in on the GX API level were handled as characters in the current ANSI code page defining how characters above ASCII 127 would be displayed. 6.2. introduced Unicode in the core montaj engine that greatly increased the number of symbols that can be used. The :ref:`WA_ENCODE` constants were introduce that controls how text are written to files on disk with the `GXWA <geosoft.gxapi.GXWA>` class. """ ret_val = gxapi_cy.WrapWA._create_ex(GXContext._get_tls_geo(), file.encode(), append, encode) return GXWA(ret_val) @classmethod def create_sbf(cls, sbf, file, append): """ Creates an ASCII file to write to in an `GXSBF <geosoft.gxapi.GXSBF>`. 
:param sbf: Storage :param file: Name of the File :param append: :ref:`WA_OPEN` :type sbf: GXSBF :type file: str :type append: int :returns: `GXWA <geosoft.gxapi.GXWA>` Handle :rtype: GXWA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** See sbf.gxh. ANSI Encoding is assumed, See `create_sbf_ex <geosoft.gxapi.GXWA.create_sbf_ex>` to override this. """ ret_val = gxapi_cy.WrapWA._create_sbf(GXContext._get_tls_geo(), sbf, file.encode(), append) return GXWA(ret_val) @classmethod def create_sbf_ex(cls, sbf, file, append, encode): """ Creates an ASCII file to write to in an `GXSBF <geosoft.gxapi.GXSBF>`. :param sbf: Storage :param file: Name of the File :param append: :ref:`WA_OPEN` :param encode: :ref:`WA_ENCODE` :type sbf: GXSBF :type file: str :type append: int :type encode: int :returns: `GXWA <geosoft.gxapi.GXWA>` Handle :rtype: GXWA .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Also see sbf.gxh Before version 6.2. text in on the GX API level were handled as characters in the current ANSI code page defining how characters above ASCII 127 would be displayed. 6.2. introduced Unicode in the core montaj engine that greatly increased the number of symbols that can be used. The :ref:`WA_ENCODE` constants were introduce that controls how text are written to files on disk with the `GXWA <geosoft.gxapi.GXWA>` class. """ ret_val = gxapi_cy.WrapWA._create_sbf_ex(GXContext._get_tls_geo(), sbf, file.encode(), append, encode) return GXWA(ret_val) def new_line(self): """ Forces a new line in the `GXWA <geosoft.gxapi.GXWA>` object. .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._new_line() ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/Grids and Images/grid_statistics_gxvv.py import geosoft.gxapi as gxapi import geosoft.gxpy.gx as gx import geosoft.gxpy.grid as gxgrid # create context gxc = gx.GXpy() # create a gxapi.GXST instance to accumulate statistics stats = gxapi.GXST.create() # open the grid with gxgrid.Grid.open('elevation_surfer.grd(SRF;VER=V7)') as grid: # add data from each row to the stats instance for row in range(grid.ny): stats.data_vv(grid.read_row(row).gxvv) # print statistical properties print('minimum: ', stats.get_info(gxapi.ST_MIN)) print('maximum: ', stats.get_info(gxapi.ST_MAX)) print('mean: ', stats.get_info(gxapi.ST_MEAN)) print('standard deviation:', stats.get_info(gxapi.ST_STDDEV)) <file_sep>/docs/index.rst Python Packages =============== The GX API for Python has two packages: 1. :ref:`geosoft.gxapi <geosoft.gxapi.classes>` exposes the full low-level library of functions available to all languages. 2. :ref:`geosoft.gxpy <gxpy>` is a set of modules that provide a simpler Pythonic abstraction of key parts of the underlying :ref:`geosoft.gxapi <geosoft.gxapi.classes>`. The `geosoft.gxpy source code <https://github.com/GeosoftInc/gxpy/tree/master/geosoft/gxpy>`_ provides a good reference for how to work with the low-level *geosoft.gxapi* modules. 
Reference ========= * `Geosoft Python Documentation <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/44367874/Python+in+GX+Developer>`_ * `Installation and Configuration <https://geosoftgxdev.atlassian.net/wiki/spaces/GXD93/pages/102924426/Python+Installation+and+Configuration>`_ * `Python Tutorial <https://geosoftgxdev.atlassian.net/wiki/spaces/GXD93/pages/103153671/Python+Tutorial+for+Geosoft+GX+Developer>`_ * `Tutorial Examples <https://github.com/GeosoftInc/gxpy/tree/master/examples/tutorial>`_ * `Tests <https://github.com/GeosoftInc/gxpy/tree/master/geosoft/gxpy/tests>`_ * :ref:`genindex` * :ref:`search` <file_sep>/geosoft/gxapi/GXVVEXP.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXVVEXP(gxapi_cy.WrapVVEXP): """ GXVVEXP class. The `GXVVEXP <geosoft.gxapi.GXVVEXP>` class is similar to the `GXIEXP <geosoft.gxapi.GXIEXP>` class, but is used to apply math expressions to `GXVV <geosoft.gxapi.GXVV>` objects. """ def __init__(self, handle=0): super(GXVVEXP, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXVVEXP <geosoft.gxapi.GXVVEXP>` :returns: A null `GXVVEXP <geosoft.gxapi.GXVVEXP>` :rtype: GXVVEXP """ return GXVVEXP() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def add_vv(self, vv, var): """ This method adds a `GXVV <geosoft.gxapi.GXVV>` to the `GXVVEXP <geosoft.gxapi.GXVVEXP>` object with a variable name. 
:param vv: `GXVV <geosoft.gxapi.GXVV>` to add :param var: Variable name :type vv: GXVV :type var: str .. versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._add_vv(vv, var.encode()) @classmethod def create(cls): """ This method creates an `GXVVEXP <geosoft.gxapi.GXVVEXP>` object. :returns: `GXVVEXP <geosoft.gxapi.GXVVEXP>` Object :rtype: GXVVEXP .. versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapVVEXP._create(GXContext._get_tls_geo()) return GXVVEXP(ret_val) def do_formula(self, formula, unused): """ This method runs a formula on the grids. :param formula: Formula :param unused: Legacy parameter, no longer used. :type formula: str :type unused: int .. versionadded:: 6.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._do_formula(formula.encode(), unused) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXVULCAN.rst .. _GXVULCAN: GXVULCAN class ================================== .. autoclass:: geosoft.gxapi.GXVULCAN :members: .. _BLOCK_MODEL_VARIABLE_TYPE: BLOCK_MODEL_VARIABLE_TYPE constants ----------------------------------------------------------------------- Which variables to return from sReadBlockModelVariableInfo .. autodata:: geosoft.gxapi.BLOCK_MODEL_NUMERIC_VARIABLE :annotation: .. autoattribute:: geosoft.gxapi.BLOCK_MODEL_NUMERIC_VARIABLE .. autodata:: geosoft.gxapi.BLOCK_MODEL_STRING_VARIABLE :annotation: .. 
autoattribute:: geosoft.gxapi.BLOCK_MODEL_STRING_VARIABLE <file_sep>/geosoft/gxapi/GXNumpy.py import geosoft.gxapi as gxa import numpy as np def gs_from_np(dtype): dtype = np.dtype(dtype) if dtype == np.byte: return gxa.GS_BYTE elif dtype == np.ubyte: return gxa.GS_UBYTE elif dtype == np.int16: return gxa.GS_SHORT elif dtype == np.uint16: return gxa.GS_USHORT elif dtype == np.int32: return gxa.GS_LONG elif dtype == np.uint32: return gxa.GS_ULONG elif dtype == np.int64: return gxa.GS_LONG64 elif dtype == np.uint64: return gxa.GS_ULONG64 elif dtype == np.float32: return gxa.GS_FLOAT elif dtype == np.float64: return gxa.GS_DOUBLE else: raise gxa.GXAPIError("Numpy array type does not map to one of the supported GS_TYPES"); <file_sep>/geosoft/gxapi/GXTRANSFORMLAYER.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXTRANSFORMLAYER(gxapi_cy.WrapTRANSFORMLAYER): """ GXTRANSFORMLAYER class. Object to interface with GMSYS 3D view objects that supports transforming layer. """ def __init__(self, handle=0): super(GXTRANSFORMLAYER, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXTRANSFORMLAYER <geosoft.gxapi.GXTRANSFORMLAYER>` :returns: A null `GXTRANSFORMLAYER <geosoft.gxapi.GXTRANSFORMLAYER>` :rtype: GXTRANSFORMLAYER """ return GXTRANSFORMLAYER() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def end(self, applyChanges): """ End interactive editing for selected grid layer in gmsys. 
:param applyChanges: Apply changes to layer. :type applyChanges: bool .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._end(applyChanges) def cancel_(self): """ Cancel changes done in the transform layer .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._cancel_() def undo(self): """ Undo one step of editing in the transform layer .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._undo() def redo(self): """ Redo one step of editing in the transform layer .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._redo() def can_undo(self): """ Can perform undo on the transform layer :rtype: int .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._can_undo() return ret_val def can_redo(self): """ Can perform redo on the transform layer :rtype: int .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" ret_val = self._can_redo() return ret_val def save_to_new_layer_grid(self, sGrid): """ Save changes to a new grid :param sGrid: output grid path :type sGrid: str .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._save_to_new_layer_grid(sGrid.encode()) def apply_constant_transform(self, elevation): """ Apply constant transform to the transform layer :param elevation: change in elevation to apply :type elevation: float .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._apply_constant_transform(elevation) def select_node(self, nodeIdx): """ Select or deselect a node by its index :param nodeIdx: node index :type nodeIdx: int .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._select_node(nodeIdx) def clear_node_selection(self): """ Clear the section status of every node .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" self._clear_node_selection() ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXSTRINGS.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSTRINGS(gxapi_cy.WrapSTRINGS): """ GXSTRINGS class. The `GXSTRINGS <geosoft.gxapi.GXSTRINGS>` class is used for displaying digitization tools for interpretations """ def __init__(self, handle=0): super(GXSTRINGS, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSTRINGS <geosoft.gxapi.GXSTRINGS>` :returns: A null `GXSTRINGS <geosoft.gxapi.GXSTRINGS>` :rtype: GXSTRINGS """ return GXSTRINGS() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def launch_digitization_ui(cls, stringfile, feature_guid): """ Launch Digitization modeless window :param stringfile: String file :param feature_guid: Definition guid :type stringfile: str :type feature_guid: str .. 
versionadded:: 7.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ gxapi_cy.WrapSTRINGS._launch_digitization_ui(GXContext._get_tls_geo(), stringfile.encode(), feature_guid.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/tests/test_surface.py import unittest import os import numpy as np import geosoft.gxapi as gxapi import geosoft.gxpy.system as gsys import geosoft.gxpy.surface as gxsurf import geosoft.gxpy.vox as gxvox import geosoft.gxpy.group as gxgrp import geosoft.gxpy.spatialdata as gxspd import geosoft.gxpy.view as gxview import geosoft.gxpy.map as gxmap import geosoft.gxpy.vv as gxvv import geosoft.gxpy.viewer as gxviewer import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.geometry as gxgm from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'testvoxset.zip'), folder=cls._gx.temp_folder()) cls.vox_file = os.path.join(cls.folder, 'test.geosoft_voxel') cls.sfile = gxsurf.SurfaceDataset.vox_surface(gxvox.Vox.open(cls.vox_file), (0.005, 0.01, 0.02), overwrite=True).file_name @classmethod def tearDownClass(cls): cls.tearDownGXPYTest() gxsurf.delete_files(cls.sfile) def test_surfaceProperties(self): self.start() try: with gxsurf.SurfaceDataset.open(self.sfile) as surfdataset: self.assertEqual(surfdataset.name, 'test') self.assertEqual(surfdataset.file_name, 'test.geosoft_surface') self.assertEqual(surfdataset.surface_count, 3) self.assertEqual(str(surfdataset.coordinate_system), 'NAD83 / UTM zone 20N') surface_name_list = surfdataset.surface_name_list self.assertTrue('Isosurface 0.02' in surface_name_list) 
self.assertTrue('Isosurface 0.005' in surface_name_list) self.assertTrue('Isosurface 0.01' in surface_name_list) self.assertEqual(surfdataset.surface_guid('Isosurface 0.02'), '{ABCDEF02-2345-6789-6945-2301E0BC0A89}') self.assertEqual(surfdataset.surface_dict[surfdataset.surface_guid('Isosurface 0.02')], 'Isosurface 0.02') for surf in surfdataset: self.assertTrue(surf.verticies_count > 0) surf = surfdataset[2] self.assertEqual(surf.name, 'Isosurface 0.02') self.assertEqual(surf.verticies_count, 21) self.assertEqual(surf.faces_count, 26) self.assertEqual(surf.component_count, 1) self.assertEqual(surf.render_color.rgb, (255, 255, 0)) self.assertEqual(surf.render_opacity, 1.) self.assertEqual(surf.render_style, gxsurf.STYLE_SMOOTH) finally: gxsurf.delete_files('test') def test_new(self): self.start() with gxsurf.SurfaceDataset.open(self.sfile) as surfdataset: with gxsurf.SurfaceDataset.new('new', temp=True) as newsurf: nfn = newsurf.file_name for surf in surfdataset: newsurf.add_surface(surf) with gxsurf.SurfaceDataset.open(nfn) as surfdataset: self.assertEqual(surfdataset.surface_count, 3) self.assertEqual(str(surfdataset.coordinate_system), 'NAD83 / UTM zone 20N') surface_name_list = surfdataset.surface_name_list self.assertTrue('Isosurface 0.01' in surface_name_list) self.assertTrue('Isosurface 0.02' in surface_name_list) self.assertEqual(surfdataset.surface_dict[surfdataset.surface_guid('Isosurface 0.02')], 'Isosurface 0.02') for surf in surfdataset: self.assertTrue(surf.verticies_count > 0) surf = surfdataset['Isosurface 0.02'] self.assertEqual(surf.name, 'Isosurface 0.02') self.assertEqual(surf.verticies_count, 21) self.assertEqual(surf.faces_count, 26) self.assertEqual(surf.component_count, 1) self.assertEqual(surf.render_color.rgb, (255, 255, 0)) self.assertEqual(surf.render_opacity, 1.) 
self.assertEqual(surf.render_style, gxsurf.STYLE_SMOOTH) comp = surf.computed_properties() self.assertEqual(comp['components'], 1) self.assertEqual(comp['verticies'], 21) self.assertEqual(comp['edges'], 46) self.assertEqual(comp['triangles'], 26) self.assertEqual(comp['inconsistent'], 0) self.assertEqual(comp['invalid'], 0) self.assertEqual(comp['intersect'], 0) f = surf.mesh().faces_vv() v = surf.mesh().verticies_vv() f1, f2, f3 = f vx, vy, vz = v self.assertEqual(len(vx), surf.verticies_count) self.assertEqual(len(vy), surf.verticies_count) self.assertEqual(len(vz), surf.verticies_count) self.assertEqual(len(f1), surf.faces_count) self.assertEqual(len(f2), surf.faces_count) self.assertEqual(len(f3), surf.faces_count) f = surf.mesh().faces v = surf.mesh().verticies self.assertEqual(len(f), surf.faces_count) self.assertEqual(len(v), surf.verticies_count) def test_new_named(self): self.start() try: with gxsurf.SurfaceDataset.open(self.sfile) as surfdataset: with gxsurf.SurfaceDataset.new('new') as newsurf: self.assertEqual(newsurf.name, 'new') for surf in surfdataset: newsurf.add_surface(surf) finally: gxsurf.delete_files('new') with gxsurf.SurfaceDataset.open('billy', file_name=self.sfile) as surfdataset: self.assertEqual(surfdataset.name, 'billy') def test_temp(self): self.start() fn = gxsurf.SurfaceDataset.vox_surface(gxvox.Vox.open(self.vox_file), 0.01, color=gxgrp.C_GREY, opacity=0.5, temp=True).file_name with gxsurf.SurfaceDataset.open(fn) as surfdataset: with gxsurf.SurfaceDataset.new('new', temp=True) as newsurf: self.assertEqual(newsurf.name, 'new') temp_fn = newsurf.file_name for surf in surfdataset: newsurf.add_surface(surf) with gxsurf.SurfaceDataset.open(temp_fn) as surfdataset: self.assertEqual(surfdataset.surface_count, 1) self.assertEqual(str(surfdataset.coordinate_system), 'NAD83 / UTM zone 20N') surface_name_list = surfdataset.surface_name_list self.assertEqual(surface_name_list[0], 'Isosurface 0.01') 
self.assertEqual(surfdataset.surface_guid('Isosurface 0.01'), '{ABCDEF00-2345-6789-6745-2301DEBC0A89}') self.assertEqual(surfdataset.surface_dict[surfdataset.surface_guid('Isosurface 0.01')], 'Isosurface 0.01') self.assertEqual(surfdataset.surface_guid('{ABCDEF00-2345-6789-6745-2301DEBC0A89}'), '{ABCDEF00-2345-6789-6745-2301DEBC0A89}') for surf in surfdataset: self.assertTrue(surf.verticies_count > 0) surf = surfdataset['Isosurface 0.01'] self.assertEqual(surf.name, 'Isosurface 0.01') self.assertEqual(surf.verticies_count, 482) self.assertEqual(surf.faces_count, 855) self.assertEqual(surf.component_count, 1) self.assertEqual(surf.render_color.rgb, (128, 128, 128)) self.assertEqual(surf.render_opacity, 0.5) self.assertEqual(surf.render_style, gxsurf.STYLE_SMOOTH) self.assertEqual(surf.extent_xyz, (440718.65079365077, 6129015.476190476, -954.3756313323975, 441475.0, 6129475.0, 512.5)) def test_copy(self): self.start() # make a copy by copying each surface with gxsurf.SurfaceDataset.new() as new_sd: sd_fn = new_sd.file_name with gxsurf.SurfaceDataset.open(self.sfile) as sd: for s in sd: new_sd.add_surface(s) with gxsurf.SurfaceDataset.open(sd_fn) as sd: self.assertEqual(sd.surface_count, 3) self.assertTrue('Isosurface 0.005' in sd.surface_name_list) self.assertTrue('Isosurface 0.01' in sd.surface_name_list) self.assertTrue('Isosurface 0.02' in sd.surface_name_list) self.assertAlmostEqual(sd['Isosurface 0.01'].render_opacity, 0.6666666666666666) surf = sd['Isosurface 0.01'] self.assertEqual(surf.extent_xyz, (440718.65079365077, 6129015.476190476, -954.3756313323975, 441475.0, 6129475.0, 512.5)) with gxsurf.Surface('maki') as s: s.add_mesh(surf.mesh()) self.assertEqual(s.extent_xyz, (440718.65079365077, 6129015.476190476, -954.3756313323975, 441475.0, 6129475.0, 512.5)) def test_new_mesh(self): self.start() # make a copy by copying each surface with gxsurf.SurfaceDataset.new() as new_sd: sd_fn = new_sd.file_name with gxsurf.SurfaceDataset.open(self.sfile) as sd: for s 
in sd: snew = gxsurf.Surface(s.name) snew.add_mesh(s.mesh(), (gxgrp.C_MAGENTA, 0.25, gxsurf.STYLE_FLAT)) new_sd.add_surface(snew) with gxsurf.SurfaceDataset.open(sd_fn) as sd: self.assertEqual(sd.surface_count, 3) self.assertTrue('Isosurface 0.005' in sd.surface_name_list) self.assertTrue('Isosurface 0.01' in sd.surface_name_list) self.assertTrue('Isosurface 0.02' in sd.surface_name_list) self.assertEqual(sd['Isosurface 0.01'].render_opacity, 0.25) self.assertEqual(sd['Isosurface 0.02'].render_color.cmy, (0, 255, 0)) self.assertEqual(sd['Isosurface 0.01'].render_style, gxsurf.STYLE_FLAT) with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.render(v3d, sd_fn, group_name='billy') self.crc_map(v3d_file) def test_exceptions(self): self.start() verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4]], dtype=np.int32) fn = 'except.geosoft_surface' try: with open(fn, '+w') as f: f.write('maki') self.assertRaises(gxsurf.SurfaceException, gxsurf.SurfaceDataset.new, 'except') finally: gxsurf.delete_files(fn) with gxsurf.SurfaceDataset.new('test', temp=True, coordinate_system='WGS 84') as sd: s = gxsurf.Surface('maki') s.coordinate_system = 'NAD83' self.assertEqual(str(s.coordinate_system), 'NAD83') self.assertRaises(gxsurf.SurfaceException, sd.add_surface, s) fn = gxsurf.SurfaceDataset.vox_surface(gxvox.Vox.open(self.vox_file), 0.01, temp=True).file_name with gxsurf.SurfaceDataset.open(self.sfile) as sd: self.assertFalse(sd.is_new) self.assertTrue(sd.has_surface('Isosurface 0.01')) self.assertRaises(gxsurf.SurfaceException, gxsurf.Surface, 'Isosurface 0.01', 'none', sd) self.assertRaises(gxsurf.SurfaceException, sd.add_surface, gxsurf.Surface('Isosurface 0.01')) self.assertFalse(sd.has_surface('billy')) self.assertRaises(gxsurf.SurfaceException, sd.add_surface, gxsurf.Surface('billy', mesh=(faces, verts))) with gxsurf.SurfaceDataset.new() as new_sd: 
new_sd.add_surface_dataset(sd) self.assertTrue(new_sd.has_surface('Isosurface 0.01')) self.assertRaises(gxsurf.SurfaceException, gxsurf.Surface, 'Isosurface 0.01', 'none', new_sd) self.assertRaises(gxsurf.SurfaceException, new_sd.add_surface, gxsurf.Surface('Isosurface 0.01')) self.assertFalse(new_sd.has_surface('billy')) s = gxsurf.Surface('billy', mesh=(faces, verts)) new_sd.add_surface(s) self.assertTrue(new_sd.has_surface('billy')) with gxsurf.SurfaceDataset.new() as new_sd: new_sd.unit_of_measure = 'nT' new_sd.add_surface_dataset(fn) self.assertTrue(new_sd.has_surface('Isosurface 0.01')) s = new_sd['Isosurface 0.01'] self.assertEqual(s.surface_type, 'ISOSURFACE') self.assertTrue(bool(s.source_dataset)) self.assertEqual(s.unit_of_measure, 'nT') self.assertEqual(s.component_count, 1) new_sd.add_surface(gxsurf.Surface('billy', surface_type='maki', mesh=(faces, verts))) self.assertTrue(new_sd.has_surface('billy')) self.assertEqual(new_sd.surface_guid('billy'), new_sd['billy'].guid) s = new_sd['billy'] self.assertEqual(s.surface_type, 'maki') self.assertEqual(s.source_dataset, '') self.assertEqual(s.unit_of_measure, 'nT') self.assertEqual(s.component_count, 1) self.assertEqual(s.render_color, gxgrp.Color(gxgrp.C_GREY)) with gxsurf.SurfaceDataset.new() as new_sd: new_sd.add_surface_dataset(fn) self.assertTrue(new_sd.has_surface('Isosurface 0.01')) gxsurf.Surface('billy', surface_dataset=new_sd, mesh=gxgm.Mesh((faces, verts))) self.assertTrue(new_sd.has_surface('billy')) def test_render(self): self.start() with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.render(v3d, self.sfile) self.assertRaises(gxsurf.SurfaceException, gxsurf.render, v3d, self.sfile) gxsurf.render(v3d, self.sfile, overwrite=True) gxsurf.render(v3d, self.sfile, group_name='maki') self.crc_map(v3d_file) def test_render_1(self): self.start() with gxsurf.SurfaceDataset.open(self.sfile) as sd: with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.render(v3d, sd, 
group_name='sdataset') self.crc_map(v3d_file) def test_render_2(self): self.start() with gxsurf.SurfaceDataset.open(self.sfile) as sd: surface = sd['Isosurface 0.01'] with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.render(v3d, surface, group_name='surface') self.crc_map(v3d_file) def test_make_my_own(self): self.start() verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4]], dtype=np.int32) with gxsurf.Surface('maki') as s: s.add_mesh(gxgm.Mesh((faces, verts))) s.render_color = gxgrp.C_GREEN s.render_style = gxsurf.STYLE_FLAT with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.render(v3d, s) self.crc_map(v3d_file) def test_make_my_own_1(self): self.start() verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4]], dtype=np.int32) mesh = gxgm.Mesh((faces, verts)) with gxsurf.Surface('maki') as s: s.add_mesh(mesh) s.render_color = gxgrp.C_BLUE s.render_style = gxsurf.STYLE_SMOOTH s.render_opacity = 0.25 with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.render(v3d, s) self.crc_map(v3d_file) # gxviewer.view_document(v3d_file, wait_for_close=True) def test_make_my_own_2(self): self.start() verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4]], dtype=np.int32) with gxsurf.Surface('maki') as s: s.add_mesh(gxgm.Mesh((faces, verts))) s.render_color = gxgrp.C_RED s.render_style = gxsurf.STYLE_EDGE s.render_opacity = 1 with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name gxsurf.render(v3d, s) self.crc_map(v3d_file) # gxviewer.view_document(v3d_file, wait_for_close=True) def test_fig_map(self): self.start() verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4]], 
dtype=np.int32) with gxsurf.SurfaceDataset.new() as sd: with gxsurf.Surface('maki', surface_dataset=sd) as s: s.add_mesh(gxgm.Mesh((faces, verts))) s.render_color = gxgrp.C_RED s.render_style = gxsurf.STYLE_FLAT s.render_opacity = 1 fig_map = sd.figure_map(features=('NEATLINE',)).file_name self.crc_map(fig_map) # gxviewer.view_document(fig_map, wait_for_close=True) def test_fig_map_legend(self): self.start() verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4]], dtype=np.int32) with gxsurf.SurfaceDataset.new() as sd: with gxsurf.Surface('maki 0', surface_dataset=sd) as s: s.add_mesh(gxgm.Mesh((faces, verts))) s.render_color = gxgrp.C_RED s.render_style = gxsurf.STYLE_FLAT s.render_opacity = 1 with gxsurf.Surface('maki 1', surface_dataset=sd) as s: verts[::, 2] *= 0.5 s.add_mesh(gxgm.Mesh((faces, verts))) s.render_color = gxgrp.C_LT_GREEN s.render_style = gxsurf.STYLE_FLAT s.render_opacity = 1 with gxsurf.Surface('Light-blue surface, with an extremely long name to test margin settings', surface_dataset=sd) as s: verts[::, 2] *= 0.5 s.add_mesh(gxgm.Mesh((faces, verts))) s.render_color = gxgrp.C_LT_BLUE s.render_style = gxsurf.STYLE_FLAT s.render_opacity = 1 fig_map = sd.figure_map().file_name self.crc_map(fig_map) # gxviewer.view_document(fig_map, wait_for_close=True) def test_fig_map_cs(self): self.start() verts = np.array([[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]], dtype=np.float64) faces = np.array([[0, 1, 2], [0, 2, 3], [3, 2, 4]], dtype=np.int32) mesh = gxgm.Mesh((faces, verts)) with gxsurf.SurfaceDataset.new() as sd: sd.coordinate_system = gxcs.Coordinate_system.local() with gxsurf.Surface('maki_red', surface_dataset=sd) as s: cs = gxcs.Coordinate_system.local(azimuth=20) s.add_mesh(mesh, coordinate_system=cs) s.render_color = gxgrp.C_RED with gxsurf.Surface('maki_green', surface_dataset=sd, coordinate_system=gxcs.Coordinate_system.local(azimuth=-20, 
elevation=3)) as s: s.add_mesh(mesh) s.render_color = gxgrp.C_GREEN with gxsurf.Surface('maki_blue', surface_dataset=sd) as s: s.add_mesh(mesh, coordinate_system=gxcs.Coordinate_system.local(azimuth=0, origin=(1.5, -2), elevation=-3)) s.render_color = gxgrp.C_BLUE fig_map = sd.figure_map(features=('NEATLINE',)).file_name self.crc_map(fig_map) # gxviewer.view_document(fig_map, wait_for_close=True) def test_doc_sample(self): self.start() faces = [[0, 1, 2], [0, 2, 3], [3, 2, 4]] verticies = [[0, 0, 0], [5, 0, 0], [5, 5, 0], [0, 3, 5], [2.5, 2, 10]] mesh = gxgm.Mesh((faces, verticies)) faces = np.array(faces, dtype=np.int32) verticies = np.array(verticies, dtype=np.float64) mesh = gxgm.Mesh((faces, verticies)) f1vv, f2vv, f3vv = gxvv.vvset_from_np(faces) xvv, yvv, zvv = gxvv.vvset_from_np(verticies) mesh = gxgm.Mesh(((f1vv, f2vv, f3vv), (xvv, yvv, zvv))) ############################################################################################### if __name__ == '__main__': unittest.main() <file_sep>/docs/GXARCDH.rst .. _GXARCDH: GXARCDH class ================================== .. autoclass:: geosoft.gxapi.GXARCDH :members: <file_sep>/docs/GXPLY.rst .. _GXPLY: GXPLY class ================================== .. autoclass:: geosoft.gxapi.GXPLY :members: .. _PLY_CLIP: PLY_CLIP constants ----------------------------------------------------------------------- Polygon clipping mode .. autodata:: geosoft.gxapi.PLY_CLIP_NO_INTERSECT :annotation: .. autoattribute:: geosoft.gxapi.PLY_CLIP_NO_INTERSECT .. autodata:: geosoft.gxapi.PLY_CLIP_INTERSECT :annotation: .. autoattribute:: geosoft.gxapi.PLY_CLIP_INTERSECT .. autodata:: geosoft.gxapi.PLY_CLIP_A_IN_B :annotation: .. autoattribute:: geosoft.gxapi.PLY_CLIP_A_IN_B .. autodata:: geosoft.gxapi.PLY_CLIP_B_IN_A :annotation: .. autoattribute:: geosoft.gxapi.PLY_CLIP_B_IN_A .. _PLY_POINT_CLIP: PLY_POINT_CLIP constants ----------------------------------------------------------------------- Polygon point clipping mode .. 
autodata:: geosoft.gxapi.PLY_POINT_CLIP_INSIDE :annotation: .. autoattribute:: geosoft.gxapi.PLY_POINT_CLIP_INSIDE .. autodata:: geosoft.gxapi.PLY_POINT_CLIP_OUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.PLY_POINT_CLIP_OUTSIDE .. autodata:: geosoft.gxapi.PLY_POINT_CLIP_ERROR :annotation: .. autoattribute:: geosoft.gxapi.PLY_POINT_CLIP_ERROR .. _PLY_LINE_CLIP: PLY_LINE_CLIP constants ----------------------------------------------------------------------- Polygon line clip indicator .. autodata:: geosoft.gxapi.PLY_LINE_CLIP_INSIDE :annotation: .. autoattribute:: geosoft.gxapi.PLY_LINE_CLIP_INSIDE .. autodata:: geosoft.gxapi.PLY_LINE_CLIP_NO_INTERSECT :annotation: .. autoattribute:: geosoft.gxapi.PLY_LINE_CLIP_NO_INTERSECT .. autodata:: geosoft.gxapi.PLY_LINE_CLIP_OUTSIDE :annotation: .. autoattribute:: geosoft.gxapi.PLY_LINE_CLIP_OUTSIDE .. autodata:: geosoft.gxapi.PLY_LINE_CLIP_ERROR :annotation: .. autoattribute:: geosoft.gxapi.PLY_LINE_CLIP_ERROR <file_sep>/geosoft/gxpy/tests/test_metadata.py import os import unittest import geosoft.gxapi as gxapi import geosoft.gxpy.system as gsys import geosoft.gxpy.grid as gxgrd import geosoft.gxpy.metadata as gxmeta from base import GXPYTest class Test(GXPYTest): def test_meta(self): self.start() geometa = gxapi.GXMETA.create() folder, files = gsys.unzip(os.path.join(os.path.dirname(self._test_case_py), 'testgrids.zip'), folder=self._gx.temp_folder()) g1f = os.path.join(folder, 'test_grid_1.grd') with gxgrd.Grid.open(g1f) as g: g.gximg.get_meta(geometa) meta = gxmeta.Metadata(geometa) self.assertTrue(isinstance(meta.gxmeta, gxapi.GXMETA)) self.assertTrue(meta.has_node('Geosoft')) self.assertTrue(meta.has_node('Geosoft/Data')) self.assertTrue(meta.has_node('geosoft/data')) self.assertFalse(meta.has_node('Geosoft/maki')) self.assertTrue(meta.has_attribute('Geosoft/Data/boundary')) self.assertFalse(meta.has_attribute('Geosoft/Data/huh')) meta.node_token('Maki') self.assertTrue(meta.has_node('maki')) 
meta.node_token('maki/data/more') self.assertTrue(meta.has_node('/maki/data')) self.assertTrue(meta.has_node('/maki/data/more')) meta.node_token('maki/crazy/deep/stuff/is/here') self.assertTrue(meta.has_node('/maki/crazy')) self.assertTrue(meta.has_node('/maki/crazy/deep')) self.assertTrue(meta.has_node('/maki/crazy/deep/stuff/is/here')) meta.set_attribute('maki/crazy/hello/deep/stuff/is/here/deep_hello', 'hi there in a deep voice') meta.set_attribute('maki/crazy/crazy_hi', 'hi there') self.assertEqual(meta.get_attribute('maki/crazy/crazy_hi'), 'hi there') meta.set_attribute('maki/weirdo/nested/stuff/hello', 'hi there weirdo') self.assertTrue(meta.has_node('/maki/weirdo/nested/stuff')) self.assertEqual(meta.get_attribute('maki/weirdo/nested/stuff/hello'), 'hi there weirdo') self.assertEqual(meta.meta_dict()['Maki']['weirdo']['nested']['stuff']['hello'], 'hi there weirdo') meta.set_attribute('/maki/data/more/scale', 45) self.assertEqual(meta.get_attribute('/maki/data/more/scale'), 45) meta.set_attribute('/maki/data/more/unit_of_measure', 'cm') self.assertEqual(meta.get_attribute('/maki/data/more/unit_of_measure'), 'cm') meta.set_attribute('/maki/data/more/float', 4.995) self.assertEqual(meta.get_attribute('/maki/data/more/float'), 4.995) meta.set_attribute('/maki/data/more/array', ['a', 1, 4.95]) self.assertEqual(meta.get_attribute('/maki/data/more/array'), ['a', 1, 4.95]) meta.set_attribute('/maki/data/more/tuple', ('ian', 1, 2, 3.5)) self.assertEqual(meta.get_attribute('/maki/data/more/tuple'), ['ian', 1, 2, 3.5]) meta.set_attribute('/json/examples/dict', {'a': 25, 'b': (1, 2)}) d = meta.get_attribute('json/examples/dict') self.assertEqual(d['a'], 25) self.assertEqual(d['b'], [1, 2]) md = meta.meta_dict() self.assertTrue('Maki' in md) self.assertTrue('float' in md['Maki']['data']['more']) self.assertTrue('json' in md) def test_meta_dict(self): self.start() m = {} self.assertTrue(gxmeta.get_node_from_meta_dict('maki/rider', m) is None) 
self.assertTrue(gxmeta.get_node_from_meta_dict('', m) is None) gxmeta.set_node_in_meta_dict('geosoft', m, [1, 2, 3]) self.assertEqual(tuple(gxmeta.get_node_from_meta_dict('geosoft', m)), (1, 2, 3)) self.assertRaises(gxmeta.MetadataException, gxmeta.set_node_in_meta_dict, 'geosoft/dataset/sample/children', m, ('a', 1.8, 'b')) gxmeta.set_node_in_meta_dict('geosoft/dataset/sample/children', m, ('a', 1.8, 'b'), replace=True) self.assertEqual(gxmeta.get_node_from_meta_dict('geosoft/dataset/sample/children', m), ('a', 1.8, 'b')) def test_update_dict(self): self.start() meta = gxmeta.Metadata() meta.set_attribute('geosoft/array', [1, 2, 3]) self.assertEqual(tuple(meta.get_attribute('geosoft/array')), (1, 2, 3)) meta.update_dict({'maki': 'someone'}) self.assertEqual(tuple(meta.get_attribute('geosoft/array')), (1, 2, 3)) self.assertEqual(meta.get_attribute('maki'), 'someone') meta.update_dict({'maki': 'someone'}, trunk_node='geosoft') self.assertEqual(meta.get_attribute('geosoft/maki'), 'someone') meta.update_dict(meta.meta_dict(), trunk_node='bob') self.assertEqual(meta.get_attribute('bob/maki'), 'someone') self.assertEqual(meta.get_attribute('bob/geosoft/maki'), 'someone') meta.update_dict(meta.meta_dict(), trunk_node='geosoft/bobs/your/uncle') self.assertEqual(meta.get_attribute('geosoft/bobs/your/uncle/maki'), 'someone') self.assertEqual(tuple(meta.get_attribute('geosoft/bobs/your/uncle/geosoft/array')), (1, 2, 3)) ############################################################################################## if __name__ == '__main__': unittest.main()<file_sep>/docs/GXLTB.rst .. _GXLTB: GXLTB class ================================== .. autoclass:: geosoft.gxapi.GXLTB :members: .. _LTB_CASE: LTB_CASE constants ----------------------------------------------------------------------- Case handling of `GXLTB <geosoft.gxapi.GXLTB>` strings .. autodata:: geosoft.gxapi.LTB_CASE_INSENSITIVE :annotation: .. autoattribute:: geosoft.gxapi.LTB_CASE_INSENSITIVE .. 
autodata:: geosoft.gxapi.LTB_CASE_SENSITIVE :annotation: .. autoattribute:: geosoft.gxapi.LTB_CASE_SENSITIVE .. _LTB_CONLST: LTB_CONLST constants ----------------------------------------------------------------------- Matching types .. autodata:: geosoft.gxapi.LTB_CONLST_EXACT :annotation: .. autoattribute:: geosoft.gxapi.LTB_CONLST_EXACT .. autodata:: geosoft.gxapi.LTB_CONLST_ANY :annotation: .. autoattribute:: geosoft.gxapi.LTB_CONLST_ANY .. _LTB_DELIM: LTB_DELIM constants ----------------------------------------------------------------------- Types of `GXLTB <geosoft.gxapi.GXLTB>` Delimiters .. autodata:: geosoft.gxapi.LTB_DELIM_SPACE :annotation: .. autoattribute:: geosoft.gxapi.LTB_DELIM_SPACE .. autodata:: geosoft.gxapi.LTB_DELIM_COMMA :annotation: .. autoattribute:: geosoft.gxapi.LTB_DELIM_COMMA .. autodata:: geosoft.gxapi.LTB_DELIM_SPACECOMMA :annotation: .. autoattribute:: geosoft.gxapi.LTB_DELIM_SPACECOMMA .. _LTB_TYPE: LTB_TYPE constants ----------------------------------------------------------------------- Types of `GXLTB <geosoft.gxapi.GXLTB>` Headers .. autodata:: geosoft.gxapi.LTB_TYPE_HEADER :annotation: .. autoattribute:: geosoft.gxapi.LTB_TYPE_HEADER .. autodata:: geosoft.gxapi.LTB_TYPE_NOHEADER :annotation: .. autoattribute:: geosoft.gxapi.LTB_TYPE_NOHEADER <file_sep>/geosoft/gxpy/surface.py """ Geosoft surfaces :Classes: :`SurfaceDataset`: Geosoft_surface dataset, contains zero or more `Surface` instances :`Surface`: Surfaces defined by one or more `geosoft.gxpy.geometry.Mesh` instances :Constants: :MODE_READ: `geosoft.gxpy.spatialdata.MODE_READ` :MODE_READWRITE: `geosoft.gxpy.spatialdata.MODE_READWRITE` :MODE_NEW: `geosoft.gxpy.spatialdata.MODE_NEW` :MODE_APPEND: MODE_READWRITE :STYLE_FLAT: `geosoft.gxapi.SURFACERENDER_FILL` :STYLE_SMOOTH: `geosoft.gxapi.SURFACERENDER_SMOOTH` :STYLE_EDGE: `geosoft.gxapi.SURFACERENDER_EDGES` .. seealso:: `geosoft.gxpy.spatialdata`, `geosoft.gxapi.GXSURFACE`, `geosoft.gxapi.GXSURFACEITEM` .. 
note:: Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_surface.py>`_ """ import os import numpy as np from collections.abc import Sequence import geosoft import geosoft.gxapi as gxapi from . import gx from . import coordinate_system as gxcs from . import utility as gxu from . import spatialdata as gxspd from . import view as gxview from . import vox as gxvox from . import vv as gxvv from . import group as gxg from . import map as gxmap from . import geometry as gxgeo __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) class SurfaceException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.surface`. """ pass def _surface_file_name(name): ext = os.path.splitext(name)[1].lower() if ext == '.geosoft_surface': return name return name + '.geosoft_surface' def _surface_name(name): basename = os.path.basename(name) return os.path.splitext(basename)[0] def delete_files(surface_name): """ Delete all files associated with this surface dataset. :param surface_name: name of the surface dataset file .. versionadded:: 9.3.1 """ gxspd.delete_files(_surface_file_name(surface_name)) # constants MODE_READ = gxspd.MODE_READ MODE_READWRITE = gxspd.MODE_READWRITE MODE_NEW = gxspd.MODE_NEW MODE_APPEND = MODE_READWRITE STYLE_FLAT = gxapi.SURFACERENDER_FILL STYLE_SMOOTH = gxapi.SURFACERENDER_SMOOTH STYLE_EDGE = gxapi.SURFACERENDER_EDGES class SurfaceDataset(gxspd.SpatialData, Sequence): """ Surface dataset, which contains one or more `Surface` instances. A Surface dataset is stored in a .geosoft_surface file. Iterating yields `Surface` instances. :Constructors: ======================= ============================================ :meth:`open` open an existing surface dataset :meth:`new` create a new surface dataset :meth:`vox_surface` isosurfaces created from a vox. ======================= ============================================ .. 
versionadded:: 9.3.1 """ def _close(self): if hasattr(self, '_open'): if self._open: self._gxsurface = None self._surfaces = None super(SurfaceDataset, self)._close() pass def __init__(self, name, file_name=None, gxsurface=None, mode=None, overwrite=False): if file_name is None: file_name = _surface_file_name(name) self._file_name = file_name self._name = _surface_name(name) super().__init__(name=self._name, file_name=self._file_name, mode=mode, overwrite=overwrite, gxobj=gxsurface) self._gxsurface = gxsurface self._next = 0 self._surfaces = None self._new = False def __len__(self): return self.surface_count def __iter__(self): self._refresh_surfaces() return self def __next__(self): if self._next >= self.surface_count: self._next = 0 raise StopIteration else: surface = self.__getitem__(self._next) self._next += 1 return surface def __getitem__(self, item): if isinstance(item, int): item = self.surface_name_list[item] gxsurfaceitem = self._gxsurface.get_surface_item(self.surface_guid(item)) return Surface(gxsurfaceitem, surface_dataset=self, render_properties=None) @classmethod def open(cls, name, file_name=None, mode=MODE_READ, gxapi_surface=None): """ Open an existing surface dataset. :param name: name of the surface dataset. :param file_name: file name of the surface dataset file, default is name.geosoft_surface. :param mode: open mode: MODE_READ or MODE_READWRITE :param gxapi_surface: `geosoft.gxapi.GXSURFACE` instance, or None to open the named surface file. .. versionadded:: 9.3.1 """ if file_name is None: file_name = _surface_file_name(name) if gxapi_surface is None: gxapi_surface = gxapi.GXSURFACE.open(_surface_file_name(file_name), mode) surface_dataset = cls(name, file_name=file_name, gxsurface=gxapi_surface, mode=mode) surface_dataset._new = False return surface_dataset @classmethod def new(cls, name=None, temp=False, overwrite=False, coordinate_system=None): """ Create a new surface dataset. :param name: dataset name, or a path to a persistent file. 
If None a temporary dataset is created. :param temp: True to create a temporary surface dataset. :param overwrite: True to overwrite existing persistent surface dataset file :param coordinate_system: coordinate system as required to create from `geosoft.gxpy.Coordinate_system` .. versionadded:: 9.3.1 """ if name is None: temp = True if temp: file_name = gx.gx().temp_file('.geosoft_surface') overwrite = True if name is None: name = _surface_name(file_name) else: file_name = _surface_file_name(name) if os.path.exists(file_name) and not overwrite: raise SurfaceException(_t('\'{}\' exists. Use overwrite=True to overwrite existing surface dataset file.'). format(file_name)) gxsurface = gxapi.GXSURFACE.create(file_name, gxcs.Coordinate_system(coordinate_system).gxipj) surface_dataset = cls(name, file_name=file_name, gxsurface=gxsurface, mode=MODE_NEW) surface_dataset._new = True return surface_dataset @classmethod def vox_surface(cls, vox, surfaces, name=None, file_name=None, color=None, opacity=None, mode=MODE_NEW, temp=False, overwrite=False): """ Add voxel isosurfaces to a surface dataset. :param vox: `geosoft.gxpy.Vox` instance :param surfaces: surface value, or a list of surface values :param name: Surface dataset name. The default will be vox.name. :param file_name: optional file name if different from name root, ignored if temp=True :param color: surface color, or a list of colors, For a list of surfaces, the default colour of each surface cycles through a list of (C_GREY, C_GREEN, C_YELLOW, C_BLUE, C_MAGENTA, C_RED, C_CYAN). If only one surface the default color is `gxgroup.C_GREY`. :param opacity: opacity 0 to 1. (1. is opaque), or a list of opacities. For a list of surfaces default opacity is applied in increasingly opaque steps in the order of the surface list, such that the 5'th and higher surfaces are opaque. :param mode: MODE_NEW to create a new surface dataset. MODE_APPEND to append to existing dataset. 
:param temp: True to create a temporary surface dataset. :param overwrite: True to overwrite if dataset exists and MODE_NEW. .. versionadded:: 9.3.1 """ if name is None: name = vox.name if temp: file_name = gx.gx().temp_file('.geosoft_surface') overwrite = True elif file_name is None: file_name = _surface_file_name(name) if mode == MODE_NEW: if os.path.exists(file_name) and not overwrite: raise SurfaceException(_t("Cannot overwrite existing surface dataset: {}").format(file_name)) gxspd.delete_files(file_name) if not hasattr(surfaces, '__iter__'): surfaces = (surfaces,) if color is None: color = (gxg.C_GREY, gxg.C_GREEN, gxg.C_YELLOW, gxg.C_BLUE, gxg.C_MAGENTA, gxg.C_RED, gxg.C_CYAN) elif not hasattr(color, '__iter__'): color = (color,) if opacity is None: opacity = [] max_transparent_surfaces = min(gxg.MAX_TRANSPARENT, len(surfaces)) for i in range(max_transparent_surfaces): opacity.append((i + 1) * (1. / max_transparent_surfaces)) elif not hasattr(opacity, '__iter__'): opacity = (opacity,) transparent_count = 0 # cannot have more than MAX_TRANSPARENT transparent surfaces with gxview.View_3d.new() as v3d: v3d_file = v3d.file_name for i in range(len(surfaces)): icolor = gxg.Color(color[i % len(color)]) trans = opacity[min(i, len(opacity) - 1)] if trans < 1.: if transparent_count > gxg.MAX_TRANSPARENT: trans = 1. else: transparent_count += 1 gxapi.GXMVU.plot_voxel_surface2(v3d.gxview, vox.gxvox, surfaces[i], icolor.int_value, 1., trans, file_name) gxview.delete_files(v3d_file) sd = SurfaceDataset.open(name, file_name=file_name) sd.unit_of_measure = vox.unit_of_measure return sd def _refresh_surfaces(self): if self._surfaces is None: gxlst = gxapi.GXLST.create(1024) self.gxsurface.get_surface_items(gxlst) self._surfaces = gxu.dict_from_lst(gxlst, ordered=True) @property def is_new(self): """True if this is a new surface dataset. 
Can only add to new datasets.""" return self._new @property def gxsurface(self): """`geosoft.gxapi.GXSURFACE` instance handle""" return self._gxsurface @property def surface_dict(self): """dictionary of surfaces keyed by GUID, values are the surface names""" self._refresh_surfaces() return self._surfaces @property def surface_name_list(self): """list of surface names""" return list(self.surface_dict.values()) @property def surface_count(self): """number of surfaces in the dataset""" return len(self.surface_dict) def surface_guid(self, name): """ Return the guid of a surface based on the name. :param name: Name of the surface. The first matching surface name is returned. :return: guid of the surface, or None if the surface not found .. versionadded:: 9.3.1 """ # just return the name if it is already a guid if name in self.surface_dict: return name name = name.lower() for guid, sname in self.surface_dict.items(): if sname.lower() == name: return guid return None def has_surface(self, name): """returns True if this surface name or guid exists in the surface dataset.""" if self.surface_guid(name) is None: return False return True def add_surface(self, surface): """ Add a surface to the surface dataset. One can only add surfaces to new datasets. :param surface: `Surface` instance to add .. 
versionadded:: 9.3.1 """ if not self.is_new: raise SurfaceException(_t('Cannot add new surfaces to an existing surface dataset.')) if self.has_surface(surface.name): raise SurfaceException(_t('Cannot overwrite existing surface {}').format(surface.name)) if surface.faces_count == 0: raise SurfaceException(_t('Cannot add an empty surface.')) if gxcs.is_known(surface.coordinate_system): if not gxcs.is_known(self.coordinate_system): self.coordinate_system = surface.coordinate_system elif surface.coordinate_system != self.coordinate_system: raise SurfaceException('Coordinate systems are not the same.') self._gxsurface.add_surface_item(surface.gxsurfaceitem) self._surfaces = None def add_surface_dataset(self, surface_dataset): """ Add the surfaces from an existing surface dataset. :param surface_dataset: `SurfaceDataset` instance or a file name .. versionadded:: 9.3.1 """ if isinstance(surface_dataset, str): surface_dataset = SurfaceDataset.open(surface_dataset) for s in surface_dataset: self.add_surface(s) def view_3d(self, file_name=None, overwrite=True, plane_2d=False): """ Create a 3d view (`geosoft.gxpy.view.View_3d`) that contains this `SurfaceDataset`. :param file_name: the name of a file for the 3d view. If None a temporary 3d view created. :param overwrite: True to overwrite existing file :param plane_2d: True to keep the 2D plane. Only keep it if you intend to draw on it otherwise a grey plane will appear in the view. .. versionadded:: 9.3 """ v3d = gxview.View_3d.new(file_name, overwrite=overwrite) render(v3d, self) if not plane_2d: v3d.delete_plane(0) return v3d def figure_map(self, file_name=None, overwrite=True, title=None, legend_label=None, features=('LEGEND', 'NEATLINE'), **kwargs): """ Create a figure view file from an SurfaceDataset. :param file_name: the name of a file for the 3d view. If None a temporary 3d view created. 
:param overwrite: True to overwrite existing file :param title: Title added to the image :param legend_label: If plotting a legend make this the legend title. The default is the unit_of_measure. :param features: list of features to place on the map, default is ('LEGEND', 'NEATLINE') =========== ========================================= 'LEGEND' draw a surface legend 'NEATLINE' draw a neat-line around the image =========== ========================================= :param kwargs: passed to `geosoft.gxpy.map.Map.new` .. versionadded:: 9.3 """ # uppercase features, use a dict so we pop things we use and report error if isinstance(features, str): features = (features,) feature_list = {} if features is not None: for f in features: feature_list[f.upper()] = None features = list(feature_list.keys()) # setup margins set_right_margin = False if not ('margins' in kwargs): bottom_margin = 1.0 if title: bottom_margin += len(title.split('\n')) * 1.0 kwargs['margins'] = (1, 0.4, bottom_margin, 1) set_right_margin = True gmap = gxmap.Map.figure((0, 0, 100, 100), file_name=file_name, overwrite=overwrite, features=[], title=title, **kwargs) leg_width = None if 'LEGEND' in features: with gxview.View.open(gmap, "data") as v: box = gxgeo.Point2(((0, 0), (0.8, 0.5))) * v.units_per_map_cm box += gxgeo.Point((v.extent_maximum_xy[0] + 0.2 * v.units_per_map_cm, v.centroid_xy[1])) text_height = box.dimension_xy[1] * 0.7 text_ref = gxgeo.Point((box.extent_maximum_xy[0] + 0.2 * v.units_per_map_cm, box.centroid_xy[1])) space = gxgeo.Point((0, -0.6 * v.units_per_map_cm)) leg_width = 0. 
with gxg.Draw(v, self.name + '_legend') as g: g.text_def = gxg.Text_def(height=text_height, color='K') for i in range(self.surface_count): surf = self[self.surface_count - i - 1] g.rectangle(box, pen=g.new_pen(line_thick=0.025 * v.units_per_map_cm, line_color='K', fill_color=surf.render_color)) g.text(surf.name, text_ref, reference=gxg.REF_CENTER_LEFT) textent = g.text_extent(surf.name) if textent.dimension_xy[0] > leg_width: leg_width = textent.dimension_xy[0] box -= space text_ref -= space leg_width += text_ref.extent_minimum_xy[0] - box.extent_minimum_xy[0] leg_width /= v.units_per_map_cm if 'NEATLINE' in features: if set_right_margin and leg_width: with gxview.View.open(gmap, "base") as v: leg_width *= v.units_per_map_cm area = gxgeo.Point2((v.extent.p0, v.extent.p1 + gxgeo.Point((leg_width, 0.)))) v.locate(area=area.extent_xy) gmap.surround() area = gxview.View.open(gmap, gmap.current_data_view).extent_map_cm() area = (area[0] * 10., area[1] * 10., area[2] * 10., area[3] * 10.) gmap.create_linked_3d_view(self.view_3d(), area_on_map=area) return gmap class Surface(gxspd.SpatialData, Sequence): """ A single surface, which contains one or more `geosoft.gxpy.geometry.Mesh` instances. :param surface: surface name or a `geosoft.gxapi.GXSURFACEITEM` instance. :param surface_type: surface type as a descriptive name, such as "ISOSURFACE" :param surface_dataset: optional `SurfaceDataset` instance in which to place a new `Surface` :param mesh: optional `geosoft.gxpy.geometry.Mesh` instance, or tuple (faces, verticies). :param coordinate_system: mesh coordinate system, which will become the surface coordinate system. :param render_properties: (color, opacity, style), default is (`geosoft.gxpy.group.C_GREY`, 1.0, `STYLE_FLAT`) Iterating yields component `geosoft.gxpy.geometry.Mesh` instances. .. 
versionadded:: 9.3.1 """ def __enter__(self): return self def __exit__(self, xtype, xvalue, xtraceback): self.__del__() def __del__(self): if hasattr(self, '_close'): self._close() def _close(self): if hasattr(self, '_open'): if self._open: if self._add and self._surface_dataset is not None: if gxcs.is_known(self._surface_dataset.coordinate_system): self.coordinate_system = self._surface_dataset.coordinate_system elif gxcs.is_known(self.coordinate_system): self._surface_dataset.coordinate_system = self.coordinate_system self._surface_dataset.add_surface(self) self._gxsurfaceitem = None self._surface_dataset = None self._cs = None super(Surface, self)._close() def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return str((self.guid, self.name, self.surface_type)) def __init__(self, surface, surface_type='none', surface_dataset=None, mesh=None, coordinate_system=None, render_properties=None): if isinstance(surface, str): if surface_dataset and surface_dataset.has_surface(surface): raise SurfaceException(_t('Cannot overwrite existing surface ({}) in dataset ({}).') .format(surface, surface_dataset.name)) surface = gxapi.GXSURFACEITEM.create(surface_type, surface) self._new_surface = True self._add = True # always add new surfaces else: self._new_surface = False self._add = False self._gxsurfaceitem = surface self._surface_dataset = surface_dataset self._properties = None self._computed_properties = None self._next = 0 self.render_properties = render_properties super().__init__(gxobj=self._gxsurfaceitem) if coordinate_system is None and surface_dataset is not None: if gxcs.is_known(surface_dataset.coordinate_system): coordinate_system = surface_dataset.coordinate_system self.coordinate_system = coordinate_system self._extent = None if mesh: if not isinstance(mesh, gxgeo.Mesh): mesh = gxgeo.Mesh(mesh, name=self.name + '_0') self.add_mesh(mesh, render_properties=render_properties) elif self.faces_count: for m in self: 
self._update_extent(m.extent_xyz) def __len__(self): return self.component_count def __iter__(self): return self def __next__(self): if self._next >= self.component_count: self._next = 0 raise StopIteration else: item = self._next self._next += 1 return self.__getitem__(item) def __getitem__(self, item): return self.mesh(item) def _update_extent(self, ext): if self._extent is None: self._extent = ext else: self._extent = (min(self._extent[0], ext[0]), min(self._extent[1], ext[1]), min(self._extent[2], ext[2]), max(self._extent[3], ext[3]), max(self._extent[4], ext[4]), max(self._extent[5], ext[5])) def _get_mesh_vv(self, component=0): f1 = gxvv.GXvv(dtype=np.int) f2 = gxvv.GXvv(dtype=np.int) f3 = gxvv.GXvv(dtype=np.int) vx = gxvv.GXvv() vy = gxvv.GXvv() vz = gxvv.GXvv() self._gxsurfaceitem.get_mesh(component, vx.gxvv, vy.gxvv, vz.gxvv, f1.gxvv, f2.gxvv, f3.gxvv) return (f1, f2, f3), (vx, vy, vz) @property def extent(self): """ Return the surface extent as a `geosoft.gxpy.geometry.Point2` instance. .. versionadded:: 9.3.1 """ pmin = gxgeo.Point((self._extent[0], self._extent[1], self._extent[2])) pmax = gxgeo.Point((self._extent[3], self._extent[4], self._extent[5])) return gxgeo.Point2((pmin, pmax), self.coordinate_system) def properties(self, refresh=False): """ Surface properties from `geosoft.gxapi.GXSURFACEITEM.get_properties_ex`. :param refresh: if True, computed properties will be refreshed on next access. .. 
versionadded:: 9.3.1 """ if refresh: self._properties = None if not self._properties: stype = gxapi.str_ref() name = gxapi.str_ref() source_guid = gxapi.str_ref() source_name = gxapi.str_ref() source_measure = gxapi.float_ref() second_source_guid = gxapi.str_ref() second_source_name = gxapi.str_ref() second_source_option = gxapi.int_ref() second_source_measure = gxapi.float_ref() second_source_measure2 = gxapi.float_ref() self._gxsurfaceitem.get_properties_ex(stype, name, source_guid, source_name, source_measure, second_source_guid, second_source_name, second_source_option, second_source_measure, second_source_measure2) self._properties = {'type': stype.value, 'name': name.value, 'source_guid': source_guid.value, 'source_dataset': source_name.value, 'source_measure': source_measure.value, 'second_source_guid': second_source_guid.value, 'second_source_dataset': second_source_name.value, 'second_source_option': second_source_option.value, 'second_source_measure': second_source_measure.value, 'second_source_measure2': second_source_measure2.value} return self._properties def computed_properties(self, refresh=False): """ Surface properties, see: `geosoft.gxapi.GXSURFACEITEM.compute_extended_info`. :param refresh: if True, computed properties will be refreshed on next access. :returns: dictionary of properties, 'components', 'verticies', edges', 'triangles', 'inconsistent', 'invalid', 'intersect' .. 
versionadded:: 9.3.1 """ if refresh: self._computed_properties = None if not self._computed_properties: comp = gxapi.int_ref() vert = gxapi.int_ref() edge = gxapi.int_ref() trng = gxapi.int_ref() incn = gxapi.int_ref() invd = gxapi.int_ref() intr = gxapi.int_ref() self._gxsurfaceitem.compute_extended_info(comp, vert, edge, trng, incn, invd, intr) self._computed_properties = {'components': comp.value, 'verticies': vert.value, 'edges': edge.value, 'triangles': trng.value, 'inconsistent': incn.value, 'invalid': invd.value, 'intersect': intr.value} return self._computed_properties @property def gxsurfaceitem(self): """the `geosoft.gxapi.GXSURFACEITEM` instance""" return self._gxsurfaceitem @property def guid(self): """The GUID of this surface""" guid = gxapi.str_ref() self._gxsurfaceitem.get_guid(guid) return guid.value @property def name(self): """the name of this surface""" return self.properties()['name'] @property def surface_type(self): """the defined surface type string""" return self.properties()['type'] @property def source_dataset(self): """the source dataset from which this surface was derived""" return self.properties()['source_dataset'] @property def source_measure(self): """the source measure""" return self.properties()['source_measure'] @property def unit_of_measure(self): """the unit of measure for data defined by this surface, often the isosurface value""" if self._surface_dataset: return self._surface_dataset.unit_of_measure source = self.properties()['source_name'] if source: try: return gxvox.Vox.open(source).unit_of_measure except geosoft.gxapi.GXError: pass return '' @property def component_count(self): """number of components to this surface, usually 1""" return self._gxsurfaceitem.num_components() @property def verticies_count(self): """number of verticies""" vert = gxapi.int_ref() tri = gxapi.int_ref() self._gxsurfaceitem.get_geometry_info(vert, tri) return vert.value @property def faces_count(self): """number of triangular faces""" vert = 
gxapi.int_ref() tri = gxapi.int_ref() self._gxsurfaceitem.get_geometry_info(vert, tri) return tri.value @property def render_properties(self): """The rendering properties for this surface as (color, opacity, style). Can be set.""" color = gxapi.int_ref() trans = gxapi.float_ref() style = gxapi.int_ref() self._gxsurfaceitem.get_default_render_properties(color, trans, style) return gxg.Color(color.value), trans.value, style.value @render_properties.setter def render_properties(self, props): if self._new_surface: if props is None: props = (gxg.C_GREY, 1.0, STYLE_FLAT) c, t, s = props if isinstance(c, gxg.Color): c = c.int_value self._gxsurfaceitem.set_default_render_properties(c, t, s) @property def render_color(self): """rendering colour as a `geosoft.gxpy.group.Color` instance""" return self.render_properties[0] @render_color.setter def render_color(self, c): _, t, s = self.render_properties self.render_properties = (c, t, s) @property def render_opacity(self): """group opacity, 0.0 (transparent) to 1.0 (opaque)""" return self.render_properties[1] @render_opacity.setter def render_opacity(self, t): c, _, s = self.render_properties self.render_properties = (c, t, s) @property def render_style(self): """surface rendering style, one of STYLE constants""" return self.render_properties[2] @render_style.setter def render_style(self, s): c, t, _ = self.render_properties self.render_properties = (c, t, s) @property def metadata(self): """Return the parent surface dataset metadata as a dictionary.""" if self._surface_dataset: return self._surface_dataset.metadata else: return {} def mesh(self, component=0): """ Returns a component mesh as `geosoft.gxpy.geometry.Mesh` instance :param component: component number from a multi-component surface :return: (triangle_index_1, triangle_index_2, triangle_index_3), (vertex_x, vertex_y, vertex_z) as `geosoft.gxpy.vv.GXvv` instances .. 
versionadded:: 9.3.1 """ f1 = gxvv.GXvv(dtype=np.int) f2 = gxvv.GXvv(dtype=np.int) f3 = gxvv.GXvv(dtype=np.int) vx = gxvv.GXvv() vy = gxvv.GXvv() vz = gxvv.GXvv() self._gxsurfaceitem.get_mesh(component, vx.gxvv, vy.gxvv, vz.gxvv, f1.gxvv, f2.gxvv, f3.gxvv) return gxgeo.Mesh(((f1, f2, f3), (vx, vy, vz))) def add_mesh(self, mesh, render_properties=None, coordinate_system=None): """ Add a vv mesh to a new surface. :param mesh: mesh as ((f1vv, f2vv, f3vv), (xvv, yvv, zvv)) :param render_properties: (color, opacity, style), where colour is a `geosoft.gxpy.group.Color` instance or a 32-bit Geosoft color integer, opacity is a value between 0. (invisible) and 1. (opaque), and style is STYLE_FLAT, STYLE_SMOOTH or STYLE_EDGE. :param coordinate_system: coordinate system for the verticies, default is the same as the surface :returns: component number, which will always be the last component. .. versionadded:: 9.3 """ if not self._new_surface: raise SurfaceException(_t('Cannot add to an existing surface ({}) in surface dataset ({})') .format(self.name, self._surface_dataset.name)) if not isinstance(mesh, gxgeo.Mesh): mesh = gxgeo.Mesh(mesh) f1vv, f2vv, f3vv = mesh.faces_vv() xvv, yvv, zvv = mesh.verticies_vv() if coordinate_system is None: coordinate_system = self.coordinate_system if self._surface_dataset is not None: required_cs = self._surface_dataset.coordinate_system else: required_cs = self.coordinate_system if coordinate_system != required_cs: gxcs.Coordinate_translate(coordinate_system, required_cs).convert_vv(xvv, yvv, zvv) self._gxsurfaceitem.add_mesh(xvv.gxvv, yvv.gxvv, zvv.gxvv, f1vv.gxvv, f2vv.gxvv, f3vv.gxvv) self._add = True # extent self._update_extent(mesh.extent_xyz) if render_properties: self.render_properties = render_properties return self.component_count - 1 def render(view, surface, group_name=None, overwrite=False): """ Render a surface, surface dataset or surface dataset file in a 3D view. 
:param view: `geosoft.view.View_3d` instance :param surface: `Surface` instance, `SurfaceDataset` instance or a geosoft_surface file name. :param group_name: name for the group, which defaults to the source name :param overwrite: True to overwrite existing group .. note:: For a `Surface` or a `SurfaceDataset` instance a surface dataset file is created with a name constructed from the view name and the fies name: *view_name.group_name.geosoft_surface*. .. versionadded:: 9.3.1 """ if group_name is None: if isinstance(surface, str): group_name = _surface_name(surface) else: group_name = surface.name if view.has_group(group_name) and not overwrite: raise SurfaceException(_t('Cannot overwerwrite existing group: {}').format(group_name)) if not isinstance(surface, str): surface_dataset_file_name = _surface_file_name(view.file_name + '.' + surface.name) with SurfaceDataset.new(surface_dataset_file_name, overwrite=overwrite, coordinate_system=view.coordinate_system) as new_sd: if isinstance(surface, Surface): surface = (surface, ) for s in surface: new_sd.add_surface(s) surface = surface_dataset_file_name gxg.surface_group_from_file(view, surface, group_name=group_name, overwrite=overwrite) <file_sep>/docs/GXEUL3.rst .. _GXEUL3: GXEUL3 class ================================== .. autoclass:: geosoft.gxapi.GXEUL3 :members: .. _EUL3_RESULT: EUL3_RESULT constants ----------------------------------------------------------------------- Euler result types .. autodata:: geosoft.gxapi.EUL3_RESULT_X :annotation: .. autoattribute:: geosoft.gxapi.EUL3_RESULT_X .. autodata:: geosoft.gxapi.EUL3_RESULT_Y :annotation: .. autoattribute:: geosoft.gxapi.EUL3_RESULT_Y .. autodata:: geosoft.gxapi.EUL3_RESULT_DEPTH :annotation: .. autoattribute:: geosoft.gxapi.EUL3_RESULT_DEPTH .. autodata:: geosoft.gxapi.EUL3_RESULT_BACKGROUND :annotation: .. autoattribute:: geosoft.gxapi.EUL3_RESULT_BACKGROUND .. autodata:: geosoft.gxapi.EUL3_RESULT_DEPTHERROR :annotation: .. 
autoattribute:: geosoft.gxapi.EUL3_RESULT_DEPTHERROR .. autodata:: geosoft.gxapi.EUL3_RESULT_LOCATIONERROR :annotation: .. autoattribute:: geosoft.gxapi.EUL3_RESULT_LOCATIONERROR .. autodata:: geosoft.gxapi.EUL3_RESULT_WINDOWX :annotation: .. autoattribute:: geosoft.gxapi.EUL3_RESULT_WINDOWX .. autodata:: geosoft.gxapi.EUL3_RESULT_WINDOWY :annotation: .. autoattribute:: geosoft.gxapi.EUL3_RESULT_WINDOWY <file_sep>/geosoft/gxapi/GXGUI.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXGUI(gxapi_cy.WrapGUI): """ GXGUI class. These are graphical functions that typically create a dialog-style window for a specific function. Examples include file import wizards, and the Histogram and Scatter tools. """ def __init__(self, handle=0): super(GXGUI, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXGUI <geosoft.gxapi.GXGUI>` :returns: A null `GXGUI <geosoft.gxapi.GXGUI>` :rtype: GXGUI """ return GXGUI() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create_wnd_from_hwnd(cls, p1): """ Create a standard WND object from an HWND. :param p1: HWND Handle :type p1: int :returns: x - WND object created :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** The object returned must be destroyed by the destroy object call. """ ret_val = gxapi_cy.WrapGUI._create_wnd_from_hwnd(GXContext._get_tls_geo(), p1) return ret_val @classmethod def get_parent_wnd(cls): """ Get the current parent window :returns: Parent window. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapGUI._get_parent_wnd(GXContext._get_tls_geo()) return ret_val @classmethod def get_printer_lst(cls, lst): """ Gets a list of all printers. :param lst: List to place into :type lst: GXLST .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._get_printer_lst(GXContext._get_tls_geo(), lst) @classmethod def get_window_state(cls): """ Retrieve the current state of the Oasis montaj window :returns: :ref:`WINDOW_STATE` :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapGUI._get_window_state(GXContext._get_tls_geo()) return ret_val @classmethod def set_window_state(cls, state): """ Changes the state of the Oasis montaj window :param state: :ref:`WINDOW_STATE` :type state: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" gxapi_cy.WrapGUI._set_window_state(GXContext._get_tls_geo(), state) @classmethod def get_window_position(cls, left, top, right, bottom, state): """ Get the Oasis montaj window's position state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`WINDOW_STATE` :type left: int_ref :type top: int_ref :type right: int_ref :type bottom: int_ref :type state: int_ref .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ left.value, top.value, right.value, bottom.value, state.value = gxapi_cy.WrapGUI._get_window_position(GXContext._get_tls_geo(), left.value, top.value, right.value, bottom.value, state.value) @classmethod def set_window_position(cls, left, top, right, bottom, state): """ Get the Oasis montaj window's position and state :param left: Window left position :param top: Window top position :param right: Window right position :param bottom: Window bottom position :param state: Window state :ref:`WINDOW_STATE` :type left: int :type top: int :type right: int :type bottom: int :type state: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._set_window_position(GXContext._get_tls_geo(), left, top, right, bottom, state) @classmethod def get_client_window_area(cls, min_x, min_y, max_x, max_y): """ Get the location of the Oasis montaj client window. :param min_x: X Min returned (0) :param min_y: Y Min returned (0) :param max_x: X Max returned (width) :param max_y: Y Max returned (height) :type min_x: int_ref :type min_y: int_ref :type max_x: int_ref :type max_y: int_ref .. 
versionadded:: 9.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Returns the coordinates of the client window area (where MDI document windows are placed). The returned coordinates are 0,0 for the minimum X and Y and the window width width and height for the maximum X and Y. """ min_x.value, min_y.value, max_x.value, max_y.value = gxapi_cy.WrapGUI._get_client_window_area(GXContext._get_tls_geo(), min_x.value, min_y.value, max_x.value, max_y.value) @classmethod def grid_stat_hist(cls, grid_name): """ Display Histogram of grid :param grid_name: Name of the grid to get stats from :type grid_name: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._grid_stat_hist(GXContext._get_tls_geo(), grid_name.encode()) @classmethod def grid_stat_hist5(cls, grid_name_1, grid_name_2, grid_name_3, grid_name_4, grid_name_5): """ Display Histogram of up to 5 different grids :param grid_name_1: Name of the first grid to get stats from (required) :param grid_name_2: Optional name of the second grid to get stats from :param grid_name_3: Optional name of the third grid to get stats from :param grid_name_4: Optional name of the fourth grid to get stats from :param grid_name_5: Optional name of the fifth grid to get stats from :type grid_name_1: str :type grid_name_2: str :type grid_name_3: str :type grid_name_4: str :type grid_name_5: str .. versionadded:: 2021.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" gxapi_cy.WrapGUI._grid_stat_hist5(GXContext._get_tls_geo(), grid_name_1.encode(), grid_name_2.encode(), grid_name_3.encode(), grid_name_4.encode(), grid_name_5.encode()) @classmethod def voxel_stat_hist(cls, vox_name): """ Display Histogram of Voxel :param vox_name: Name of the Voxel to get stats from :type vox_name: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._voxel_stat_hist(GXContext._get_tls_geo(), vox_name.encode()) @classmethod def color_form(cls, col, no_col): """ Select a color. :param col: Color (modified) :param no_col: Ask about `C_TRANSPARENT <geosoft.gxapi.C_TRANSPARENT>` if white is selected (1: yes, 0: no)? :type col: int_ref :type no_col: int :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Color value is set on input, and new value returned. If the input color type is `C_TRANSPARENT <geosoft.gxapi.C_TRANSPARENT>`, then the color is set to white, if any other type is input the output is guaranteed to be of the same type. If ``no_col`` is 1, then on exit, if white is selected, the user is prompted: 'Do you want white (Yes) or "None" (No) ?' and the color is converted as requested. If this is not the case, the `C_TRANSPARENT <geosoft.gxapi.C_TRANSPARENT>` is converted to white (if "Ok" is selected) and no choice is offered. """ ret_val, col.value = gxapi_cy.WrapGUI._color_form(GXContext._get_tls_geo(), col.value, no_col) return ret_val @classmethod def color_transform(cls, itr, st): """ Define an `GXITR <geosoft.gxapi.GXITR>` of up to 8 zones. 
:param itr: `GXITR <geosoft.gxapi.GXITR>` object (modified) :param st: `GXST <geosoft.gxapi.GXST>` object (input) :type itr: GXITR :type st: GXST :returns: 0 if OK 1 if user cancels :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The statistics object is required in order to determine data ranges, percentiles, etc. Create it using `GXST.create_exact <geosoft.gxapi.GXST.create_exact>`, or be sure to enable histogram statistics. """ ret_val = gxapi_cy.WrapGUI._color_transform(GXContext._get_tls_geo(), itr, st) return ret_val @classmethod def coord_sys_wizard(cls, ipj, editable, mode, source_label, source): """ Launch the coordinate system definition/display `GXGUI <geosoft.gxapi.GXGUI>`. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` object :param editable: Editable `GXIPJ <geosoft.gxapi.GXIPJ>` (0:No, 1:Yes) :param mode: :ref:`COORDSYS_MODE` :param source_label: Data source label :param source: Data source :type ipj: GXIPJ :type editable: int :type mode: int :type source_label: str :type source: str :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Launches the new GX.Net single-dialog coordinate system definition dialog. The input `GXIPJ <geosoft.gxapi.GXIPJ>` is modified on return if OK is selected (and the editable parameter is 1). The "Data source label" and "Data source" is information displayed in the dialog for the user to know where the `GXIPJ <geosoft.gxapi.GXIPJ>` came from (e.g. 
"Grid: X.grd") """ ret_val = gxapi_cy.WrapGUI._coord_sys_wizard(GXContext._get_tls_geo(), ipj, editable, mode, source_label.encode(), source.encode()) return ret_val @classmethod def coord_sys_wizard_licensed(cls, ipj, editable, mode, source_label, source): """ Launch the coordinate system definition/display `GXGUI <geosoft.gxapi.GXGUI>`. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` object :param editable: Editable `GXIPJ <geosoft.gxapi.GXIPJ>` (0:No, 1:Yes) :param mode: :ref:`COORDSYS_MODE` :param source_label: Data source label :param source: Data source :type ipj: GXIPJ :type editable: int :type mode: int :type source_label: str :type source: str :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Same as `coord_sys_wizard_licensed <geosoft.gxapi.GXGUI.coord_sys_wizard_licensed>` but will always be editable. The other method is not editable in the viewer while this one is. """ ret_val = gxapi_cy.WrapGUI._coord_sys_wizard_licensed(GXContext._get_tls_geo(), ipj, editable, mode, source_label.encode(), source.encode()) return ret_val @classmethod def coord_sys_wizard_grid(cls, ipj, target_ipj, editable, mode, source_label, source, nx, ny, x0, y0, dx, dy, rot): """ Launch the coordinate system definition/display `GXGUI <geosoft.gxapi.GXGUI>`. :param ipj: Original grid `GXIPJ <geosoft.gxapi.GXIPJ>` object :param target_ipj: Source (target) grid `GXIPJ <geosoft.gxapi.GXIPJ>` object. This is supplied so the modified orientation can be calculated and displayed. 
:param editable: Editable `GXIPJ <geosoft.gxapi.GXIPJ>` (0:No, 1:Yes) :param mode: :ref:`COORDSYS_MODE` :param source_label: Data source label :param source: Data source :param nx: Number of cells in X :param ny: Number of cells in Y :param x0: Grid orgin X (grid's own coordinate system) :param y0: Grid orgin Y (grid's own coordinate system) :param dx: Grid cell size X :param dy: Grid cell size Y :param rot: Grid rotation angle (degrees CCW) :type ipj: GXIPJ :type target_ipj: GXIPJ :type editable: int :type mode: int :type source_label: str :type source: str :type nx: int :type ny: int :type x0: float_ref :type y0: float_ref :type dx: float_ref :type dy: float_ref :type rot: float_ref :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Same as `coord_sys_wizard_licensed <geosoft.gxapi.GXGUI.coord_sys_wizard_licensed>` but allows the original grid info to be adjusted when projections on section or oriented plan grids are modified. In the tool, it is the "modified" orientation required to keep the edited projection's grid in the same location as it was in the target projection. """ ret_val, x0.value, y0.value, dx.value, dy.value, rot.value = gxapi_cy.WrapGUI._coord_sys_wizard_grid(GXContext._get_tls_geo(), ipj, target_ipj, editable, mode, source_label.encode(), source.encode(), nx, ny, x0.value, y0.value, dx.value, dy.value, rot.value) return ret_val @classmethod def database_type(cls, name, type): """ Returns the type string of an external DAO database. :param name: File Name :param type: Database type (returned) :type name: str :type type: str_ref :returns: 0 - OK -1 - Cancel terminates on error :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the file extension is "mdb", then an MSJET (Microsoft Access) database is assumed. If the file name is "ODBC", then "ODBC" is returned as the type. Otherwise, a dialog appears listing the other valid DAO database types. """ ret_val, type.value = gxapi_cy.WrapGUI._database_type(GXContext._get_tls_geo(), name.encode(), type.value.encode()) return ret_val @classmethod def datamine_type(cls, file, type): """ Returns the type of a Datamine file. :param file: File Name (for display purposes only) :type file: str :type type: int_ref :returns: 0 - OK -1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Often, a Datamine file can be opened a number of different ways (e.g. as a string file or a as wireframe (point) file. The following function checks to see if there is a choice to be made between types supported by Geosoft for import. If not, it just returns the original type "hint" from Datamine. If there is a choice, it puts up a dialog with the choices for the user to pick from. Do a bit-wise AND with the returned type to determine the file type (or the type selected). Currently supported overlapping types/choices: dmString dmWireframePoint """ ret_val, type.value = gxapi_cy.WrapGUI._datamine_type(GXContext._get_tls_geo(), file.encode(), type.value) return ret_val @classmethod def export_xyz_template_editor(cls, db, templ, size): """ Allows the user to edit XYZ export template using a complex dialog. The Template name may change during editing. 
:param db: Database :param templ: Name of the Template (can change) :param size: Size of the Template :type db: GXDB :type templ: str :type size: int :returns: 0 - OK 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Only uses the current `GXDB <geosoft.gxapi.GXDB>`. This function does not exactly work as supposed to. Instead of using the `GXEDB <geosoft.gxapi.GXEDB>` handle passed to it, it only will use the current `GXDB <geosoft.gxapi.GXDB>`. Please see ExportXYXTemplateEditorEx_GUI for an updated function. """ ret_val = gxapi_cy.WrapGUI._export_xyz_template_editor(GXContext._get_tls_geo(), db, templ.encode(), size) return ret_val @classmethod def export_xyz_template_editor_ex(cls, edb, templ): """ Allows the user to edit an XYZ export template using a complex dialog. The template name may change during editing. :param edb: `GXEDB <geosoft.gxapi.GXEDB>` object :param templ: Template name :type edb: GXEDB :type templ: str_ref :returns: 0 - OK 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, templ.value = gxapi_cy.WrapGUI._export_xyz_template_editor_ex(GXContext._get_tls_geo(), edb, templ.value.encode()) return ret_val @classmethod def file_filter_index(cls, filter): """ Return the FILE_FILTER_XXX value for a file filter string. :param filter: Input filter string :type filter: str :returns: :ref:`FILE_FILTER`, -1 if not found :rtype: int .. 
versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** For example, if "Database (``*.gdb``)" is input, then the `FILE_FILTER_GDB <geosoft.gxapi.FILE_FILTER_GDB>` value is returned. """ ret_val = gxapi_cy.WrapGUI._file_filter_index(GXContext._get_tls_geo(), filter.encode()) return ret_val @classmethod def gcs_datum_warning_shp(cls, data_source, ipj): """ Launch the GCS Datum Warning dialog for `GXSHP <geosoft.gxapi.GXSHP>` files. :param data_source: Data source :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` object :type data_source: str :type ipj: GXIPJ :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Runs the GCS Warning dialog with one data source """ ret_val = gxapi_cy.WrapGUI._gcs_datum_warning_shp(GXContext._get_tls_geo(), data_source.encode(), ipj) return ret_val @classmethod def gcs_datum_warning_shpdb_ex(cls, source_lst, datum_from_lst, ldtlst, db): """ Launch the GCS Datum Warning dialog for `GXSHP <geosoft.gxapi.GXSHP>` files (Database). :param source_lst: Data source names :param datum_from_lst: Corresponding datum names :param ldtlst: Returned corresponding LDT names :type source_lst: GXLST :type datum_from_lst: GXLST :type ldtlst: GXLST :type db: GXDB :returns: 0 - Ok 1 - Cancel :rtype: int .. 
versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Runs the GCS Warning dialog with multiple data sources (Database) """ ret_val = gxapi_cy.WrapGUI._gcs_datum_warning_shpdb_ex(GXContext._get_tls_geo(), source_lst, datum_from_lst, ldtlst, db) return ret_val @classmethod def gcs_datum_warning_shp_ex(cls, source_lst, datum_from_lst, ldtlst, mview): """ Launch the GCS Datum Warning dialog for `GXSHP <geosoft.gxapi.GXSHP>` files. :param source_lst: Data source names :param datum_from_lst: Corresponding datum names :param ldtlst: Returned corresponding LDT names :type source_lst: GXLST :type datum_from_lst: GXLST :type ldtlst: GXLST :type mview: GXMVIEW :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 7.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Runs the GCS Warning dialog with multiple data sources """ ret_val = gxapi_cy.WrapGUI._gcs_datum_warning_shp_ex(GXContext._get_tls_geo(), source_lst, datum_from_lst, ldtlst, mview) return ret_val @classmethod def get_area_of_interest(cls, min_x, min_y, max_x, max_y, ply, ipj): """ Get the current area of interest from the application. :param min_x: AOI Area Min X :param min_y: AOI Area Min Y :param max_x: AOI Area Max X :param max_y: AOI Area Max y :param ply: AOI Bounding `GXPLY <geosoft.gxapi.GXPLY>` (Filled if available, otherwise empty) :param ipj: AOI Bounding `GXIPJ <geosoft.gxapi.GXIPJ>` :type min_x: float_ref :type min_y: float_ref :type max_x: float_ref :type max_y: float_ref :type ply: GXPLY :type ipj: GXIPJ :returns: :ref:`AOI_RETURN_STATE` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** Depending on what is currently visible on screen and the defined coordinate system the user may be prompted by a warning and optionaly cancel the process. """ ret_val, min_x.value, min_y.value, max_x.value, max_y.value = gxapi_cy.WrapGUI._get_area_of_interest(GXContext._get_tls_geo(), min_x.value, min_y.value, max_x.value, max_y.value, ply, ipj) return ret_val @classmethod def get_area_of_interest_3d(cls, min_x, min_y, min_z, max_x, max_y, max_z, ply, ipj): """ Get the current area of interest from the application in 3D. :param min_x: AOI Area Min X :param min_y: AOI Area Min Y :param min_z: AOI Area Min Z :param max_x: AOI Area Max X :param max_y: AOI Area Max y :param max_z: AOI Area Max Z :param ply: AOI Bounding `GXPLY <geosoft.gxapi.GXPLY>` (Filled if available, otherwise empty) :param ipj: AOI Bounding `GXIPJ <geosoft.gxapi.GXIPJ>` :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref :type ply: GXPLY :type ipj: GXIPJ :returns: :ref:`AOI_RETURN_STATE` :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Depending on what is currently visible on screen and the defined coordinate system the user may be prompted by a warning and optionaly cancel the process. """ ret_val, min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = gxapi_cy.WrapGUI._get_area_of_interest_3d(GXContext._get_tls_geo(), min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value, ply, ipj) return ret_val @classmethod def get_dat_defaults(cls, flags, open, ext, qual): """ Return the user default extension and qualifier for grids/images. :param flags: :ref:`DAT_TYPE` :param open: :ref:`FILE_FORM` :param ext: Returned default extension (e.g. 
"grd") :param qual: Returned default qualifier (e.g. "GRD") :type flags: int :type open: int :type ext: str_ref :type qual: str_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The default grid/image filters are normally stored in "MONTAJ.DEFAULT_XGD_IN" and "MONTAJ.DEFAULT_XGD_OUT" If no filter is defined, or the filter is not found then "grd" and "GRD" are returned as the default extension and qualifier. """ ext.value, qual.value = gxapi_cy.WrapGUI._get_dat_defaults(GXContext._get_tls_geo(), flags, open, ext.value.encode(), qual.value.encode()) @classmethod def get_file_filter(cls, file_filter, filter, mask, ext, path): """ Return the defined filter, mask, extension and directory for an input filter. :param file_filter: :ref:`FILE_FILTER` :param filter: Returned file filter string :param mask: Returned file mask string :param ext: Returned file extension :param path: :ref:`GS_DIRECTORY` Returned directory. :type file_filter: int :type filter: str_ref :type mask: str_ref :type ext: str_ref :type path: int_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns the four parts of the file filter; e.g. for `FILE_FILTER_GDB <geosoft.gxapi.FILE_FILTER_GDB>` it returns: Filter: "Database (``*.gdb``)" Mask: "``*.gdb``" Extension: "gdb" Directory: "`GS_DIRECTORY_NONE <geosoft.gxapi.GS_DIRECTORY_NONE>`" This function is useful for constuction open/save dialog file filters, especially in GX.Net functions. """ filter.value, mask.value, ext.value, path.value = gxapi_cy.WrapGUI._get_file_filter(GXContext._get_tls_geo(), file_filter, filter.value.encode(), mask.value.encode(), ext.value.encode(), path.value) @classmethod def get_gs_directory(cls, path, dir): """ Return the directory path for value of :ref:`GS_DIRECTORY`. 
:param path: :ref:`GS_DIRECTORY` Returned directory. :param dir: Returned directory path :type path: int :type dir: str_ref .. versionadded:: 7.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Works along with the `get_file_filter <geosoft.gxapi.GXGUI.get_file_filter>` function. Note that most values of FILE_FILTER_XXX will return `GS_DIRECTORY_NONE <geosoft.gxapi.GS_DIRECTORY_NONE>`, and give the current workspace directory. This function is useful for constuction open/save dialog file filters, especially in GX.Net functions. """ dir.value = gxapi_cy.WrapGUI._get_gs_directory(GXContext._get_tls_geo(), path, dir.value.encode()) @classmethod def browse_dir(cls, title, default, dir_path): """ Browses for a specific directory. :param title: Title of the Form :param default: Default path (Can be "") :param dir_path: Result Path Buffer (default on input) :type title: str :type default: str :type dir_path: str_ref :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, dir_path.value = gxapi_cy.WrapGUI._browse_dir(GXContext._get_tls_geo(), title.encode(), default.encode(), dir_path.value.encode()) return ret_val @classmethod def color_transform_ex(cls, itr, st, zones, load_save, file): """ Define an `GXITR <geosoft.gxapi.GXITR>` of up to 12 zones, with file load/save buttons. :param itr: `GXITR <geosoft.gxapi.GXITR>` object (modified) :param st: `GXST <geosoft.gxapi.GXST>` object (input) :param zones: Max number of zones (8 or 12) :param load_save: Show file load/save buttons (TRUE or FALSE)? 
:param file: Default color transform file name :type itr: GXITR :type st: GXST :type zones: int :type load_save: int :type file: str_ref :returns: 0 if OK 1 if user cancels :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The statistics object is required in order to determine data ranges, percentiles, etc. Create it using `GXST.create_exact <geosoft.gxapi.GXST.create_exact>`, or be sure to enable histogram statistics. The color transform file name is used as the default when the save button is pushed, and is updated both after the load and save buttons are pushed by the value input or selected by the user. """ ret_val, file.value = gxapi_cy.WrapGUI._color_transform_ex(GXContext._get_tls_geo(), itr, st, zones, load_save, file.value.encode()) return ret_val @classmethod def cumulative_percent(cls, file, itr): """ Define a percent-based `GXITR <geosoft.gxapi.GXITR>` of up to 12 zones. :param file: Default color transform file name :param itr: `GXITR <geosoft.gxapi.GXITR>` object (returned) :type file: str_ref :type itr: GXITR :returns: 0 if OK 1 if user cancels :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The `GXITR <geosoft.gxapi.GXITR>` values are interpreted as cumulative percent values, using the "PERCENT=1" value in the `GXITR <geosoft.gxapi.GXITR>`'s `GXREG <geosoft.gxapi.GXREG>`. Note that processes using ITRs do not automatically know to convert between percent values and "actual" data values. The `GXREG <geosoft.gxapi.GXREG>` "PERCENT" value is simply a flag to indicate to a user that the values are intended to be in the range from 0 < x < 100. 
The `GXITR <geosoft.gxapi.GXITR>` should not, therefore, be applied directly to data unless that data is already given in percent. If the file name is defined on input, the initial `GXITR <geosoft.gxapi.GXITR>` will be loaded from it. If it is left blank, a default 5-color transform with The color transform file name is used as the default when the save button is pushed, and is updated both after the load and save buttons are pushed by the value input or selected by the user. """ ret_val, file.value = gxapi_cy.WrapGUI._cumulative_percent(GXContext._get_tls_geo(), file.value.encode(), itr) return ret_val @classmethod def dat_file_form(cls, title, default, psz_file_path, type, validation_type, multi): """ Grid and Image file Open/Save Form for Multiple/Single file selections :param title: Title of the Form :param default: Default value :param psz_file_path: Where the file name(s) is returned :param type: :ref:`DAT_TYPE` :param validation_type: :ref:`FILE_FORM` :param multi: Allow Multiple file selections = TRUE Single file selections = FALSE :type title: str :type default: str :type psz_file_path: str_ref :type type: int :type validation_type: int :type multi: int :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Remember to make the string size big enough for multiple file selections. In the case of multiple selections the names will be separated by a semicolon and only the first file will contain the full path. When using the multiple flag on any of these functions please be aware that the string returned will be in the format: drive:\\path1\\path2\\name.grid|name2.grid|name3.grid(QUALIFIERS) All grids are required to be of the same type. 
""" ret_val, psz_file_path.value = gxapi_cy.WrapGUI._dat_file_form(GXContext._get_tls_geo(), title.encode(), default.encode(), psz_file_path.value.encode(), type, validation_type, multi) return ret_val @classmethod def dat_file_form_ex(cls, title, default, psz_file_path, type, validation_type, multi, sort): """ Grid and Image file Open/Save Form for Multiple/Single file selections and optional filter list sorting. :param title: Title of the Form :param default: Default value :param psz_file_path: Where the file name(s) is returned :param type: :ref:`DAT_TYPE` :param validation_type: :ref:`FILE_FORM` :param multi: Allow Multiple file selections = TRUE Single file selections = FALSE :param sort: Sort file filter list = TRUE Maintain default filter list sorting = FALSE :type title: str :type default: str :type psz_file_path: str_ref :type type: int :type validation_type: int :type multi: int :type sort: int :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 9.9 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Remember to make the string size big enough for multiple file selections. In the case of multiple selections the names will be separated by a semicolon and only the first file will contain the full path. When using the multiple flag on any of these functions please be aware that the string returned will be in the format: drive:\\path1\\path2\\name.grid|name2.grid|name3.grid(QUALIFIERS) All grids are required to be of the same type. 
""" ret_val, psz_file_path.value = gxapi_cy.WrapGUI._dat_file_form_ex(GXContext._get_tls_geo(), title.encode(), default.encode(), psz_file_path.value.encode(), type, validation_type, multi, sort) return ret_val @classmethod def gen_file_form(cls, title, filt_vv, filter, default, file_path, type, multi): """ General file Open/Save Form for Multiple/Single file selections and multiple filter capability :param title: Title of the Form :param filt_vv: INT `GXVV <geosoft.gxapi.GXVV>` of file filters to use :ref:`FILE_FILTER` The first one is default, can pass (`GXVV <geosoft.gxapi.GXVV>`) 0 for to use next parameter. :param filter: :ref:`FILE_FILTER` (ignored if parameter above is not zero) :param default: Default value :param file_path: Where the file name(s) is returned :param type: :ref:`FILE_FORM` :param multi: Allow Multiple file selections = TRUE Single file selections = FALSE :type title: str :type filt_vv: GXVV :type filter: int :type default: str :type file_path: str_ref :type type: int :type multi: int :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Remember to make the string size big enough for multiple file selections. In the case of multiple selections the names will be separated by a semicolon and only the first file will contain the full path. Defined Functions The following four functions are handy defines and simply pass the appropriate parameter. 
iFileOpen_GUI iFileSave_GUI iMultiFileOpen_GUI iMultiFileSave_GUI """ ret_val, file_path.value = gxapi_cy.WrapGUI._gen_file_form(GXContext._get_tls_geo(), title.encode(), filt_vv, filter, default.encode(), file_path.value.encode(), type, multi) return ret_val @classmethod def custom_file_form(cls, title, filter, default, file_path, type, multi): """ General file Open/Save Form for Multiple/Single file selections and custom filter capability :param title: Title of the Form :param filter: Custom filter. :param default: Default value :param file_path: Where the file name(s) is returned :param type: :ref:`FILE_FORM` :param multi: Allow Multiple file selections = TRUE Single file selections = FALSE :type title: str :type filter: str :type default: str :type file_path: str_ref :type type: int :type multi: int :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 9.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Remember to make the string size big enough for multiple file selections. In the case of multiple selections the names will be separated by a semicolon and only the first file will contain the full path. """ ret_val, file_path.value = gxapi_cy.WrapGUI._custom_file_form(GXContext._get_tls_geo(), title.encode(), filter.encode(), default.encode(), file_path.value.encode(), type, multi) return ret_val @classmethod def import_drill_database_ado2(cls, connect, temp, table, type, reg): """ Same as `import_drill_database_ado <geosoft.gxapi.GXGUI.import_drill_database_ado>`, but template name is returned. 
:param connect: External database connection string (Blank for OLEDB Wizard) :param temp: Template to make (if left blank, the created template name is returned) :param table: Name of table :param type: Type of import returned :ref:`DH_DATA` :param reg: Drill Hole Object `GXREG <geosoft.gxapi.GXREG>` handle :type connect: str :type temp: str_ref :type table: str_ref :type type: int_ref :type reg: GXREG :returns: 0 - OK -1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If it is not defined on input, the template name is set to be the Wholeplot table name; e.g. "HOLESURVEY.i4" for "Project_HOLESURVEY" """ ret_val, temp.value, table.value, type.value = gxapi_cy.WrapGUI._import_drill_database_ado2(GXContext._get_tls_geo(), connect.encode(), temp.value.encode(), table.value.encode(), type.value, reg) return ret_val @classmethod def import_drill_database_esri(cls, connect, temp, table, type, geochem, reg): """ Same as iImportDrillDatabaseADO2_GUI, but from an ArcGIS Geodatabase :param connect: External database connection string (e.g. "d:\\Personal\\test.mdb|Table" or "d:\\File\\test.gdb|TableX|FeatureClassY)" :param temp: Template to make (if left blank, the created template name is returned) :param table: Name of table :param type: Type of import returned :ref:`DH_DATA` :param geochem: Geosoft Geochemistry Database? :param reg: Drill Hole Object `GXREG <geosoft.gxapi.GXREG>` handle :type connect: str :type temp: str_ref :type table: str_ref :type type: int_ref :type geochem: bool :type reg: GXREG :returns: 0 - OK -1 - Cancel :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If it is not defined on input, the template name is set to be the Wholeplot table name; e.g. "HOLESURVEY.i4" for "Project_HOLESURVEY" """ ret_val, temp.value, table.value, type.value = gxapi_cy.WrapGUI._import_drill_database_esri(GXContext._get_tls_geo(), connect.encode(), temp.value.encode(), table.value.encode(), type.value, geochem, reg) return ret_val @classmethod def import_drill_database_odbc(cls, connect, temp, table, type, reg): """ Generate a template file for importing drill holes from ODBC database data. :param connect: Connection string :param temp: Template to make :param table: Name of table :param type: Type of import returned :ref:`DH_DATA` :param reg: Drill Hole Object `GXREG <geosoft.gxapi.GXREG>` handle :type connect: str_ref :type temp: str_ref :type table: str_ref :type type: int_ref :type reg: GXREG :returns: 0 - OK -1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the input connection string is empty (""), then the ODBC connection dialogs will appear (e.g. to connect to a machine database) before the import wizard is run. The connect string used for this connection is then returned. This string can then be used on input to skip the ODBC connection dialogs and go straight to the Wholeplot import wizard. Because the name of the database is not necessarily known, the template name is created from the name of the table opened - e.g. "HOLELOCATION.i4". 
""" ret_val, connect.value, temp.value, table.value, type.value = gxapi_cy.WrapGUI._import_drill_database_odbc(GXContext._get_tls_geo(), connect.value.encode(), temp.value.encode(), table.value.encode(), type.value, reg) return ret_val @classmethod def configure_connection(cls, connect, temp, table): """ Configures connection string from ODBC database data. :param connect: Connection string :param temp: Template to make :param table: Name of table :type connect: str_ref :type temp: str_ref :type table: str_ref :returns: 0 - OK -1 - Cancel :rtype: int .. versionadded:: 9.9.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** """ ret_val, connect.value, temp.value, table.value = gxapi_cy.WrapGUI._configure_connection(GXContext._get_tls_geo(), connect.value.encode(), temp.value.encode(), table.value.encode()) return ret_val @classmethod def import_drill_database_odbc_maxwell(cls, connect, temp, table, type, reg): """ Same as `import_drill_database_odbc <geosoft.gxapi.GXGUI.import_drill_database_odbc>` but customized for Maxwell. :param connect: Connection string :param temp: Template to make :param table: Name of table :param type: Type of import returned :ref:`DH_DATA` :param reg: Drill Hole Object `GXREG <geosoft.gxapi.GXREG>` handle :type connect: str_ref :type temp: str_ref :type table: str_ref :type type: int_ref :type reg: GXREG :returns: 0-OK 1-Cancel :rtype: int .. versionadded:: 8.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Same as `import_drill_database_odbc <geosoft.gxapi.GXGUI.import_drill_database_odbc>` but customized for Maxwell. 
""" ret_val, connect.value, temp.value, table.value, type.value = gxapi_cy.WrapGUI._import_drill_database_odbc_maxwell(GXContext._get_tls_geo(), connect.value.encode(), temp.value.encode(), table.value.encode(), type.value, reg) return ret_val @classmethod def import_ascii_wizard(cls, name, temp): """ Generate a template file from a gui. :param name: Data file name :param temp: Template to make :type name: str :type temp: str :returns: 0 - OK 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapGUI._import_ascii_wizard(GXContext._get_tls_geo(), name.encode(), temp.encode()) return ret_val @classmethod def import_chem_database(cls, name, temp, table, type): """ Generate a template file for importing Geochems Database. :param name: Data file name :param temp: Template to make :param table: Name of table :param type: :ref:`IMPCH_TYPE` :type name: str :type temp: str :type table: str_ref :type type: int :returns: 0 - OK -1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, table.value = gxapi_cy.WrapGUI._import_chem_database(GXContext._get_tls_geo(), name.encode(), temp.encode(), table.value.encode(), type) return ret_val @classmethod def import_chem_database_ado(cls, connect, temp, table, type): """ Improved template creation for importing geochem database (ADO). 
:param connect: External database connection string (Blank for OLEDB Wizard) :param temp: Template to make :param table: Name of table :param type: :ref:`IMPCH_TYPE` :type connect: str :type temp: str :type table: str_ref :type type: int :returns: 0 - OK -1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This is an improved version of ImportChemDatabase_GUI using the new ADO technology, as opposed to DAO. Use in conjuction with `GXDU.import_ado <geosoft.gxapi.GXDU.import_ado>`. See also ImportDatabaseADO_GUI. """ ret_val, table.value = gxapi_cy.WrapGUI._import_chem_database_ado(GXContext._get_tls_geo(), connect.encode(), temp.encode(), table.value.encode(), type) return ret_val @classmethod def import_database(cls, name, temp, table): """ Create template to import an external database table. :param name: External database file name :param temp: Template to make :param table: Name of table imported (returned) :type name: str :type temp: str :type table: str_ref :returns: 0 - OK -1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This is used to select a single database table, and selected fields from that table. If the database is not Microsoft Access (type .mdb), an introductory dialog requests the file type. This function DOES NOT import the table itself, but creates an import template which may be used to import the table (see `GXDU.import_dao <geosoft.gxapi.GXDU.import_dao>`). 
""" ret_val, table.value = gxapi_cy.WrapGUI._import_database(GXContext._get_tls_geo(), name.encode(), temp.encode(), table.value.encode()) return ret_val @classmethod def import_database_ado(cls, connect, temp, table): """ Create template to import an external database table (ADO Version). :param connect: External database connection string (Blank for OLEDB Wizard) :param temp: Template to make :param table: Name of table imported (returned) :type connect: str :type temp: str :type table: str_ref :returns: 0 - OK -1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** 1. This is used to select a single database table, and selected fields from that table. 2. This function DOES NOT import the table itself, but creates an import template which may be used to import the table (see `GXDU.import_ado <geosoft.gxapi.GXDU.import_ado>`). 3. If connection string is of type "FILENAME=..." the connection will attempt to resolve it as a file database. (see also ODBCFileConnect_GUI) """ ret_val, table.value = gxapi_cy.WrapGUI._import_database_ado(GXContext._get_tls_geo(), connect.encode(), temp.encode(), table.value.encode()) return ret_val @classmethod def import_database_sql(cls, name, sql, temp, line): """ Create template to import an external database table, created using SQL. :param name: External database file name :param sql: Text file with SQL queries to use, ("" - get from database) :param temp: Import template to make :param line: Name of table imported (returned) :type name: str :type sql: str :type temp: str :type line: str_ref :returns: 0 - OK -1 - Cancel :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** 1. This is used to build an Oasis montaj group (line) from one or more database tables and fields, by selecting from one or more SQL selection queries. The list of queries is read from a text file with the following syntax: Query_Name_1 Query... Query... (continued) ... ... END_QUERY Query_Name_2 etc. 2. Each query has a title line, the query itself, then the "END_QUERY" line to finish. The title of a subsequent query is on the line after an "END_QUERY" line. 3. If the text file parameter is left blank (""), then selection queries in the database itself are listed. In addition to the pre-defined queries, there is a "User Defined" query which may be filled in by the user. 4. This function DOES NOT import the table itself, but creates an import template which may be used to import the data (see `GXDU.import_dao <geosoft.gxapi.GXDU.import_dao>`). 5. If connection string is of type "FILENAME=..." the connection will attempt to resolve it as a file database. (see also ODBCFileConnect_GUI) """ ret_val, line.value = gxapi_cy.WrapGUI._import_database_sql(GXContext._get_tls_geo(), name.encode(), sql.encode(), temp.encode(), line.value.encode()) return ret_val @classmethod def import_database_sqlado(cls, connect, sql, temp, line): """ Create template to import an external database table, created using SQL (New ADO Version). :param connect: External database connection string (Blank for OLEDB Wizard) :param sql: Text file with SQL queries to use, ("" - get from database) :param temp: Import template to make :param line: Name of table imported (returned) :type connect: str :type sql: str :type temp: str :type line: str_ref :returns: 0 - OK -1 Cancel :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This is used to build an Oasis montaj group (line) from one or more database tables and fields, by selecting from one or more SQL selection queries. The list of queries is read from a text file with the following syntax: Query_Name_1 Query... Query... (continued) ... ... END_QUERY Query_Name_2 etc. Each query has a title line, the query itself, then the "END_QUERY" line to finish. The title of a subsequent query is on the line after an "END_QUERY" line. If the text file parameter is left blank (""), then selection queries in the database itself are listed. In addition to the pre-defined queries, there is a "User Defined" query which may be filled in by the user. This function DOES NOT import the table itself, but creates an import template which may be used to import the data (see `GXDU.import_dao <geosoft.gxapi.GXDU.import_dao>`). """ ret_val, line.value = gxapi_cy.WrapGUI._import_database_sqlado(GXContext._get_tls_geo(), connect.encode(), sql.encode(), temp.encode(), line.value.encode()) return ret_val @classmethod def import_drill_database_ado(cls, connect, temp, table, type, reg): """ Generate a template file for importing drill holes. :param connect: External database connection string (Blank for OLEDB Wizard) :param temp: Template to make :param table: Name of table :param type: Type of import returned :ref:`DH_DATA` :param reg: Drill Hole Object `GXREG <geosoft.gxapi.GXREG>` handle :type connect: str :type temp: str :type table: str_ref :type type: int_ref :type reg: GXREG :returns: 0 - OK -1 - Cancel :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This is an improved version of ImportDrillDatabase_GUI using the new ADO technology, as opposed to DAO. Use in conjunction with `GXDU.import_ado <geosoft.gxapi.GXDU.import_ado>`. See also ImportDatabaseADO_GUI. """ ret_val, table.value, type.value = gxapi_cy.WrapGUI._import_drill_database_ado(GXContext._get_tls_geo(), connect.encode(), temp.encode(), table.value.encode(), type.value, reg) return ret_val @classmethod def import_template_sql(cls, name, temp, sql, line): """ Create template to import an external database table; provide query. :param name: External database file name :param temp: Import template to make :param sql: SQL selection query to run on database :param line: Name of Oasis table to create :type name: str :type temp: str :type sql: str :type line: str :returns: 0 - OK -1 Cancel terminates on error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This is similar to `import_database_sql <geosoft.gxapi.GXGUI.import_database_sql>`, but dispenses with the dialog offering a selection of queries. Instead, the user supplies the query as a string. This function DOES NOT import the table itself, but creates an import template which may be used to import the data (see `GXDU.import_dao <geosoft.gxapi.GXDU.import_dao>`). """ ret_val = gxapi_cy.WrapGUI._import_template_sql(GXContext._get_tls_geo(), name.encode(), temp.encode(), sql.encode(), line.encode()) return ret_val @classmethod def import_template_sqlado(cls, name, temp, sql, line): """ Create template to import an external database table; provide query. 
:param name: External database connection string (Blank for OLEDB Wizard) :param temp: Import template to make :param sql: SQL selection query to run on database :param line: Name of Oasis table to create :type name: str :type temp: str :type sql: str :type line: str :returns: 0 - OK -1 - Cancel terminates on error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This is similar to `import_database_sql <geosoft.gxapi.GXGUI.import_database_sql>`, but dispenses with the dialog offering a selection of queries. Instead, the user supplies the query as a string. This function DOES NOT import the table itself, but creates an import template which may be used to import the data (see `GXDU.import_ado <geosoft.gxapi.GXDU.import_ado>`). """ ret_val = gxapi_cy.WrapGUI._import_template_sqlado(GXContext._get_tls_geo(), name.encode(), temp.encode(), sql.encode(), line.encode()) return ret_val @classmethod def import_xyz_template_editor(cls, db, templ, file): """ Allows the user to edit XYZ import templates using a complex dialog. The Template name may change during editing. :param db: Database :param templ: Name of the Template (can change) :param file: Name of the XYZ file to base it on :type db: GXDB :type templ: str_ref :type file: str :returns: 0 - OK 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" ret_val, templ.value = gxapi_cy.WrapGUI._import_xyz_template_editor(GXContext._get_tls_geo(), db, templ.value.encode(), file.encode()) return ret_val @classmethod def odbc_file_connect(cls, file, connect, usage, table): """ Get the connection string for a file database as well as optional table name and FileUsage attribute :param file: File Name :param connect: Connection string (returned) :param usage: File Usage (0 - ODBC drivers not queried, 1 - Directory containing tables, 2 - File containing tables) :param table: Table name of file (returned if plUsage==1) :type file: str :type connect: str_ref :type usage: int :type table: str_ref :returns: 0 - OK -1 - Cancel terminates on error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the file extension is "mdb" or "xls" then a Microsoft Access or Excel database is assumed. Otherwise, a dialog appears listing the installed ODBC file database drivers. If the driver takes a directory as a database (FileUsage==1) the table name is also returned. This is needed because the table name may or may not include the file extension. """ ret_val, connect.value, table.value = gxapi_cy.WrapGUI._odbc_file_connect(GXContext._get_tls_geo(), file.encode(), connect.value.encode(), usage, table.value.encode()) return ret_val @classmethod def symbol_form(cls, symb_font, geo_font, weight, symb_num, symb_size, symb_ang, edge_col, fill_col): """ - Select a symbol. :param symb_font: Symbol font file name :param geo_font: Geosoft font? 
:param weight: Weight :ref:`MVIEW_FONT_WEIGHT` :param symb_num: Symbol number :param symb_size: Symbol size :param symb_ang: Symbol angle :param edge_col: Edge color :param fill_col: Fill color :type symb_font: str_ref :type geo_font: bool_ref :type weight: int_ref :type symb_num: int_ref :type symb_size: float_ref :type symb_ang: float_ref :type edge_col: int_ref :type fill_col: int_ref :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Symbols are set on input, and new values returned. """ ret_val, symb_font.value, geo_font.value, weight.value, symb_num.value, symb_size.value, symb_ang.value, edge_col.value, fill_col.value = gxapi_cy.WrapGUI._symbol_form(GXContext._get_tls_geo(), symb_font.value.encode(), geo_font.value, weight.value, symb_num.value, symb_size.value, symb_ang.value, edge_col.value, fill_col.value) return ret_val @classmethod def meta_data_tool(cls, meta, root_token, schema): """ Edit a `GXMETA <geosoft.gxapi.GXMETA>` object :param meta: Meta object :param root_token: Root Token, `H_META_INVALID_TOKEN <geosoft.gxapi.H_META_INVALID_TOKEN>` for root :param schema: Display schema information ? :type meta: GXMETA :type root_token: int :type schema: int :returns: 0 - OK non-zero - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = gxapi_cy.WrapGUI._meta_data_tool(GXContext._get_tls_geo(), meta, root_token, schema) return ret_val @classmethod def import_chem_wizard(cls, name, temp, type): """ Generate a template file for importing geochems. 
:param name: Data file name :param temp: Template to make :param type: :ref:`IMPCH_TYPE` :type name: str :type temp: str :type type: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._import_chem_wizard(GXContext._get_tls_geo(), name.encode(), temp.encode(), type) @classmethod def import_drill_wizard(cls, name, temp, table, type, reg): """ Generate a template file for importing drill holes. :param name: Data file name :param temp: Template to make :param table: Name of table :param type: Type of import returned :ref:`DH_DATA` :param reg: Drill Hole Object `GXREG <geosoft.gxapi.GXREG>` handle :type name: str :type temp: str :type table: str_ref :type type: int_ref :type reg: GXREG .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ table.value, type.value = gxapi_cy.WrapGUI._import_drill_wizard(GXContext._get_tls_geo(), name.encode(), temp.encode(), table.value.encode(), type.value, reg) @classmethod def import_drill_wizard_ex(cls, name, temp, table, type, reg): """ Generate a template file for importing drill holes where type is known :param name: Data file name :param temp: Template to make :param table: Name of table :param type: Type of import :ref:`DH_DATA` :param reg: Drill Hole Object `GXREG <geosoft.gxapi.GXREG>` handle :type name: str :type temp: str :type table: str_ref :type type: int :type reg: GXREG .. versionadded:: 9.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" table.value = gxapi_cy.WrapGUI._import_drill_wizard_ex(GXContext._get_tls_geo(), name.encode(), temp.encode(), table.value.encode(), type, reg) @classmethod def internet_trust(cls): """ Change the Internet Trust Relationships .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._internet_trust(GXContext._get_tls_geo()) @classmethod def pattern_form(cls, pat, size, thick, dense, col, back_col): """ - Select a pattern. :param pat: Current Pattern :param size: Current Size, // returned :param thick: Current Thick (0-100) // returned :param dense: Current Density, // returned :param col: Current Pattern Color // passed in and returned :param back_col: Current Background Color // passed in and returned; can be `C_TRANSPARENT <geosoft.gxapi.C_TRANSPARENT>` :type pat: int_ref :type size: float_ref :type thick: int_ref :type dense: float_ref :type col: int_ref :type back_col: int_ref :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Pattern values set on input, and new values returned. Solid fill is indicated by Pattern number 0. Returned Values (not set on input) Size pattern tile size in mm. Thick pattern line thickness in percent of the tile size. valid range is 0-100. Density Tile spacing. A value of 1 means tiles are laid with no overlap. A value of 2 means they overlap each other. The pattern Angle and Style parameters are not user-definable. 
""" ret_val, pat.value, size.value, thick.value, dense.value, col.value, back_col.value = gxapi_cy.WrapGUI._pattern_form(GXContext._get_tls_geo(), pat.value, size.value, thick.value, dense.value, col.value, back_col.value) return ret_val @classmethod def line_pattern_form(cls, pattern, thickness, pitch, colour): """ Select a line pattern. :param pattern: Current Pattern :param thickness: Current Thickness :param pitch: Current Pitch :param colour: Current Pattern Color :type pattern: int_ref :type thickness: float_ref :type pitch: float_ref :type colour: int_ref :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 8.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Same as `pattern_form <geosoft.gxapi.GXGUI.pattern_form>` but for line patterns. """ ret_val, pattern.value, thickness.value, pitch.value, colour.value = gxapi_cy.WrapGUI._line_pattern_form(GXContext._get_tls_geo(), pattern.value, thickness.value, pitch.value, colour.value) return ret_val @classmethod def two_panel_selection(cls, ls_tf, ls_ts, title): """ General purpose two-panel selection. :param ls_tf: All available items for selection. :param ls_ts: Selections (altered on output) :param title: Title for dialog :type ls_tf: GXLST :type ls_ts: GXLST :type title: str :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Takes as input two LSTs, one contains all available items, the second currently selected items. 
These are processed, and in the left panel are displayed all items in the first `GXLST <geosoft.gxapi.GXLST>` not in the selection `GXLST <geosoft.gxapi.GXLST>`, and on the right all items in the first `GXLST <geosoft.gxapi.GXLST>` which are in the selection `GXLST <geosoft.gxapi.GXLST>`. (Items in the selection `GXLST <geosoft.gxapi.GXLST>` NOT in the first `GXLST <geosoft.gxapi.GXLST>` are ignored). Once the user has finalized the selections, the final selections are returned in the selection `GXLST <geosoft.gxapi.GXLST>`. Selections and display are based on the `LST_ITEM_NAME <geosoft.gxapi.LST_ITEM_NAME>` part of the `GXLST <geosoft.gxapi.GXLST>` item, but on export both the `LST_ITEM_NAME <geosoft.gxapi.LST_ITEM_NAME>` and `LST_ITEM_VALUE <geosoft.gxapi.LST_ITEM_VALUE>` elements of the selected items from the first `GXLST <geosoft.gxapi.GXLST>` are transferred to the second list for output. The sConvertToCSV_LST and sConvertFromCSV_LST functions in lst.h can be used to convert the selection LSTs to forms that can be stored and retrieved from GX parameters (or `GXREG <geosoft.gxapi.GXREG>` or INI, etc.). """ ret_val = gxapi_cy.WrapGUI._two_panel_selection(GXContext._get_tls_geo(), ls_tf, ls_ts, title.encode()) return ret_val @classmethod def two_panel_selection2(cls, ls_tf, ls_ts, title): """ Two-panel selection, items not sorted alphabetically. :param ls_tf: All available items for selection. :param ls_ts: Selections (altered on output) :param title: Title for dialog :type ls_tf: GXLST :type ls_ts: GXLST :type title: str :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** Same as `two_panel_selection <geosoft.gxapi.GXGUI.two_panel_selection>`, but the items in the two lists are not sorted alphabetically, but are ordered exactly as input, and when an item is selected it is added at the end of the lists. """ ret_val = gxapi_cy.WrapGUI._two_panel_selection2(GXContext._get_tls_geo(), ls_tf, ls_ts, title.encode()) return ret_val @classmethod def two_panel_selection_ex(cls, ls_tf, ls_ts, sorted, allow_no_select, title): """ Two-panel selection; options for sort and ability to select no items. :param ls_tf: All available items for selection. :param ls_ts: Selections (altered on output) :param sorted: Sort items alphabetically (0:No, 1:Yes) :param allow_no_select: Allow no items selected (0:No, 1:Yes) :param title: Title for dialog :type ls_tf: GXLST :type ls_ts: GXLST :type sorted: int :type allow_no_select: int :type title: str :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Same as `two_panel_selection <geosoft.gxapi.GXGUI.two_panel_selection>`, but the items in the two lists are not sorted alphabetically, but are ordered exactly as input, and when an item is selected it is added at the end of the lists. """ ret_val = gxapi_cy.WrapGUI._two_panel_selection_ex(GXContext._get_tls_geo(), ls_tf, ls_ts, sorted, allow_no_select, title.encode()) return ret_val @classmethod def two_panel_selection_ex2(cls, ls_tf, ls_ts, sorted, allow_no_select, title, help): """ Two-panel selection; extended options including a help link. :param ls_tf: All available items for selection. 
:param ls_ts: Selections (altered on output) :param sorted: Sort items alphabetically (0:No, 1:Yes) :param allow_no_select: Allow no items selected (0:No, 1:Yes) :param title: Title for dialog :param help: Help link :type ls_tf: GXLST :type ls_ts: GXLST :type sorted: int :type allow_no_select: int :type title: str :type help: str :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Same as `two_panel_selection_ex <geosoft.gxapi.GXGUI.two_panel_selection_ex>`, but user can specify a help link. """ ret_val = gxapi_cy.WrapGUI._two_panel_selection_ex2(GXContext._get_tls_geo(), ls_tf, ls_ts, sorted, allow_no_select, title.encode(), help.encode()) return ret_val @classmethod def launch_single_geo_dotnetx_tool(cls, dll, func, meta): """ Launch a user created .Net GEOXTOOL ensuring a single instance. :param dll: Assembly name :param func: Control Class Name :param meta: `GXMETA <geosoft.gxapi.GXMETA>` Handle (holding tool configuration data) :type dll: str :type func: str :type meta: GXMETA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._launch_single_geo_dotnetx_tool(GXContext._get_tls_geo(), dll.encode(), func.encode(), meta) @classmethod def launch_geo_dotnetx_tool(cls, dll, func, meta): """ Launch a user created .Net GEOXTOOL. :param dll: Assembly name :param func: Control Class Name :param meta: `GXMETA <geosoft.gxapi.GXMETA>` Handle (holding tool configuration data) :type dll: str :type func: str :type meta: GXMETA .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._launch_geo_dotnetx_tool(GXContext._get_tls_geo(), dll.encode(), func.encode(), meta) @classmethod def launch_geo_x_tool(cls, dll, func, meta): """ Launch a user created GEOXTOOL. :param dll: DLL name :param func: Function Name :param meta: `GXMETA <geosoft.gxapi.GXMETA>` Handle (holding tool configuration data) :type dll: str :type func: str :type meta: GXMETA .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._launch_geo_x_tool(GXContext._get_tls_geo(), dll.encode(), func.encode(), meta) @classmethod def launch_single_geo_dotnetx_tool_ex(cls, dll, func, meta, align, dock, width, height): """ Launch a user created .Net GEOXTOOL ensuring a single instance. :param dll: Assembly name :param func: Control Class Name :param meta: `GXMETA <geosoft.gxapi.GXMETA>` Handle (holding tool configuration data) :param align: :ref:`XTOOL_ALIGN` (can specify one or more or `XTOOL_ALIGN_ANY <geosoft.gxapi.XTOOL_ALIGN_ANY>`) :param dock: :ref:`XTOOL_DOCK` :param width: Default width :param height: Default height :type dll: str :type func: str :type meta: GXMETA :type align: int :type dock: int :type width: int :type height: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" gxapi_cy.WrapGUI._launch_single_geo_dotnetx_tool_ex(GXContext._get_tls_geo(), dll.encode(), func.encode(), meta, align, dock, width, height) @classmethod def launch_geo_dotnetx_tool_ex(cls, dll, func, meta, align, dock, width, height): """ Launch a user created .Net GEOXTOOL. :param dll: Assembly name :param func: Control Class Name :param meta: `GXMETA <geosoft.gxapi.GXMETA>` Handle (holding tool configuration data) :param align: :ref:`XTOOL_ALIGN` (can specify one or more or `XTOOL_ALIGN_ANY <geosoft.gxapi.XTOOL_ALIGN_ANY>`) :param dock: :ref:`XTOOL_DOCK` :param width: Default width :param height: Default height :type dll: str :type func: str :type meta: GXMETA :type align: int :type dock: int :type width: int :type height: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._launch_geo_dotnetx_tool_ex(GXContext._get_tls_geo(), dll.encode(), func.encode(), meta, align, dock, width, height) @classmethod def launch_geo_x_tool_ex(cls, dll, func, meta, align, dock, width, height): """ Launch a user created GEOXTOOL. :param dll: DLL name :param func: Function Name :param meta: `GXMETA <geosoft.gxapi.GXMETA>` Handle (holding tool configuration data) :param align: :ref:`XTOOL_ALIGN` (can specify one or more or `XTOOL_ALIGN_ANY <geosoft.gxapi.XTOOL_ALIGN_ANY>`) :param dock: :ref:`XTOOL_DOCK` :param width: Default width :param height: Default height :type dll: str :type func: str :type meta: GXMETA :type align: int :type dock: int :type width: int :type height: int .. versionadded:: 9.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" gxapi_cy.WrapGUI._launch_geo_x_tool_ex(GXContext._get_tls_geo(), dll.encode(), func.encode(), meta, align, dock, width, height) @classmethod def meta_data_viewer(cls, meta, root_token, schema): """ View a `GXMETA <geosoft.gxapi.GXMETA>` object :param meta: Meta object :param root_token: Root token, `H_META_INVALID_TOKEN <geosoft.gxapi.H_META_INVALID_TOKEN>` for root :param schema: Display schema information ? :type meta: GXMETA :type root_token: int :type schema: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._meta_data_viewer(GXContext._get_tls_geo(), meta, root_token, schema) @classmethod def print_file(cls, file): """ Prints a file to current printer :param file: Filename string :type file: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._print_file(GXContext._get_tls_geo(), file.encode()) @classmethod def render_pattern(cls, hdc, left, bottom, right, top, pat, size, thick, dense, col, back_col, is_enabled, is_button, is_selected): """ - Render a pattern. 
:param hdc: DC handle :param left: Left value of the render rect in Windows coordinates (bottom>top) :param bottom: Bottom value :param right: Right value :param top: Top value :param pat: Pattern number :param size: Pattern size, // input `GS_R8DM <geosoft.gxapi.GS_R8DM>` to use default :param thick: Pattern thick (0-100) // input `GS_S4DM <geosoft.gxapi.GS_S4DM>` to use default :param dense: Pattern density, // input `GS_R8DM <geosoft.gxapi.GS_R8DM>` to use default :param col: Pattern color // input `GS_S4DM <geosoft.gxapi.GS_S4DM>` to use default :param back_col: Pattern background color // input `GS_S4DM <geosoft.gxapi.GS_S4DM>` to use default; can be `C_TRANSPARENT <geosoft.gxapi.C_TRANSPARENT>` :param is_enabled: Is this window enabled? :param is_button: Is this a button? :param is_selected: Is this window selected? :type hdc: int :type left: int :type bottom: int :type right: int :type top: int :type pat: int :type size: float :type thick: int :type dense: float :type col: int :type back_col: int :type is_enabled: int :type is_button: int :type is_selected: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Renders a Geosoft pattern to a Windows DC. """ gxapi_cy.WrapGUI._render_pattern(GXContext._get_tls_geo(), hdc, left, bottom, right, top, pat, size, thick, dense, col, back_col, is_enabled, is_button, is_selected) @classmethod def render_line_pattern(cls, hdc, left, bottom, right, top, pattern, thickness, pitch, col, is_enabled, is_button, is_selected): """ Render a line pattern. 
:param hdc: DC Handle :param left: Left value of the render rect in Windows coordinates (bottom>top) :param bottom: Bottom value :param right: Right value :param top: Top value :param pattern: Pattern number :param thickness: Pattern thickness :param pitch: Pattern pitch :param col: Pattern color :param is_enabled: Is this window enabled? :param is_button: Is this a button? :param is_selected: Is this window selected? :type hdc: int :type left: int :type bottom: int :type right: int :type top: int :type pattern: int :type thickness: float :type pitch: float :type col: int :type is_enabled: int :type is_button: int :type is_selected: int .. versionadded:: 8.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Same as `render_pattern <geosoft.gxapi.GXGUI.render_pattern>` but for line patterns. """ gxapi_cy.WrapGUI._render_line_pattern(GXContext._get_tls_geo(), hdc, left, bottom, right, top, pattern, thickness, pitch, col, is_enabled, is_button, is_selected) @classmethod def set_parent_wnd(cls, wnd): """ Set the current parent WND :param wnd: New Parent Window :type wnd: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The parent WND is used by all modal dialogs as a parent to ensure the dialog is correctly modal. """ gxapi_cy.WrapGUI._set_parent_wnd(GXContext._get_tls_geo(), wnd) @classmethod def set_printer(cls, printer): """ Sets the Printer. :param printer: Printer Name :type printer: str .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._set_printer(GXContext._get_tls_geo(), printer.encode()) @classmethod def set_prog_always_on(cls, on): """ Ability to set the progress bar to stay visible even if main application is processing messages :param on: Should progress bar remain visible :type on: bool .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** In montaj the progress bar is hidden when the main window start processing messages. This is not always desirable in some 3rd party apps, hence this function. """ gxapi_cy.WrapGUI._set_prog_always_on(GXContext._get_tls_geo(), on) @classmethod def show_direct_hist(cls, min, max, mean, std_dev, median, items, vv): """ Display histogram of data directly :param min: Min Value to display :param max: Max Value to display :param mean: Mean Value to display :param std_dev: StdDev Value to display :param median: Median Value to display :param items: Items Number of items this comprises :param vv: `GXVV <geosoft.gxapi.GXVV>` holding hist counts :type min: float :type max: float :type mean: float :type std_dev: float :type median: float :type items: int :type vv: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. 
""" gxapi_cy.WrapGUI._show_direct_hist(GXContext._get_tls_geo(), min, max, mean, std_dev, median, items, vv) @classmethod def show_hist(cls, st): """ Display Histogram of data from `GXST <geosoft.gxapi.GXST>` :param st: Statistics obj :type st: GXST .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._show_hist(GXContext._get_tls_geo(), st) @classmethod def simple_map_dialog(cls, map, title, help_id): """ General purpose map display `GXGUI <geosoft.gxapi.GXGUI>` with no interaction. :param map: `GXMAP <geosoft.gxapi.GXMAP>` object :param title: Title :param help_id: HelpID :type map: GXMAP :type title: str :type help_id: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** This function displays a map in a simple resizable dialog that fits the map into it. It is generally useful to display temporary maps as graphs (e.g. variograms). """ gxapi_cy.WrapGUI._simple_map_dialog(GXContext._get_tls_geo(), map, title.encode(), help_id.encode()) @classmethod def thematic_voxel_info(cls, vox): """ Display GX.Net thematic voxel info `GXGUI <geosoft.gxapi.GXGUI>`. :param vox: `GXVOX <geosoft.gxapi.GXVOX>` object :type vox: GXVOX .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Displays the thematic voxel codes, colors, total volume for each code, and number of valid items (cubes) for each code. This is a replacement for the numeric stats done on normal numerical voxel grids. 
""" gxapi_cy.WrapGUI._thematic_voxel_info(GXContext._get_tls_geo(), vox) @classmethod def show_3d_viewer_dialog(cls, title, o3dv): """ Display a standalone 3D viewer :param title: Title :param o3dv: 3D View name (.geosoft_3dv) :type title: str :type o3dv: str .. versionadded:: 9.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** Any changes made to the 3D View will be persisted. """ gxapi_cy.WrapGUI._show_3d_viewer_dialog(GXContext._get_tls_geo(), title.encode(), o3dv.encode()) # Obsolete @classmethod def fft2_spec_filter(cls, spec_file_name, con_file_name): """ Interactive `GXFFT2 <geosoft.gxapi.GXFFT2>` radially averaged power spectrum filter :param spec_file_name: Name of the input spectrum file :param con_file_name: Name of the output control file :type spec_file_name: str :type con_file_name: str .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Limitations:** May not be available while executing a command line program. """ gxapi_cy.WrapGUI._fft2_spec_filter(GXContext._get_tls_geo(), spec_file_name.encode(), con_file_name.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXPAT.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXPAT(gxapi_cy.WrapPAT): """ GXPAT class. A `GXPAT <geosoft.gxapi.GXPAT>` object is created from a Geosoft format pattern file. It contains all the individual patterns listed in the file. Notes: You may create your own fill patterns. They can be added to the "user.pat" file in the <geosoft>\\user\\etc directory. User pattern numbers should be in the range between 20000 and 29999. """ def __init__(self, handle=0): super(GXPAT, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXPAT <geosoft.gxapi.GXPAT>` :returns: A null `GXPAT <geosoft.gxapi.GXPAT>` :rtype: GXPAT """ return GXPAT() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls): """ Creates a pattern object with current default patterns. :returns: `GXPAT <geosoft.gxapi.GXPAT>` object :rtype: GXPAT .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapPAT._create(GXContext._get_tls_geo()) return GXPAT(ret_val) def get_lst(self, cl, lst): """ Copies all pattern names into a `GXLST <geosoft.gxapi.GXLST>` object. :param cl: Class name ("" for all classes) :param lst: `GXLST <geosoft.gxapi.GXLST>` Handle :type cl: str :type lst: GXLST .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Returns a list of the available patterns. 
There will always be at least two items, "None" and "Solid Fill" """ self._get_lst(cl.encode(), lst) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/__init__.py # coding = utf-8 import json from os.path import dirname, join import logging mpl_logger = logging.getLogger('matplotlib') mpl_logger.setLevel(logging.WARNING) with open(join(dirname(__file__), 'pkg_info.json')) as fp: _info = json.load(fp) __version__ = "{}{}".format(_info['version'], _info['pre-release']) version = __version__ __all__ = ['gxapi', 'gxpy'] class GXRuntimeError(RuntimeError): """ A subclass of `RuntimeError <https://docs.python.org/3/library/exceptions.html#RuntimeError>`_ which is the base class for any runtime type error originating from the Geosoft Python APIS .. versionadded:: 9.1 """ def __init__(self, message): super(RuntimeError, self).__init__(message) <file_sep>/geosoft/gxapi/GXMXD.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMXD(gxapi_cy.WrapMXD): """ GXMXD class. A `GXMXD <geosoft.gxapi.GXMXD>` wraps and provides manipulation and usage for the content of an ArcGIS `GXMXD <geosoft.gxapi.GXMXD>` file. 
""" def __init__(self, handle=0): super(GXMXD, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMXD <geosoft.gxapi.GXMXD>` :returns: A null `GXMXD <geosoft.gxapi.GXMXD>` :rtype: GXMXD """ return GXMXD() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create_metadata(cls, mxd): """ Create metadata for this brand new `GXMXD <geosoft.gxapi.GXMXD>` (we are the creator) :param mxd: `GXMXD <geosoft.gxapi.GXMXD>` file name :type mxd: str .. versionadded:: 7.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMXD._create_metadata(GXContext._get_tls_geo(), mxd.encode()) @classmethod def convert_to_map(cls, mxd, map): """ Create Geosoft map from ArcGIS `GXMXD <geosoft.gxapi.GXMXD>` :param mxd: ArcGIS `GXMXD <geosoft.gxapi.GXMXD>` file name :param map: Geosoft map file name :type mxd: str :type map: str .. versionadded:: 9.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMXD._convert_to_map(GXContext._get_tls_geo(), mxd.encode(), map.encode()) @classmethod def sync(cls, mxd): """ Syncronize any Metadata for this `GXMXD <geosoft.gxapi.GXMXD>` :param mxd: `GXMXD <geosoft.gxapi.GXMXD>` file name :type mxd: str .. 
versionadded:: 7.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ gxapi_cy.WrapMXD._sync(GXContext._get_tls_geo(), mxd.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxpy/map.py """ Geosoft maps contain one or more 2D and 3D views. Each 2D view has a specific coordinate system and clip region. Each 3D view is a link to a separate `geosoft_3dv` file, which can be placed in the map as a 2D perspective of the last viewing state of the 3D view. :Classes: :`Map`: map class :Constants: :WRITE_NEW: `geosoft.gxapi.MAP_WRITENEW` :WRITE_OLD: `geosoft.gxapi.MAP_WRITEOLD` :LIST_ALL: `geosoft.gxapi.MAP_LIST_MODE_ALL` :LIST_3D: `geosoft.gxapi.MAP_LIST_MODE_3D` :LIST_2D: `geosoft.gxapi.MAP_LIST_MODE_NOT3D` :VIEW_NAME_SIZE: 2080 :TEXT_BOTTOM_LEFT: -1 :TEXT_BOTTOM_CENTER: 0 :TEXT_BOTTOM_RIGHT: 1 :TEXT_ALL_CENTER: 2 :TEXT_BASE_LEFT: 3 :TEXT_BASE_CENTER: 4 :TEXT_BASE_RIGHT: 5 :TEXT_BASE_ALL_CENTER: 6 :TEXT_BASE_FIT_BY_CHARACTER_WIDTH: 7 :TEXT_BASE_FIT_BY_CHARACTER_SIZE: 8 :MAP_LANDSCAPE: 0 :MAP_PORTRAIT: 1 :TOP_IN: 1 :TOP_OUT: -1 :GRID_NONE: 0 :GRID_DOTTED: 1 :GRID_CROSSES: 2 :GRID_LINES: 3 :GROUP_NEW: 0 :GROUP_APPEND: 1 :VIEW_BASE: 0 :VIEW_DATA: 1 :STYLE_FIGURE: 0 :STYLE_MAP: 1 :RASTER_FORMAT_EMF: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_EMF` :RASTER_FORMAT_BMP: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_BMP` :RASTER_FORMAT_JPEGL: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEGL` :RASTER_FORMAT_JPEG: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEG` :RASTER_FORMAT_JPEGH: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_JPEGH` :RASTER_FORMAT_GIF: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_GIF` :RASTER_FORMAT_PCX: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_PCX` :RASTER_FORMAT_PNG: 
`geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_PNG` :RASTER_FORMAT_EPS: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_EPS` :RASTER_FORMAT_TIFF: `geosoft.gxapi.MAP_EXPORT_RASTER_FORMAT_TIFF` .. seealso:: :mod:`geosoft.gxpy.view`, :mod:`geosoft.gxpy.group` :mod:`geosoft.gxapi.GXMAP`, :mod:`geosoft.gxapi.GXMVIEW`, :mod:`geosoft.gxapi.GXMVU` .. note:: Regression tests provide usage examples: `map tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_map.py>`_ """ import os from math import ceil import geosoft import geosoft.gxapi as gxapi from . import gx as gx from . import grid as gxgrd from . import utility as gxu from . import dataframe as gxdf from . import group as gxg from . import view as gxv from . import geometry as gxgeo from . import coordinate_system as gxcs from . import metadata as gxmeta __version__ = geosoft.__version__ def _t(s): return s class MapException(geosoft.GXRuntimeError): """ Exceptions from :mod:`geosoft.gxpy.map`. .. versionadded:: 9.2 """ pass WRITE_NEW = gxapi.MAP_WRITENEW WRITE_OLD = gxapi.MAP_WRITEOLD LIST_ALL = gxapi.MAP_LIST_MODE_ALL LIST_3D = gxapi.MAP_LIST_MODE_3D LIST_2D = gxapi.MAP_LIST_MODE_NOT3D VIEW_NAME_SIZE = 2080 TEXT_BOTTOM_LEFT = -1 TEXT_BOTTOM_CENTER = 0 TEXT_BOTTOM_RIGHT = 1 TEXT_ALL_CENTER = 2 TEXT_BASE_LEFT = 3 TEXT_BASE_CENTER = 4 TEXT_BASE_RIGHT = 5 TEXT_BASE_ALL_CENTER = 6 TEXT_BASE_FIT_BY_CHARACTER_WIDTH = 7 TEXT_BASE_FIT_BY_CHARACTER_SIZE = 8 MAP_LANDSCAPE = 0 MAP_PORTRAIT = 1 TOP_IN = 1 TOP_OUT = -1 GRID_NONE = 0 GRID_DOTTED = 1 GRID_CROSSES = 2 GRID_LINES = 3 GROUP_NEW = 0 GROUP_APPEND = 1 VIEW_BASE = 0 VIEW_DATA = 1 STYLE_FIGURE = 0 STYLE_MAP = 1 RASTER_FORMAT_EMF = gxapi.MAP_EXPORT_RASTER_FORMAT_EMF RASTER_FORMAT_BMP = gxapi.MAP_EXPORT_RASTER_FORMAT_BMP RASTER_FORMAT_JPEGL = gxapi.MAP_EXPORT_RASTER_FORMAT_JPEGL RASTER_FORMAT_JPEG = gxapi.MAP_EXPORT_RASTER_FORMAT_JPEG RASTER_FORMAT_JPEGH = gxapi.MAP_EXPORT_RASTER_FORMAT_JPEGH RASTER_FORMAT_GIF = gxapi.MAP_EXPORT_RASTER_FORMAT_GIF RASTER_FORMAT_PCX = 
gxapi.MAP_EXPORT_RASTER_FORMAT_PCX RASTER_FORMAT_PNG = gxapi.MAP_EXPORT_RASTER_FORMAT_PNG RASTER_FORMAT_EPS = gxapi.MAP_EXPORT_RASTER_FORMAT_EPS RASTER_FORMAT_TIFF = gxapi.MAP_EXPORT_RASTER_FORMAT_TIFF def map_file_name(file_name, file_type='map'): """ Return a fully resolved map file path using the file name, with .map extension :param file_name: file name, with ot without path and/or extension :param file_type: Geosoft file type, 'map' or 'geosoft_3dv' expected. Default is 'map' :returns: file name path with extension .map .. versionadded:: 9.2 """ ext = os.path.splitext(file_name)[1].lower() if ext not in ('.map', '.geosoft_3dv'): if file_type[0] != '.': file_name = file_name + '.' + file_type else: file_name += file_type return os.path.abspath(file_name) def delete_files(file_name): """ Delete all files associated with this map name. :param file_name: .. versionadded:: 9.2 """ def remove(fn): try: os.remove(fn) except FileNotFoundError: pass file_name = map_file_name(file_name) # remove child files, if any try: meta = Map.open(file_name).metadata views = gxmeta.get_node_from_meta_dict('geosoft/dataset/map/views', meta) if views: for v in views: child_files = gxmeta.get_node_from_meta_dict(v + '/child_files', views) if child_files: for f in child_files: remove(f) except: pass remove(file_name + '.xml') remove(os.path.splitext(file_name)[0] + '.mdf') remove(file_name) def save_as_image(mapfile, imagefile=None, type=RASTER_FORMAT_PNG, pix_width=1000, pix_height=0, pix_32_bit=False): """ Save a map file to an image file :param mapfile: map or geosoft_3dv file name :param imagefile: name of the output raster file, default is a temporary png file. :param type: one of the RASTER_FORMAT types, default`RASTER_FORMAT_PNG` :param pix_width: image pixel width, if 0 use pix_height only :param pix_height: image pixel height, if 0 use pix_width only :param pix_32_bit: make 32-bit image (with 8-bit alpha background) .. 
versionadded:: 9.2 """ return Map.open(mapfile).image_file(imagefile=imagefile, type=type, pix_width=pix_width, pix_height=pix_height, pix_32_bit=pix_32_bit) def crc_map(mapfile, pix_width=1000): """ Return the CRC of a map based on the output bitmap image. :param mapfile: name of the map file :param pix_width: image pixel width - use a higher resolution to test more detail :returns: CRC as an int .. versionadded:: 9.2 """ return Map.open(mapfile).crc_image(pix_width=pix_width) class Map: """ Geosoft map files. A Geosoft map is a container for views. A view has a defined coordinate system (2D or 3D) and contains groups of graphical elements defined relative to the coordinate system of the view. The :mod:`geosoft.gxpy.view` module provides classes and methods for working with individual 2D or 3D views, and the :mod:`geosoft.gxpy.group` modules contains classes that deal with drawing groups.. Geosoft maps will always have a 2D 'base' view and is intended for drawing map annotations, such as titles, a scale bar, North arrow and legends. The lower-left corner of the base view ia at location (0, 0) and the upper-right corner is defined by the media size and may be adjusted to fit the data view. When drawing to the base view the native unit is millimetres and locations and graphic entity sizes are assumed to be mm. When drawing to the map using map methods the units are in map cm, including graphic entity scaling. Maps will also have one or more data views, each with it's own defined coordinate system and graphical content. Creating a new map will create one data view, which will become the map's `current_data_view`, within which any spatial data drawn by Geosoft 2D drawing applications will be placed. Maps may have more than one data view, including 3D data views, and the `current_data_view` can be changed to any 2D or 3D view, and subsequent drawing will be placed in that view. 3D views define a 3D spatial volume and accept both 2D and 3D drawing elements. 
    A 3D view will always contain a plane or surface on which 2D elements are drawn, and
    when a 3D view is the `default data view`, 2D elements will be drawn to the
    identified plane or surface. When a 3D view is rendered on a map, which is a flat
    surface, the view is rendered from the last use point of view.

    Geosoft map viewing applications allow a user to open a 3D view in a 3D viewer,
    which provides for 3D viewing, 3D navigation and 3D drawing capabilities.

    :Constructors:

        ============ ========================================
        :meth:`open` open an existing map
        :meth:`new`  create a new map
        ============ ========================================

    .. versionadded:: 9.2
    """

    def __enter__(self):
        # context-manager support: 'with Map.open(...) as m:' closes the map on exit
        return self

    def __exit__(self, xtype, xvalue, xtraceback):
        self.__del__()

    def __del__(self):
        # _close may not exist if __init__ raised before it finished; guard first
        if hasattr(self, '_close'):
            self._close()

    def __repr__(self):
        return "{}({})".format(self.__class__, self.__dict__)

    def __str__(self):
        # the string form of a Map is its full file path
        return self._file_name

    def __init__(self, file_name, mode=WRITE_NEW, _internal=False, gxmap=None):
        # Direct construction is forbidden; instances come from the class-method
        # constructors so that mode/file handling is always consistent.
        if not _internal:
            raise MapException(_t("Map must be created from Map.new(), or Map.open(), or Map.from_gxapi()."))

        super().__init__()

        self._gx = gx.gx()
        self._remove = False          # True -> delete all map files on close

        if gxmap is not None:
            # wrapping an existing gxapi handle: recover its file name from the handle
            self._gxmap = gxmap
            file_name_ref = gxapi.str_ref()
            self._gxmap.get_file_name(file_name_ref)
            self._file_name = map_file_name(file_name_ref.value)
        else:
            # opening/creating by name
            self._file_name = map_file_name(file_name)
            self._gxmap = gxapi.GXMAP.create(self.file_name, mode)

        # map name is the file's base name without extension
        self._name = os.path.splitext(os.path.split(self._file_name)[1])[0]
        self._annotation_outer_edge = 0.0
        self._metadata = None          # lazily-loaded metadata dict
        self._metadata_changed = False # flush metadata XML on close when True
        self._metadata_root = ''
        # register with the resource tracker so unclosed maps can be reported
        self._open = gx.track_resource(self.__class__.__name__, self._file_name)

    def _close(self, pop=True):
        # Teardown order matters here:
        #   1. drop the gxapi map handle (releases the file),
        #   2. flush changed metadata to the sidecar .xml and sync it,
        #   3. optionally delete all map files (self._remove),
        #   4. pop the resource-tracker entry.
        # NOTE(review): step order reconstructed from the original source; the
        # metadata flush must happen after the handle is released so GXMAP.sync
        # can reopen the file - confirm before reordering.
        if hasattr(self, '_open'):
            if self._open:
                if self._gxmap:
                    self._gxmap = None
                    if self._metadata_changed:
                        with open(self._file_name + '.xml', 'w+') as f:
                            f.write(gxu.xml_from_dict(self._metadata))
                        gxapi.GXMAP.sync(self._file_name)
                    if self._remove:
                        try:
                            delete_files(self._file_name)
                        except OSError:
                            # TODO We ignore this and it can be reproduced by using with statement with some exception.
                            # E.g. in test_grid.test_image_file without an IMG DAT dll present. Should be investigated.
                            # See issue #82
                            pass
                if pop:
                    gx.pop_resource(self._open)
                self._open = None

    @classmethod
    def from_gxapi(cls, gxmap):
        """
        Instantiate Map from gxapi instance.

        :param gxmap: a gxapi.CGXMAP

        .. versionadded:: 9.9
        """
        return cls(None, gxmap=gxmap, _internal=True)

    @classmethod
    def open(cls, file_name):
        """
        Open an existing map file.

        :param file_name: name of the map file

        .. versionadded:: 9.2
        """
        map = cls(file_name, mode=WRITE_OLD, _internal=True)
        return map

    @classmethod
    def new(cls, file_name=None, data_area=(0., 0., 100., 100.), scale=None,
            coordinate_system=None, media=None, layout=None, fixed_size=False,
            map_style=None, margins=None, inside_margin=1.0, overwrite=False,
            no_data_view=False):
        """
        Create and open a new Geosoft map.

        :parameters:
            :file_name:         Map file name. If not specified a temporary file is created in the
                                instance temporary folder.  Use ``file_name()`` to get the file name
                                if needed.  The temporary map file will be unique and will exist through
                                the life of the Python GX instance, but will be deleted along with all
                                temporary files when the GX loses context.
            :data_area:         (min_x, min_y, max_x, max_y) data area for a 2D data view on the map
            :scale:             required scale, default will fit data to the map media
            :coordinate_system: coordinate system, default is an unknown coordinate system.  You may pass
                                a :class:`geosoft.gxpy.coordinate_system.Coordinate_system` instance, a
                                string descriptor, such as '`WGS 84 / UTM zone 15N`', or another valid
                                constructor supported by
                                :class:`geosoft.gxpy.coordinate_system.Coordinate_system`.
            :media:             media size as a tuple (x_cm, y_cm), or as a standard media name string.
                                If the media string contains 'portrait', the media aspect will be portrait.
                                Named media sizes are read from media.csv, which includes A4, A3, A2, A1, A0,
                                letter, legal, ledger, A, B, C, D, E.  For example `media='A4 portrait'`.
            :layout:            MAP_PORTRAIT or MAP_LANDSCAPE, overrides media setting.  If the layout is not
                                defined by media or this parameter, the layout is determined by the aspect
                                ratio of the data area.
            :map_style:         STYLE_FIGURE or STYLE_MAP (default).  A MAP style is intended for A3 or larger
                                media with a larger right or left margin for map annotations.  A FIGURE style
                                is intended for smaller media with a larger bottom margin for a title and
                                limited annotations.
            :fixed_size:        True for fixed media size, if, and only if, a media size is defined.
                                If False, the base view boundary will be reduced to the data view plus margins.
                                If True, the base view boundary is fixed to the media size and margins are
                                adjusted to locate the data view proportionally relative to the requested margins.
            :margins:           (left, right, bottom, top) map margins in map cm.  The default for STYLE_MAP
                                is (3, 14, 6, 3), and for figure (1, 4, 1, 1).
            :inside_margin:     additional margin (cm) inside the base view.  This margin effectively expands
                                the data_area to allow room for graphical elements related to spatial data
                                near the edge of the defined data area.
            :overwrite:         `True` to overwrite map file should it exist
            :no_data_view:      True to create a map without a 'data' view.  Use
                                :class:`geosoft.gxpy.view.View` and :class:`geosoft.gxpy.view.View_3d`
                                to add data views to a map.

        .. versionadded:: 9.2
        """

        def data_window_on_map():
            # available data window (cm) = media minus outer margins minus the
            # inside margin on both sides
            mx = media[0] - m_left - m_right
            my = media[1] - m_bottom - m_top
            im = inside_margin * 2
            return mx - im, my - im  # data window on map cm

        def set_registry(map, style, inside_margin):
            # persist the style choices in the map registry for later readers
            rd = {'MAP.STYLE': style,
                  'MAP.MARGIN_INSIDE': str(inside_margin),
                  'MAP.UP_DIRECTION': 'right',
                  'MAP.UP_ANGLE': '67.5'}
            map.gxmap.set_reg(gxu.reg_from_dict(rd))

        # a data area with zero or negative extent cannot be mapped
        if ((data_area[2] - data_area[0]) <= 0.0) or ((data_area[3] - data_area[1]) <= 0.0):
            raise MapException(_t('Invalid data area {}'.format(data_area)))

        # default layout follows the aspect ratio of the data area
        if layout is None:
            if (data_area[2] - data_area[0]) < (data_area[3] - data_area[1]):
                layout = MAP_PORTRAIT
            else:
                layout = MAP_LANDSCAPE

        if file_name is None:
            # unnamed maps live in the GX instance temporary folder
            file_name = gx.gx().temp_file('.map')
        else:
            if not overwrite:
                file_name = map_file_name(file_name)
                if os.path.isfile(file_name):
                    raise MapException(_t('Cannot overwrite existing file: "{}"').format(file_name))

        map = cls(file_name, WRITE_NEW, _internal=True)

        # a named media (e.g. 'A4') is resolved to (x_cm, y_cm) via media.csv
        if type(media) is str:
            try:
                spec = gxdf.table_record('media', media.upper())
                media = (float(spec['SIZE_X']), float(spec['SIZE_Y']))
            except:
                # unknown media name: fall back to size-from-scale/default below
                media = None
                fixed_size = False

        if map_style is None:
            map_style = STYLE_FIGURE

        if media is None:
            fixed_size = False
            if scale:
                # crazy large media, will be trimmed to the scaled data below
                media = (5000., 4000.)
                # NOTE(review): margin defaulting reconstructed as nested under
                # 'if scale:' from the mangled original - confirm against upstream.
                if margins is None:
                    if map_style == STYLE_MAP:
                        margins = (1.5, 14.0, 5.0, 1.5)
                    else:
                        margins = (1.0, 1.0, 4.0, 1.0)
            else:
                media = (50., 40.)

        # honour a portrait layout by swapping a landscape-oriented media size
        if (layout == MAP_PORTRAIT) and (media[0] > media[1]):
            media = (media[1], media[0])

        if margins:
            m_left, m_right, m_bottom, m_top = margins
        else:
            # derive margins from media size and map style
            mx, my = media
            if map_style == STYLE_MAP:
                if mx <= 30.0:
                    raise MapException(_t('\'map\' style requires minimum 30cm media. Yours is {}cm'.format(mx)))
                m_left = max(1.5, mx * 0.025)
                m_right = max(14.0, mx * 0.15)
                m_bottom = max(5.0, my * 0.1)
                m_top = max(1.5, my * 0.025)
            else:
                m_left = max(1.0, mx * 0.04)
                m_right = max(1.0, mx * 0.04)
                m_bottom = max(4.0, my * 0.15)
                m_top = max(1.0, my * 0.04)

        if scale is None:
            # determine largest scale to fit the media
            mx, my = data_window_on_map()
            sx = (data_area[2] - data_area[0]) * 100.0 / mx
            sy = (data_area[3] - data_area[1]) * 100.0 / my
            scale = float(gxu.str_significant(max(sx, sy), 4, 1))

        if fixed_size:
            # centre the data window within the fixed media by padding margins
            mx, my = data_window_on_map()
            x_adjust = max(0., (mx - ((data_area[2] - data_area[0]) * 100.0 / scale)) * 0.5)
            y_adjust = max(0., (my - ((data_area[3] - data_area[1]) * 100.0 / scale)) * 0.5)
            m_left += x_adjust
            m_right += x_adjust
            m_bottom += y_adjust
            m_top += y_adjust

        # ensure the data fits on this media (0.01 cm tolerance for rounding)
        mx, my = data_window_on_map()
        dmx = (data_area[2] - data_area[0]) * 100.0 / scale
        dmy = (data_area[3] - data_area[1]) * 100.0 / scale
        if (mx - dmx) < -0.01 or (my - dmy) < -0.01:
            raise MapException(_t('The data does not fit media ({},{})cm at a scale of 1:{}')
                               .format(media[0], media[1], scale))

        # create the base and (optionally) data views on the map
        gxapi.GXMVU.mapset(map.gxmap, 'base', '' if no_data_view else 'data',
                           data_area[0], data_area[2], data_area[1], data_area[3],
                           '{},{}'.format(media[0] + 50.0, media[1] + 50.0),
                           layout, 0, scale, gxapi.rDUMMY,
                           m_left, m_right, m_bottom, m_top,
                           float(inside_margin))

        # stamp the requested coordinate system onto the data view
        with gxv.View.open(map, '*data') as view:
            view.coordinate_system = coordinate_system

        set_registry(map,
                     'figure' if (map_style == STYLE_FIGURE) else 'map',
                     inside_margin)

        map._make_base_mm()

        return map

    @property
    def gxmap(self):
        """ The :class:`geosoft.gxapi.GXMAP` instance handle."""
        return self._gxmap

    @property
    def name(self):
        """map name, which is to root name of the map file"""
        return self._name

    @property
    def file_name(self):
        """ full map file path name.
""" return self._file_name def _init_metadata(self): if not self._metadata: self._metadata = gxu.geosoft_metadata(self._file_name) self._metadata_root = tuple(self._metadata.items())[0][0] @property def metadata(self): """ Return the map file metadata as a dictionary. Can be set, in which case the dictionary items passed will be added to, or replace existing metadata. .. versionadded:: 9.2 """ self._init_metadata() return self._metadata[self._metadata_root] @metadata.setter def metadata(self, meta): self._init_metadata() self._metadata[self._metadata_root] = gxu.merge_dict(self._metadata[self._metadata_root], meta) self._metadata_changed = True @property def current_data_view(self): """ Name of the current default data view which accepts drawing groups from Geosoft methods that do not explicitly identify a view. Set this to a view name that should accept default drawing groups. If this is a 3D view, new 2D groups are placed on the default drawing plane of the view. """ return self.get_class_name('data') @current_data_view.setter def current_data_view(self, s): if not self.has_view(s): raise MapException(_t('Map does not contain a view named "{}"').format(s)) self.gxmap.set_class_name('data', s) @property def current_base_view(self): """ Name of the current default base view which accepts map annotation drawing groups (like titles, North arrow, etc.) from Geosoft methods. This can be set, though Geosoft uses the 'base' view in most standard cases. """ return self.get_class_name('base') @current_base_view.setter def current_base_view(self, s): if not self.has_view(s): raise MapException(_t('Map does not contain a view named "{}"').format(s)) self.gxmap.set_class_name('base', s) @property def current_section_view(self): """ Name of the current default section view which accepts drawing commands to a section from Geosoft methods. Can be set. 
""" return self.get_class_name('section') @current_section_view.setter def current_section_view(self, s): if not self.has_view(s): raise MapException(_t('Map does not contain a view named "{}"').format(s)) self.gxmap.set_class_name('section', s) @property def map_scale(self): """ Map scale, based on the current default "data" view if it exists. WARNING: If the "data" view is open this returns 1000. TODO - add test for open views. Can be set, but must not be called if any views are open. Resetting the map scale changes the scale of all views in the map. """ return self.gxmap.get_map_scale() @map_scale.setter def map_scale(self, scale): if scale > 0: self.gxmap.set_map_scale(scale) def close(self): """ Close the map and release resources. """ self._close() def remove_on_close(self, remove=True): """ :param remove: if True (the default), remove the map file when finished. """ self._remove = remove def commit_changes(self): """Commit changes to the map.""" self.gxmap.commit() def _make_base_cm(self): if self.has_view('*base'): with gxv.View.open(self, '*base') as view: ex_cm = view.extent_map_cm() view.locate(gxcs.Coordinate_system('cm'), map_location=(0,0), area=(0, 0, ex_cm[2], ex_cm[3]), scale=1.0) pass def _make_base_mm(self): if self.has_view('*base'): with gxv.View.open(self, '*base') as view: ex_cm = view.extent_map_cm() view.locate(gxcs.Coordinate_system('mm'), map_location=(0, 0), area=(0, 0, ex_cm[2] * 10., ex_cm[3] * 10.), scale=1.0) pass def extent_data_views(self): """ Returns the extent of all data views on the map in map cm. .. 
versionadded:: 9.2 """ def extents(ex, ext): ex = (min(ex[0], ext[0]), min(ex[1], ext[1]), max(ex[2], ext[2]), max(ex[3], ext[3])) return ex vlist = self.view_list ex = (1.0e10, 1.0e10, -1.0e10, -1.0e10) base_view = self.classview('*base').lower() for view_name in vlist: if view_name.lower() != base_view: with gxv.View.open(self, view_name) as v: ex = extents(ex, v.extent_map_cm(v.extent_clip)) if ex[0] == 1.0e10: raise MapException(_t('Map "{}" has no data views.').format(self.name)) return ex def classview(self, name): """ Given a view name that may be a class name ('*' prefix), return the view name for that class. if not class decorated, the name passed is returned. :param name: view name, `'*data'` will return the name associated with the `'data'` class, while `'my_view'` will return `'my_view'`. :returns: the name, or if a class name, the view name associated with that class. .. versionadded: 9.2 """ if name[0] != '*': return name return self.get_class_name(name[1:]) def _views(self, view_type=LIST_ALL): """ Return dictionary of view names. :param view_type: `gxmap.LIST_ALL`, `gxapi.LIST_2D` or `gxapi.LIST_3D` :returns: list of views """ glst = gxapi.GXLST.create(VIEW_NAME_SIZE) self.gxmap.view_list_ex(glst, view_type) return list(gxu.dict_from_lst(glst)) @property def view_list(self): """list of views in the map, both 2D and 3D""" return self._views() @property def view_list_2D(self): """list of 2D views in the map""" return self._views(LIST_2D) @property def view_list_3D(self): """list of 3D views in the map""" return self._views(LIST_3D) def aggregate_list(self, mode=0): """ List of all aggregates in the map as 'view_name/group_name' (mode=0) or 'view_name/group_name/layer' (mode=1). 
..versionadded:: 9.2 """ glst = gxapi.GXLST.create(gxg.GROUP_NAME_SIZE) self.gxmap.agg_list_ex(glst, mode, 0) return list(gxu.dict_from_lst(glst)) def has_view(self, view): """ Returns True if the map contains this view.""" return self.gxmap.exist_view(self.classview(view)) def copy_view(self, old, new, overwrite=False, copy_all=True): """ Copy an existing view into a new view. :param old: name of the existing view :param new: name for the new view :param overwrite: True to overwrite an existing view if it exists :param copy_all: True to copy content of old to new, false to create an empty new view with the same coordinate system, scale and clipping as the old view. .. versionadded:: 9.2 """ old = self.classview(old) new = self.classview(new) if not self.has_view(old): raise MapException(_t('"{}" view does not exist.').format(old)) if self.has_view(new): if overwrite: self.gxmap.delete_view(new) else: raise MapException(_t('Cannot overwtite existing view "{}"').format(new)) s = gxapi.str_ref() s.value = new self.gxmap.duplicate_view(old, s, copy_all) if s.value != new: self.gxmap.delete_view(new) raise MapException(_t('Invalid view name "{}", suggest "{}"').format(new, s.value)) def delete_view(self, name): """ Delete a view from a map. You cannot delete the last view in a map. :param name: name of the view to delete .. versionadded:: 9.2 """ self.gxmap.delete_view(self.classview(name)) def mdf(self): """ Returns the Map Description File specification for maps that contain both a base view and a data view: ((x_size, y_size, margin_bottom, margin_right, margin_top, margin_left), (scale, units_per_metre, x_origin, y_origin)) .. 
        versionadded: 9.2
        """
        # NOTE(review): `views` is assigned but never used — candidate for removal.
        views = self.view_list_2D
        if not (self.has_view(self.current_data_view) and self.has_view(self.current_base_view)):
            raise MapException(_t('The map must have both a base view and a data view.'))
        with gxv.View.open(self, self.current_base_view, read_only=True) as base:
            with gxv.View.open(self, self.current_data_view, read_only=True) as data:
                mdf = data.mdf(base_view=base)
        return mdf

    def get_class_name(self, view_class):
        """
        Get the view name associated with a class.

        :param view_class:  desired class

        Common view class names are::

            'base'      the base map/figure view, uses map cm
            'data'      the default data view for drawing spatial data.
            'section'   the default section view for things drawn in section

        Other class names may be defined, though they are not used by Geosoft.

        :returns: view name associated with the class, '' if not defined.

        .. versionadded:: 9.2
        """
        sr = gxapi.str_ref()
        self.gxmap.get_class_name(view_class, sr)
        # normalized to lower-case for case-insensitive comparisons elsewhere
        return sr.value.lower()

    def set_class_name(self, view_class, name):
        """
        Set the view name associated with a class.

        :param view_class:  class name
        :param name:        name of the view associated with this class.

        Common view class names are::

            'base'      the base map/figure view, uses map cm
            'data'      the default data view for drawing spatial data.
            'section'   the default section view for things drawn in section

        .. versionadded:: 9.2
        """
        self.gxmap.set_class_name(view_class, name)

    def create_linked_3d_view(self, view, name='3D', area_on_map=(0, 0, 300, 300)):
        """
        Create a linked 3D view inside a 2D map to a `geosoft.gxpy.view.View_3d` in a 3DV

        :param view:        `geosoft.gxpy.view.View_3d` instance
        :param name:        name of the linked view to create
        :param area_on_map: (min_x, min_y, max_x, max_y) placement of view on map in mm

        .. versionadded:: 9.2
        """
        self.gxmap.create_linked_3d_view(view.gxview, name,
                                         area_on_map[0], area_on_map[1],
                                         area_on_map[2], area_on_map[3])

    def image_file(self, imagefile=None, type=RASTER_FORMAT_PNG, pix_width=1000,
                   pix_height=0, pix_32_bit=False):
        """
        Save a map to an image file

        :param imagefile:   name of the output raster file, default will be a temporary png file.
        :param type:        one of the RASTER_FORMAT types, default `RASTER_FORMAT_PNG`
        :param pix_width:   image pixel width, if 0 use pix_height only
        :param pix_height:  image pixel height, if 0 use pix_width only
        :param pix_32_bit:  make 32-bit image (with 8-bit alpha background)
        :returns:           image file name

        .. versionadded:: 9.3
        """
        if imagefile is None:
            # a default output is always PNG, regardless of the requested type
            imagefile = gx.gx().temp_file('.png')
            type = RASTER_FORMAT_PNG
        self.gxmap.export_all_raster(imagefile, '',
                                     pix_width, pix_height, gxapi.rDUMMY,
                                     gxapi.MAP_EXPORT_BITS_32 if pix_32_bit else gxapi.MAP_EXPORT_BITS_24,
                                     gxapi.MAP_EXPORT_METHOD_NONE,
                                     type, '')
        return imagefile

    def crc_image(self, pix_width=1000):
        """
        Return the CRC of a map based on the output bitmap image.

        :param pix_width:   image pixel width - use a higher resolution to test more detail
        :returns:           CRC as an int

        .. versionadded:: 9.3
        """
        # render to a temporary BMP, CRC the bytes, then discard the file
        crc_image = gx.gx().temp_file('.bmp')
        self.image_file(crc_image, type=RASTER_FORMAT_BMP, pix_width=pix_width)
        crc = gxu.crc32_file(crc_image)
        gxgrd.delete_files(crc_image)
        return crc

    @classmethod
    def figure(cls, data_area, file_name=None, overwrite=False, title=None,
               features=('SCALE', 'NEATLINE', 'ANNOT_XY'), **kwargs):
        """
        Create a figure-style map.

        :param data_area:   the area extend for the data view as (xmin, ymin, xmax, ymax)
        :param file_name:   map file name, default creates a temporary map
        :param overwrite:   `True` to overwrite file should it exist
        :param title:       figure title
        :param features:    list of features to place on the map, default is
                            ('SCALE', 'NEATLINE', 'ANNOT_XY')

                            =========== =========================================
                            'ALL'       all features
                            'SCALE'     show a scale bar
                            'NEATLINE'  draw a neat-line around the image
                            'ANNOT_XY'  annotate map coordinates
                            'ANNOT_LL'  annotate map Latitude, Longitude
                            =========== =========================================

        :return:            `Map` instance with 'base' and 'data' views.

        .. seealso:: `Map.new` arguments to modify map layout requirements

        .. versionadded:: 9.3
        """

        # uppercase features, use a dict so we pop things we use and report error
        if isinstance(features, str):
            features = (features,)
        feature_list = {}
        if features is not None:
            for f in features:
                feature_list[f.upper()] = None
        if 'ALL' in feature_list:
            feature_list = {'SCALE': None, 'NEATLINE': None, 'ANNOT_LL': None, 'ANNOT_XY': None}

        if not 'margins' in kwargs:
            # grow the bottom/right margins to make room for the requested furniture
            bottom_margin = 1
            if title:
                bottom_margin += 1
            if 'SCALE' in feature_list:
                bottom_margin += 1.2
            right_margin = 1
            if 'LEGEND' in feature_list:
                right_margin += 3.5
            kwargs['margins'] = (1, right_margin, bottom_margin, 1)

        if not 'media' in kwargs:
            kwargs['media'] = 'A4'

        if not 'inside_margin' in kwargs:
            kwargs['inside_margin'] = 0.2

        # data area adjustment: pad the narrow axis so the aspect ratio is no more
        # extreme than 2:3 (0.67) either way
        data_area = list(data_area)
        dx = data_area[2] - data_area[0]
        dy = data_area[3] - data_area[1]
        if dx < dy * 0.67:
            d = (dy * 0.67 - dx) * 0.5
            data_area[0] -= d
            data_area[2] += d
        elif dy < dx * 0.67:
            d = (dx * 0.67 - dy) * 0.5
            data_area[1] -= d
            data_area[3] += d
        kwargs['data_area'] = data_area  # over-ride

        gmap = Map.new(file_name, overwrite=overwrite, **kwargs)

        if 'ANNOT_XY' in feature_list:
            gmap.annotate_data_xy(grid=GRID_CROSSES)

        if 'ANNOT_LL' in feature_list:
            gmap.annotate_data_ll(grid=GRID_LINES,
                                  grid_pen='b255r100g100t150',
                                  text_def=gxg.Text_def(height=0.18, italics=True))

        if 'SCALE' in feature_list:
            gmap.scale_bar(location=(2, 0, 1.2), sections=2, text_def=gxg.Text_def(height=0.15))
            # NOTE(review): bottom=15 appears to be base-view mm; confirm units against the
            # title placement below (`bottom + 5`).
            bottom = 15
        else:
            bottom = 0

        if 'NEATLINE' in feature_list:
            gmap.surround()

        # NOTE(review): this outer "data" view is opened but not used, and `v` is shadowed
        # by the inner "base" view — candidate for cleanup.
        with gxv.View.open(gmap, "data") as v:

            # map title
            if title:
                with gxv.View.open(gmap, "base") as v:
                    with gxg.Draw(v, 'annotations') as g:
                        x = (v.extent_clip[2] - v.extent_clip[0]) / 2
                        g.text(title,
                               reference=1,
                               location=(x, bottom + 5),
                               text_def=gxg.Text_def(height=3.5,
                                                     weight=gxg.FONT_WEIGHT_BOLD))

        return gmap

    def surround(self, outer_pen=None, inner_pen=None, gap=0):
        """
        Draw a map surround.  This will draw a single or a double neat-line around the
        base view of the map.

        :param outer_pen:   outer-line pen attributes (cm)
        :param inner_pen:   inner-line pen attributes (cm)
        :param gap:         gap between the outer and inner line in cm.  If 0, only the outer
                            line is drawn.

        .. versionadded:: 9.2
        """
        if outer_pen is None:
            outer_pen = gxg.Pen(line_thick=0.05)
        elif isinstance(outer_pen, str):
            outer_pen = gxg.Pen.from_mapplot_string(outer_pen)
            outer_pen.line_thick = outer_pen.line_thick / 10.0  # to cm

        if inner_pen is None:
            inner_pen = gxg.Pen(line_thick=outer_pen.line_thick * 0.5)
        elif isinstance(inner_pen, str):
            inner_pen = gxg.Pen.from_mapplot_string(inner_pen)
            inner_pen.line_thick = inner_pen.line_thick / 10.0  # to cm

        with _Mapplot(self) as mpl:
            mpl.start_group('surround', view=VIEW_BASE, mode=GROUP_APPEND)
            mpl.define_named_attribute('outer', pen=outer_pen)
            if gap <= 0:
                inner = ''
                gap = 0
            else:
                # NOTE(review): inner_pen can no longer be None here (defaulted above),
                # so this branch looks unreachable — candidate for cleanup.
                if inner_pen is None:
                    inner_pen = gxg.Pen(line_thick=0.01)  # cm
                inner = 'inner'
                mpl.define_named_attribute(inner, pen=inner_pen)
            mpl.command('SURR "{}",{},"{}"'.format('outer', gap, inner))

    def north_arrow(self, location=(1, 2., 2.7), direction=None, length=3,
                    inclination=None, declination=None, text_def=None, pen=None):
        """
        Add a North arrow to the base view of the map.
        :param location:    (reference, x_offset, y_offset) reference is a reference point relative to
                            the base map extents (1 through 9) nd the offsets are the offset from that
                            reference point in map cm.
        :param direction:   North direction in degrees azimuth (clockwise from map Y axis).  The
                            efault is calculated direction of North at the center of the data view.
        :param length:      arrow length in cm on the map.
        :param inclination: magnetic inclination, not shown if not specified
        :param declination: magnetic declination, not shown if not specified
        :param text_def:    :class:`geosoft.gxpy.group.Text_def` instance, or `None` for the default.
        :param pen:         :class:`geosoft.gxpy.group.Pen` instance, or `None` for the default

        .. versionadded:: 9.2
        """
        if direction is None:
            # default: true-North direction measured at the data view
            with gxv.View.open(self, '*data') as v:
                direction = round(v.gxview.north(), 1)
                if direction == gxapi.rDUMMY:
                    # North is unknown (no known coordinate system) — draw without a direction
                    direction = ''

        if inclination is None:
            inclination = ''

        if declination is None:
            declination = ''

        if pen is None:
            pen = gxg.Pen(line_thick=0.015)

        if text_def is None:
            text_def = gxg.Text_def(height=0.25, italics=True, weight=gxg.FONT_WEIGHT_LIGHT)

        with _Mapplot(self) as mpl:
            mpl.start_group('north_arrow', view=VIEW_BASE, mode=GROUP_APPEND)
            mpl.define_named_attribute('arrow', pen=pen)
            mpl.define_named_attribute('annot', text_def=text_def)
            # MAPPLOT NARR command: location, direction, length, attribute, incl, decl
            mpl.command("NARR {},{},{},{},{},{},{},{}".format(location[0], location[1], location[2],
                                                              direction,
                                                              length,
                                                              'arrow',
                                                              inclination,
                                                              declination))
            mpl.command(' annot')

    def scale_bar(self, location=(1, 5, 2), length=5, sections=None, post_scale=False,
                  text_def=None, pen=None):
        """
        Draw a scale bar.

        :param location:    (ref_point, x_off, y_off) bar location reference point an offset from that point
        :param length:      maximum scale bar length in map cm, default is 5 cm. 0 will suppress drawing of the bar.
        :param sections:    number of major sections in the bar, default is determined automatically.
        :param post_scale:  True to post the actual scale as a string, e.g. '1:50,000'.  Note that a posted
                            scale is only relevant for printed maps.  The default does not post the scale.
        :param text_def:    :class:`geosoft.gxpy.view.Text_def` instance.
        :param pen:         :class:`geosoft.gxpy.view.Pen` instance.

        .. versionadded:: 9.2
        """
        if sections is None:
            sections = ''

        # MAPPLOT SCAL option: 1 = bar only, 2 = bar plus posted scale string
        if post_scale:
            option = 2
        else:
            option = 1

        if text_def is None:
            text_def = gxg.Text_def(height=0.25, weight=gxg.FONT_WEIGHT_LIGHT, italics=True)

        if pen is None:
            pen = gxg.Pen(line_thick=0.001)

        with _Mapplot(self) as mpl:
            mpl.start_group('scale_bar', view=VIEW_BASE, mode=GROUP_APPEND)
            mpl.define_named_attribute('scale_text', text_def=text_def)
            mpl.define_named_attribute('scale_bar', pen=pen)
            mpl.command("SCAL {},{},{},,,{},{},,{},".format(location[0], location[1], location[2],
                                                            length, sections, option))
            mpl.command(' scale_text')

    def _annotation_offset(self, offset, text_height):
        # Compute the posting offset (cm) for the next edge annotation and advance the
        # running outer edge so a subsequent annotation call lands outside this one.
        inside = text_height * 0.25
        if offset:
            offset = offset + inside
        else:
            offset = self._annotation_outer_edge + inside
        self._annotation_outer_edge += offset + text_height + inside * 0.5
        return offset

    def annotate_data_xy(self, view_name='*data', tick='', offset='',
                         x_sep='', x_dec='', y_sep='', y_dec='',
                         compass=None, top=TOP_OUT,
                         text_def=None, edge_pen=None,
                         grid=GRID_NONE, grid_pen=None):
        """
        Annotate a data view axis

        :param view_name:   name of the data view to annotate
        :param tick:        inner tick size in cm
        :param offset:      posting offset from the edge in map cm. The posting edge is adjusted to be
                            outside character height for a subsequent call to an edge annotation.
                            This allows one to annotate both geographic and projected coordinates.
        :param top:         TOP_IN or TOP_OUT (default) for vertical annotations
        :param x_sep:       separation between X annotations, default is calculated from data
        :param x_dec:       X axis label decimals, default is 0
        :param y_sep:       separation between Y annotations, default is calculated from data
        :param y_dec:       Y axis label decimals, default is 0
        :param compass:     True to append compass direction to annotations, default True if known
                            coordinate system.
        :param grid:

                            ::

                                GRID_NONE       no grid
                                GRID_DOTTED     dotted lines
                                GRID_CROSSES    crosses at intersections
                                GRID_LINES      lines

        :param text_def:    `geosoft.gxpy.group.Text_def`
        :param edge_pen:    `geosoft.gxpy.group.Pen`
        :param grid_pen:    `geosoft.gxpy.group.Pen`

        .. versionadded:: 9.2
        """

        if text_def is None:
            text_def = gxg.Text_def(height=0.18)
        if edge_pen is None:
            edge_pen = gxg.Pen()
        if grid_pen is None:
            grid_pen = edge_pen

        # the annotated view temporarily becomes the current data view; restored in finally
        current_view = self.current_data_view
        view_name = self.classview(view_name)
        self.current_data_view = view_name

        try:
            offset = self._annotation_offset(offset, text_def.height)

            with gxv.View.open(self, view_name) as v:
                with gxg.Draw(v) as g:
                    g.rectangle(v.extent_clip, pen=gxg.Pen(default=edge_pen, factor=v.units_per_map_cm))

                # if view has a known coordinate system, use compass annotations
                if compass is None:
                    compass = gxcs.is_known(v.coordinate_system)

            with _Mapplot(self) as mpl:
                mpl.start_group(view_name + '_edge', 1, view_name)
                if not tick and grid == GRID_LINES:
                    tick = 0.0
                mpl.define_named_attribute('annot', text_def=text_def,
                                           pen=gxg.Pen(line_color=text_def.color,
                                                       line_thick=text_def.line_thick))
                mpl.define_named_attribute(pen=edge_pen)
                mpl.command("ANOX ,,,,,{},{},,{},,,,{},{},1".format(x_sep, tick,
                                                                    0 if compass else -1, offset, x_dec))
                mpl.command(' annot')
                mpl.command("ANOY ,,,,,{},{},,{},{},,,{},{},1".format(y_sep, tick,
                                                                      0 if compass else -1, top, offset, y_dec))
                mpl.command(' annot')
                if grid:
                    mpl.define_named_attribute(pen=grid_pen)
                    mpl.command("GRID {},,,,,_".format(grid))

        except:
            raise

        finally:
            self.current_data_view = current_view

    def annotate_data_ll(self, view_name='*data', tick='', offset='', sep='', top=TOP_OUT,
                         text_def=None, edge_pen=None, grid=GRID_LINES, grid_pen=None):
        """
        Annotate the data view axis

        :param view_name:   name of the data view to annotate
        :param tick:        inner tick size in cm
        :param offset:      posting offset from the edge in cm. The posting edge is adjusted to be
                            outside character height for a subsequent call to an edge annotation.
                            This allows one to annotate both geographic and projected coordinates.
        :param sep:         separation between annotations, default is calculated from data
        :param top:         TOP_IN or TOP_OUT (default) for vertical annotations
        :param grid:

                            ::

                                GRID_NONE       no grid
                                GRID_DOTTED     dotted lines
                                GRID_CROSSES    crosses at intersections
                                GRID_LINES      lines

        :param text_def:    `geosoft.gxpy.group.Text_def`, units cm
        :param edge_pen:    `geosoft.gxpy.group.Pen`, units cm
        :param grid_pen:    `geosoft.gxpy.group.Pen`, units cm

        .. versionadded:: 9.2
        """
        if text_def is None:
            text_def = gxg.Text_def(height=0.18)
        if edge_pen is None:
            edge_pen = gxg.Pen(factor=0.1)
        if grid_pen is None:
            grid_pen = edge_pen

        # the annotated view temporarily becomes the current data view; restored in finally
        current_view = self.current_data_view
        view_name = self.classview(view_name)
        self.current_data_view = view_name

        try:
            offset = self._annotation_offset(offset, text_def.height)

            with gxv.View.open(self, view_name) as v:
                with gxg.Draw(v) as g:
                    pen = gxg.Pen(default=edge_pen, factor=v.units_per_map_cm)
                    g.rectangle(v.extent_clip, pen=pen)

            with _Mapplot(self) as mpl:
                mpl.start_group(view_name + '_edge', 1, view_name)
                if not tick and grid == GRID_LINES:
                    tick = 0.0
                mpl.define_named_attribute('annot', text_def=text_def,
                                           pen=gxg.Pen(line_color=text_def.color,
                                                       line_thick=text_def.line_thick))
                mpl.define_named_attribute(pen=edge_pen)
                # ALON/ALAT: longitude/latitude edge annotations; "GRID -n" draws a geographic grid
                mpl.command("ALON {},{},{},,1".format(sep, tick, offset))
                mpl.command(' annot')
                mpl.command("ALAT {},{},{},,,{}".format(sep, tick, offset, top))
                mpl.command(' annot')
                if grid:
                    mpl.define_named_attribute(pen=grid_pen)
                    mpl.command("GRID -{},,,,,_".format(grid))

        except:
raise finally: self.current_data_view = current_view def export_geotiff(self, geotiff, dpi=96): """ Export map as a GeoTIFF image :param geotiff: GeoTIFF file name :param dpi: resolution in dots-per-inch, default is common screen resolution of 96 dpi. .. versionadded:: 9.3 """ self.gxmap.export_all_in_view(geotiff, "*data", gxapi.rDUMMY, dpi, gxapi.MAP_EXPORT_BITS_24, gxapi.MAP_EXPORT_METHOD_NONE, gxapi.MAP_EXPORT_FORMAT_GTIFF, "") class _Mapplot: """Internal class to marshal MAPPLOT commands to support basic map annotations.""" def __enter__(self): return self def __exit__(self, xtype, xvalue, xtraceback): self.__del__() def __del__(self): if hasattr(self, '_process'): self._process() def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return "mapplot({})".format(self._map.file_name) def __init__(self, map, data_view=None, ref_prefix='', **kwargs): if not (map.has_view(map.current_base_view) and map.has_view(map.current_data_view)): raise MapException(_t("Map must have a '*base' and '*data' view.")) self._map = map self._ref_pre = ref_prefix if data_view: self.prior_data_view = map.current_data_view map.current_data_view = data_view else: self.prior_data_view = None # mapplot control file self._maplfile_name = os.path.join(gx.gx().temp_folder(), 'mapl_' + gxu.uuid() + ".con") self._maplfile = open(self._maplfile_name, "w") self._annotation_outer_edge = 0.0 self._open = gx.track_resource(self.__class__.__name__, self._maplfile_name) self.define_named_attribute() def _process(self, pop=True): if self._maplfile: self._maplfile.close() self._maplfile = None gxmapl = gxapi.GXMAPL.create(self._maplfile_name, self._ref_pre, 0) gxmapl.process(self._map.gxmap) os.remove(self._maplfile_name) if self.prior_data_view: self.map.current_data_view = self.prior_data_view if pop: gx.pop_resource(self._open) def command(self, command): self._maplfile.write(command) if command and command[-1] != '\n': self._maplfile.write('\n') # 
geosoft.gxpy.gx.gx().log(command) def define_named_attribute(self, name='_', pen=None, text_def=None): if (pen is None) and (text_def is None): self.command("DATT {}".format(name)) else: if pen is None: pen = gxg.Pen(line_color=text_def.color, line_thick=text_def.line_thick) elif isinstance(pen, str): pen = gxg.Pen.from_mapplot_string(pen) pen.line_thick = pen.line_thick * 0.1 # to cm ls = pen.line_style lp = pen.line_pitch pen.line_thick = pen.line_thick * 10.0 # to mm for mapplot_string penstr = pen.mapplot_string pen.line_thick = pen.line_thick / 10.0 # back to cm if text_def is None: textstr = '' else: textstr = text_def.mapplot_string self.command("DATT {}={},{},{},{}".format(name, penstr, ls, lp, textstr)) def start_group(self, name, mode=GROUP_NEW, view=VIEW_BASE): """ Start a view group, or append to an existing group. Graphic entities can be organized into named groups, which appear as separate components that can be managed within a Geosoft viewer. :param name: Group name (required). :param mode: GROUP_NEW (default) or GROUP_APPEND. GROUP_NEW relaces an existing group, and the content of an existing group will be deleted. :param view: VIEW_BASE or VIEW_DATA. Coordinates in the base view are map cm, and coordionates in the data view are in data view units. .. versionadded:: 9.2 """ if type(view) is str: if view.lower() == 'base': view = VIEW_BASE else: view = VIEW_DATA self.command('MGRP {},{},{}'.format(name, mode, view)) <file_sep>/geosoft/gxapi/GXMULTIGRID3D.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXGRID3D import GXGRID3D from .GXPG import GXPG ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXMULTIGRID3D(gxapi_cy.WrapMULTIGRID3D): """ GXMULTIGRID3D class. High Performance 3D Grid. """ def __init__(self, handle=0): super(GXMULTIGRID3D, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` :returns: A null `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` :rtype: GXMULTIGRID3D """ return GXMULTIGRID3D() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def open(cls, name): """ Opens an existing Multivoxset :param name: File Name :type name: str :returns: `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` handle, terminates if creation fails :rtype: GXMULTIGRID3D .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMULTIGRID3D._open(GXContext._get_tls_geo(), name.encode()) return GXMULTIGRID3D(ret_val) @classmethod def modify(cls, name): """ Opens an existing Multivoxset with an plan to modify it :param name: File Name :type name: str :returns: `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` handle, terminates if creation fails :rtype: GXMULTIGRID3D .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMULTIGRID3D._modify(GXContext._get_tls_geo(), name.encode()) return GXMULTIGRID3D(ret_val) @classmethod def create(cls, name, size_x, size_y, size_z): """ Creates a new Multivoxset :param name: File Name :param size_x: Size in X. :param size_y: Size in Y. :param size_z: Size in Z. :type name: str :type size_x: int :type size_y: int :type size_z: int :returns: `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` handle, terminates if creation fails :rtype: GXMULTIGRID3D .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapMULTIGRID3D._create(GXContext._get_tls_geo(), name.encode(), size_x, size_y, size_z) return GXMULTIGRID3D(ret_val) def duplicate(self, name): """ Creates an MULTIGRID3D with identical geometry to the input :param name: File Name :type name: str :returns: `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` handle, terminates if creation fails :rtype: GXMULTIGRID3D .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._duplicate(name.encode()) return GXMULTIGRID3D(ret_val) def get_default(self): """ Get the default voxset :returns: `GXGRID3D <geosoft.gxapi.GXGRID3D>` handle, terminates if creation fails :rtype: GXGRID3D .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_default() return GXGRID3D(ret_val) def create_default(self, type): """ Get the default voxset :param type: :ref:`GRID3D_TYPE` :type type: int :returns: `GXGRID3D <geosoft.gxapi.GXGRID3D>` handle, terminates if creation fails :rtype: GXGRID3D .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._create_default(type) return GXGRID3D(ret_val) def is_uniform_cell_size_x(self): """ Is the cell uniform in the X direction :rtype: bool .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_uniform_cell_size_x() return ret_val def is_uniform_cell_size_y(self): """ Is the cell uniform in the Y direction :rtype: bool .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_uniform_cell_size_y() return ret_val def is_uniform_cell_size_z(self): """ Is the cell uniform in the Z direction :rtype: bool .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._is_uniform_cell_size_z() return ret_val def get_size_x(self): """ Get the number of cells in the X direction :rtype: int .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_size_x() return ret_val def get_size_y(self): """ Get the number of cells in the X direction :rtype: int .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_size_y() return ret_val def get_size_z(self): """ Get the number of cells in the X direction :rtype: int .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_size_z() return ret_val def get_cell_sizes_x(self, vv): """ Get the cell sizes in the X direction :param vv: X `GXVV <geosoft.gxapi.GXVV>` :type vv: GXVV .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_cell_sizes_x(vv) def get_cell_sizes_y(self, vv): """ Get the cell sizes in the Y direction :param vv: Y `GXVV <geosoft.gxapi.GXVV>` :type vv: GXVV .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_cell_sizes_y(vv) def get_cell_sizes_z(self, vv): """ Get the cell sizes in the Z direction :param vv: Z `GXVV <geosoft.gxapi.GXVV>` :type vv: GXVV .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_cell_sizes_z(vv) def set_cell_sizes_x(self, vv): """ Set the cell sizes in the X direction :param vv: X `GXVV <geosoft.gxapi.GXVV>` :type vv: GXVV .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_cell_sizes_x(vv) def set_cell_sizes_y(self, vv): """ Set the cell sizes in the Y direction :param vv: Y `GXVV <geosoft.gxapi.GXVV>` :type vv: GXVV .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_cell_sizes_y(vv) def set_cell_sizes_z(self, vv): """ Set the cell sizes in the Z direction :param vv: Z `GXVV <geosoft.gxapi.GXVV>` :type vv: GXVV .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_cell_sizes_z(vv) def get_uniform_cell_size_x(self): """ Get the uniform cell size in the X direction :rtype: float .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_uniform_cell_size_x() return ret_val def get_uniform_cell_size_y(self): """ Get the uniform cell size in the Y direction :rtype: float .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_uniform_cell_size_y() return ret_val def get_uniform_cell_size_z(self): """ Get the uniform cell size in the Z direction :rtype: float .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._get_uniform_cell_size_z() return ret_val def set_uniform_cell_size_x(self, cellsize): """ Set the uniform cell size in the X direction :param cellsize: cell size :type cellsize: float .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_uniform_cell_size_x(cellsize) def set_uniform_cell_size_y(self, cellsize): """ Get the uniform cell size in the Y direction :param cellsize: cell size :type cellsize: float .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_uniform_cell_size_y(cellsize) def set_uniform_cell_size_z(self, cellsize): """ Get the uniform cell size in the Z direction :param cellsize: cell size :type cellsize: float .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_uniform_cell_size_z(cellsize) def get_origin(self, origin_x, origin_y, origin_z): """ Get the origin :param origin_x: x :param origin_y: y :param origin_z: z :type origin_x: float_ref :type origin_y: float_ref :type origin_z: float_ref .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ origin_x.value, origin_y.value, origin_z.value = self._get_origin(origin_x.value, origin_y.value, origin_z.value) def set_origin(self, origin_x, origin_y, origin_z): """ Set the origin :param origin_x: x :param origin_y: y :param origin_z: z :type origin_x: float :type origin_y: float :type origin_z: float .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_origin(origin_x, origin_y, origin_z) def get_bounding_box(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the bounding box :param min_x: minx :param min_y: miny :param min_z: minz :param max_x: maxx :param max_y: maxy :param max_z: maxz :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_bounding_box(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) def get_volume_vectors(self, origin_x, origin_y, origin_z, X_vector_x, X_vector_y, X_vector_z, Y_vector_x, Y_vector_y, Y_vector_z, Z_vector_x, Z_vector_y, Z_vector_z): """ Get the direction of the volume :param origin_x: origin_x :param origin_y: origin_y :param origin_z: origin_z :param X_vector_x: X Vector x :param X_vector_y: X Vector y :param X_vector_z: X Vector z :param Y_vector_x: Y Vector x :param Y_vector_y: Y Vector y :param Y_vector_z: Y Vector z :param Z_vector_x: Z Vector x :param Z_vector_y: Z Vector y :param Z_vector_z: Z Vector z :type origin_x: float_ref :type origin_y: float_ref :type origin_z: float_ref :type X_vector_x: float_ref :type X_vector_y: float_ref :type X_vector_z: float_ref :type Y_vector_x: float_ref :type Y_vector_y: float_ref :type Y_vector_z: float_ref :type Z_vector_x: float_ref :type Z_vector_y: float_ref :type Z_vector_z: float_ref .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ origin_x.value, origin_y.value, origin_z.value, X_vector_x.value, X_vector_y.value, X_vector_z.value, Y_vector_x.value, Y_vector_y.value, Y_vector_z.value, Z_vector_x.value, Z_vector_y.value, Z_vector_z.value = self._get_volume_vectors(origin_x.value, origin_y.value, origin_z.value, X_vector_x.value, X_vector_y.value, X_vector_z.value, Y_vector_x.value, Y_vector_y.value, Y_vector_z.value, Z_vector_x.value, Z_vector_y.value, Z_vector_z.value) def get_oriented_data_extents(self, oriented_origin_x, oriented_origin_y, oriented_origin_z, X_vector_x, X_vector_y, X_vector_z, Y_vector_x, Y_vector_y, Y_vector_z, Z_vector_x, Z_vector_y, Z_vector_z, p1_x, p1_y, p1_z, p2_x, p2_y, p2_z): """ Get the data extents based on an orientation :param oriented_origin_x: oriented_origin_x :param oriented_origin_y: oriented_origin_y :param oriented_origin_z: oriented_origin_z :param X_vector_x: X Vector x :param X_vector_y: X Vector y :param X_vector_z: X Vector z :param Y_vector_x: Y Vector x :param Y_vector_y: Y Vector y :param Y_vector_z: Y Vector z :param Z_vector_x: Z Vector x :param Z_vector_y: Z Vector y :param Z_vector_z: Z Vector z :param p1_x: Point1 x :param p1_y: Point1 y :param p1_z: Point1 z :param p2_x: Point2 x :param p2_y: Point2 y :param p2_z: Point2 z :type oriented_origin_x: float :type oriented_origin_y: float :type oriented_origin_z: float :type X_vector_x: float :type X_vector_y: float :type X_vector_z: float :type Y_vector_x: float :type Y_vector_y: float :type Y_vector_z: float :type Z_vector_x: float :type Z_vector_y: float :type Z_vector_z: float :type p1_x: float_ref :type p1_y: float_ref :type p1_z: float_ref :type p2_x: float_ref :type p2_y: float_ref :type p2_z: float_ref .. 
versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ p1_x.value, p1_y.value, p1_z.value, p2_x.value, p2_y.value, p2_z.value = self._get_oriented_data_extents(oriented_origin_x, oriented_origin_y, oriented_origin_z, X_vector_x, X_vector_y, X_vector_z, Y_vector_x, Y_vector_y, Y_vector_z, Z_vector_x, Z_vector_y, Z_vector_z, p1_x.value, p1_y.value, p1_z.value, p2_x.value, p2_y.value, p2_z.value) def get_section_cell_sizes(self, azimuth, scale, origin_x, origin_y, origin_z, cell_size_x, cell_size_y): """ Get the cell sizes of a section :param azimuth: azimuth :param scale: scale :param origin_x: x origin :param origin_y: y origin :param origin_z: z origin :param cell_size_x: cell size in x :param cell_size_y: cell size in y :type azimuth: float :type scale: float :type origin_x: float :type origin_y: float :type origin_z: float :type cell_size_x: float_ref :type cell_size_y: float_ref .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ cell_size_x.value, cell_size_y.value = self._get_section_cell_sizes(azimuth, scale, origin_x, origin_y, origin_z, cell_size_x.value, cell_size_y.value) def get_vector_orientation(self, inc, dec, cell_size_y): """ Get the vector voxel orientation :param inc: inclination :param dec: declination :param cell_size_y: rotated :type inc: float_ref :type dec: float_ref :type cell_size_y: int_ref .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ inc.value, dec.value, cell_size_y.value = self._get_vector_orientation(inc.value, dec.value, cell_size_y.value) def fill(self, output_file, method, fill_value): """ Fill a grid3d. 
:param output_file: Name of the output grid3d :param method: :ref:`PGU_INTERP_ORDER` :param fill_value: Fill Value :type output_file: str :type method: int :type fill_value: float .. versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._fill(output_file.encode(), method, fill_value) def get_ipj(self, ipj): """ Get the projection of the multigrid3d. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` object :type ipj: GXIPJ .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._get_ipj(ipj) def set_ipj(self, ipj): """ Set the projection of the multigrid3d. :param ipj: `GXIPJ <geosoft.gxapi.GXIPJ>` object :type ipj: GXIPJ .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_ipj(ipj) def export_to_xyz(self, xyz, dir, rev_x, rev_y, rev_z, dummies): """ Export a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to an XYZ File :param xyz: File Name :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param dummies: Write Dummies? :type xyz: str :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type dummies: bool .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_to_xyz(xyz.encode(), dir, rev_x, rev_y, rev_z, dummies) def export_to_binary(self, binary_file, dir, rev_x, rev_y, rev_z, swap, output_type): """ Export contents of `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to a Binary File. :param binary_file: Binary file to write to :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param swap: Swap Bytes? 
:param output_type: Output Type (Geosoft Type) :type binary_file: str :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type swap: bool :type output_type: int .. versionadded:: 9.4 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._export_to_binary(binary_file.encode(), dir, rev_x, rev_y, rev_z, swap, output_type) def export_to_binary_ex(self, binary_file, dir, rev_x, rev_y, rev_z, swap, dummy, output_type): """ Export contents of `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to a Binary File, with dummy replacement. :param binary_file: Binary file to write to :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param swap: Swap Bytes? :param dummy: Replace dummy values with this value on export :param output_type: Output Type (Geosoft Type) :type binary_file: str :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type swap: bool :type dummy: float :type output_type: int .. versionadded:: 9.7 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._export_to_binary_ex(binary_file.encode(), dir, rev_x, rev_y, rev_z, swap, dummy, output_type) def export_to_xml(self, xml_file): """ Export a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` to XML :param xml_file: XML file :type xml_file: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_to_xml(xml_file.encode()) def export_to_wa(self, wa, dir, rev_x, rev_y, rev_z, dummy): """ Export To GDB :param wa: `GXWA <geosoft.gxapi.GXWA>` File :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? 
:param dummy: The Dummy string to write :type wa: GXWA :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type dummy: str .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_to_wa(wa, dir, rev_x, rev_y, rev_z, dummy.encode()) def export_to_gdb(self, db, chan, dir, rev_x, rev_y, rev_z, dummies): """ Export To GDB :param db: Database :param chan: Channel Name :param dir: :ref:`DIRECTION3D` :param rev_x: Reverse X? :param rev_y: Reverse Y? :param rev_z: Reverse Z? :param dummies: Write Dummies? :type db: GXDB :type chan: str :type dir: int :type rev_x: bool :type rev_y: bool :type rev_z: bool :type dummies: bool .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._export_to_gdb(db, chan.encode(), dir, rev_x, rev_y, rev_z, dummies) def export_to_pg(self): """ Export a MULTIGRID3D To a PG :returns: `GXPG <geosoft.gxapi.GXPG>` Object :rtype: GXPG .. versionadded:: 9.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = self._export_to_pg() return GXPG(ret_val) def get_data_extents(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the voxel size that has non-dummy data. :param min_x: Index of minimum valid data in X. :param min_y: Index of minimum valid data in Y. :param min_z: Index of minimum valid data in Z. :param max_x: Index of maximum valid data in X. :param max_y: Index of maximum valid data in Y. :param max_z: Index of maximum valid data in Z. :type min_x: int_ref :type min_y: int_ref :type min_z: int_ref :type max_x: int_ref :type max_y: int_ref :type max_z: int_ref .. 
versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Find the non-dummy volume of a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` object. If the voxel is all dummies, returns `iMAX <geosoft.gxapi.iMAX>` for the minima, and `iMIN <geosoft.gxapi.iMIN>` for the maxima. """ min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_data_extents(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) def get_data_ground_extents(self, min_x, min_y, min_z, max_x, max_y, max_z): """ Get the voxel size in ground units that has non-dummy data. :param min_x: Ground location of minimum valid data in X. :param min_y: Ground location of minimum valid data in Y. :param min_z: Ground location of minimum valid data in Z. :param max_x: Ground location of maximum valid data in X. :param max_y: Ground location of maximum valid data in Y. :param max_z: Ground location of maximum valid data in Z. :type min_x: float_ref :type min_y: float_ref :type min_z: float_ref :type max_x: float_ref :type max_y: float_ref :type max_z: float_ref .. versionadded:: 9.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Find the non-dummy volume of a `GXMULTIGRID3D <geosoft.gxapi.GXMULTIGRID3D>` object. If the voxel is all dummies, returns `iMAX <geosoft.gxapi.iMAX>` for the minima, and `iMIN <geosoft.gxapi.iMIN>` for the maxima. 
""" min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value = self._get_data_ground_extents(min_x.value, min_y.value, min_z.value, max_x.value, max_y.value, max_z.value) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/extra_tests/test_ui_prompts.py # import pydevd # pydevd.settrace('localhost', port=34765, stdoutToServer=True, stderrToServer=True) import os import numpy as np import unittest import geosoft.gxapi as gxapi import geosoft.gxpy.geometry as gxgm import geosoft.gxpy.view as gxv import geosoft.gxpy.map as gxmap import geosoft.gxpy.group as gxg import geosoft.gxpy.system as gxsys from base import GXPYTest def rungx(): raise Exception("This is not an extension. Please use a python interpreter.") class Test(GXPYTest): def test_full(self): self.start() verification_checked = gxapi.int_ref() verification_checked.value = 1 gxapi.GXSYS.display_task_dialog_ui('Message Title', 'Main Instruction', 'Content, with <a href="https://google.com">link</a>', gxapi.TD_BUTTON_OK, gxapi.GXLST.null(), gxapi.TD_ICON_ERROR, 'Footer with <a href="https://google.com">another link</a>', gxapi.TD_ICON_WARNING, 'Verification checkbox text (uncheck this!)', verification_checked, 'Expanded stuff...\n<a href="https://my.geosoft.com/subscriptions#/">My subscriptions</a>', '', '') self.assertEqual(verification_checked.value, 0) def test_confirm(self): self.start() verification_checked = gxapi.int_ref() answer = gxapi.GXSYS.display_task_dialog_ui('Message Title', '', 'Are you sure (click yes)?', gxapi.TD_BUTTON_YES + gxapi.TD_BUTTON_NO, gxapi.GXLST.null(), gxapi.TD_ICON_CONFIRMATION, '', gxapi.TD_ICON_NONE, '', verification_checked, '', '', '') self.assertEqual(answer, gxapi.TD_ID_YES) def test_success(self): self.start() verification_checked = 
gxapi.int_ref() gxapi.GXSYS.display_task_dialog_ui('Message Title', '', 'Success!', gxapi.TD_BUTTON_CLOSE, gxapi.GXLST.null(), gxapi.TD_ICON_SUCCESS, '', gxapi.TD_ICON_NONE, '', verification_checked, '', '', '') def test_information(self): self.start() verification_checked = gxapi.int_ref() gxapi.GXSYS.display_task_dialog_ui('Message Title', '', 'Information', gxapi.TD_BUTTON_CLOSE, gxapi.GXLST.null(), gxapi.TD_ICON_INFORMATION, '', gxapi.TD_ICON_NONE, '', verification_checked, '', '', '') def test_none(self): self.start() verification_checked = gxapi.int_ref() gxapi.GXSYS.display_task_dialog_ui('Message Title', '', 'No Icon', gxapi.TD_BUTTON_CLOSE, gxapi.GXLST.null(), gxapi.TD_ICON_NONE, '', gxapi.TD_ICON_NONE, '', verification_checked, '', '', '') def test_custom_buttons(self): self.start() lst = gxapi.GXLST.create(1024); lst.add_item("Don't press this one", "50"); lst.add_item("Press this one!", "123"); verification_checked = gxapi.int_ref() answer = gxapi.GXSYS.display_task_dialog_ui('Message Title', '', 'Custom Buttons', gxapi.TD_BUTTON_CLOSE, lst, gxapi.TD_ICON_CONFIRMATION, '', gxapi.TD_ICON_NONE, '', verification_checked, '', '', '') self.assertEqual(answer, 123) if __name__ == '__main__': unittest.main() <file_sep>/docs/GXPGEXP.rst .. _GXPGEXP: GXPGEXP class ================================== .. autoclass:: geosoft.gxapi.GXPGEXP :members: <file_sep>/docs/GXVECTOR3D.rst .. _GXVECTOR3D: GXVECTOR3D class ================================== .. autoclass:: geosoft.gxapi.GXVECTOR3D :members: <file_sep>/docs/GXDBREAD.rst .. _GXDBREAD: GXDBREAD class ================================== .. autoclass:: geosoft.gxapi.GXDBREAD :members: <file_sep>/docs/GX3DC.rst .. _GX3DC: GX3DC class ================================== .. autoclass:: geosoft.gxapi.GX3DC :members: <file_sep>/geosoft/gxpy/grid_fft.py """ Geosoft Fast Fourier Transform processes for 2D gridded data. :Classes: :`GridFFT`: Grid FFT Note that 'wavenumber' in this module refers to cycles/unit_distance. 
Multiply by 2*math.pi for the angular wavenumber. .. seealso:: :class:`geosoft.gxapi.GXFFT2` .. note:: Regression tests provide usage examples: `Tests <https://github.com/GeosoftInc/gxpy/blob/master/geosoft/gxpy/tests/test_grid_fft.py>`_ """ import numpy as np import math import geosoft import geosoft.gxapi as gxapi from . import gx as gx from . import grid as gxgrd from . import grid_utility as gxgrdu from . import utility as gxu from . import vv as gxvv __version__ = geosoft.__version__ def _t(s): return geosoft.gxpy.system.translate(s) FILL_MAXIMUM_ENTROPY = 0 FILL_MINIMUM_CURVATURE = 1 TRN_SOURCE = 0 TRN_FILTERED = 1 I_WAVENUMBER = 0 I_SAMPLE_COUNT = 1 I_LOG_POWER = 2 I_DEPTH_3 = 3 I_DEPTH_5 = 4 class GridFFTException(geosoft.GXRuntimeError): """ Exceptions from this module. """ pass class GridFFT: """ Descrete Fourier Transform of a grid. :param grid: grid file name or a `geosoft.gxpy.grid.Grid` instance. :param expand: minimum expansion percent to create a periodic function. The default is 10. :param trend_order: trend order to remove, default is 1 :param fill_method: FILL_MAXIMUM_ENTROPY (default) or FILL_MINIMUM_CURVATURE. Maximum entropy prediction fills the expanded area in a way that preserves the character of the radially-averaged power spectrum so that spectral analysis based on the shape of the spectrum will be more reliable. The following parameters only apply for maximum-entropy prediction. The defaults will be fine in all but exceptional situations where edge effects unduly distort the result. :param buffer: percentage buffer area, default 2%. The buffer expands the size of the grid footprint before filling internal space with a minimum-curvacture surface. This minimizes edge effects from high-amplitude features at the edge of the grid. :param buff_iterations: maximum iterations to resolve the minimum curvature surface for the internal fill. 
:param filter_length: maximum entropy filter length, :param amplitude_limit: Amplitudes limiting, which starts at halve this setting. Default no limiting. :param edge_limit: edge amplitude limiting, starting at half this value. Default no limiting. :param edge_limit_cells: if edge limiting, start this many cells from the edge :param smooth: `True` (default) to smooth the filled expanded area. :param feather: `True` to feather expanded data to mean value at the expanded edges. If `False` the data will be periodic at the edges. Feathering may be useful should the prediction function introduce unreasonable edge effects. .. versionadded:: 9.4 """ def __enter__(self): return self def __exit__(self, _type, _value, _traceback): self.__del__() def __del__(self): if hasattr(self, '_close'): self._close() def _close(self): if hasattr(self, '_open'): if self._open: self._source_transform.close(discard=True) if self._filtered_transform: self._filtered_transform.close(discard=True) self._prep_grid.close(discard=True) self._source_grid = None self._trend = None gx.pop_resource(self._open) self._open = None def __repr__(self): return "{}({})".format(self.__class__, self.__dict__) def __str__(self): return '<class GridFFT>: {} ({}, {})'.format(self._name, self._source_grid.nx, self._source_grid.ny) def __init__(self, grid, buffer=2., buff_iterations=250, buffer_tolerance=None, trend_order=1, trend_edge=1, expand=10., fill_method=FILL_MAXIMUM_ENTROPY, filter_length=0, amplitude_limit=0., edge_limit=-1., edge_limit_cells=0, smooth=1, feather=False): def max_entropy_fill(btol, melen, feath): def tpg_rows(n): if n >= grid.ny: return None tpg.read_row(n, 0, 0, rvv.gxvv) xyv[:, 1] = grid.y0 + n * grid.dy xyv[:, 2] = rvv.np return xyv xyv = np.empty((grid.nx, 3)) rvv = gxvv.GXvv() # expand buffer and fill if buffer == 0.: buffer_cells = 0 else: buffer_cells = max(int(0.5 + min(grid.nx, grid.ny) * buffer / 100.), 1) gxc.log(_t('Internal fill with {} cell buffer...').format(buffer_cells)) 
expanded_area = (grid.x0 - buffer_cells * grid.dx, grid.y0 - buffer_cells * grid.dy, grid.x0 + (grid.nx + buffer_cells - 1) * grid.dx, grid.y0 + (grid.ny + buffer_cells - 1) * grid.dy) rvv.length = grid.nx xyv[:, 0] = [(grid.x0 + i * grid.dx) for i in range(grid.nx)] bkd = max(grid.nx, grid.ny) * grid.dx if btol is None: btol = grid.statistics()['sd'] * 0.001 buffer_grid = gxgrd.Grid.minimum_curvature(tpg_rows, cs=grid.dx, area=expanded_area, bkd=bkd, itrmax=buff_iterations, pastol=99., tol=btol, icgr=16, max_segments=grid.ny) # expand for periodic function gxc.log(_t('Expand from ({}, {})').format(grid.nx, grid.ny)) xpg = gxapi.GXPG.create(1, 1, tpg.e_type()) gxapi.GXPGU.expand(buffer_grid.gxpg(), xpg, expand, 1, 0, 0) xnx = xpg.n_cols() xny = xpg.n_rows() gxc.log(_t(' to ({}, {})...').format(xnx, xny)) # fill gxc.log(_t('Maximum-entropy prediction fill...')) reference_file = gx.gx().temp_file('grd') gxapi.GXPGU.ref_file(xpg, reference_file) gxapi.GXPGU.fill(xpg, 2, # Roll off weighting option: 1 - linear, 2 - square gxapi.rDUMMY, # the value to roll off to, GS_R8DM for line mean 0, # roll-off distance in cells, 0 for none, -1 default melen, # max. filter length. -1 for no max. entropy. 0 for the default. 0, # max. pred. sample 0 for the default of 2*lMxf. amplitude_limit, # limit amplitudes to this level, starting at half, 0. for none edge_limit, # limit edge amplitudes to this level. <0.0 for no none edge_limit_cells, # edge limit width in cells, 0 for default. int(bool(smooth)), # pass smooth filter, 0 or 1. reference_file) gxu.delete_files_by_root(reference_file) # prepped grid properties = grid.properties() properties['x0'], properties['y0'] = grid.xy_from_index((grid.nx - xpg.n_cols()) / 2., (grid.ny - xpg.n_rows()) / 2.) prep_grid = gxgrd.Grid.from_data_array(xpg, properties=properties) if feath: _xx, _xy = (xpg.n_cols() - grid.nx) // 2, (xpg.n_rows() - grid.ny) // 2 prep_grid = gxgrdu.feather(prep_grid, min(_xx, _xy)) return prep_grid # lets do this... 
gxc = gx.gx() # for logging if not isinstance(grid, gxgrd.Grid): grid = gxgrd.Grid.open(grid) self._source_grid = grid self._name = self._source_grid.name if grid.rot != 0.: # TODO: add support for rotated grids raise GridFFTException(_t('Rotated grids are not supported.')) if grid.dx != grid.dy: raise GridFFTException(_t('Cell size must be square')) gxc.log(_t('\nGridFFT from: {}').format(grid.file_name)) # remove trend method = _t('edge') if trend_edge == 1 else _t('all') gxc.log(_t('Remove {} order trend determined from {} data ...').format(trend_order, method)) self._trend = gxapi.GXTR.create(trend_order) tpg = gxapi.GXPG.create(grid.ny, grid.nx, self._source_grid.gxtype) gxapi.GXPGU.trend(grid.gxpg(), tpg, self._trend, 0, trend_edge, grid.x0, grid.y0, grid.dx, grid.dy) if fill_method == FILL_MAXIMUM_ENTROPY: self._prep_grid = max_entropy_fill(buffer_tolerance, filter_length, feather) else: # minimum-curvature gxc.log(_t('Expand from ({}, {})').format(grid.nx, grid.ny)) ppg = gxapi.GXPG.create(1, 1, tpg.e_type()) gxapi.GXPGU.expand(grid.gxpg(), ppg, expand, 1, 0, 0) gxc.log(_t(' to ({}, {})...').format(ppg.n_cols(), ppg.n_rows())) props = grid.properties() xx, xy = (ppg.n_cols() - grid.nx) // 2, (ppg.n_rows() - grid.ny) // 2 props['x0'], props['y0'] = grid.xy_from_index(-xx, -xy) exp_grid = gxgrd.Grid.from_data_array(ppg, properties=props) gxc.log(_t('Minimum-curvature surface fill...')) self._prep_grid = gxgrdu.feather(gxgrdu.flood(exp_grid), min(xx, xy)) self._prep_grid.gximg.set_tr(self._trend) # fft gxc.log(_t('FFT...')) fpg = self._prep_grid.gxpg(True) fpg.re_allocate(self._prep_grid.ny, self._prep_grid.nx + 2) gxapi.GXFFT2.trans_pg(fpg, gxapi.FFT2_PG_FORWARD) trn_file = gx.gx().temp_file('.trn(GRD)') self._source_transform = gxgrd.Grid.from_data_array(fpg, file_name=trn_file, properties=self._prep_grid.properties()) self._source_transform.gximg.set_tr(self._trend) self._filtered_transform = None self._next = 0 self._ny2 = self._source_transform.ny // 2 
self._u = None self._u2 = None self._source_spectrum = None self._filtered_spectrum = None self._source_average_spectral_density = None self._filtered_average_spectral_density = None # track self._open = gx.track_resource(self.__class__.__name__, str(self)) def uv_row_from_tr(self, i): """ Returns (u, v) space row index of a transform row. .. versionadded:: 9.4 """ return (i + self._ny2) if i < self._ny2 else (i - self._ny2) def tr_row_from_uv(self, i): """ Returns transform row index from (u, v) space row index. .. versionadded:: 9.4 """ return (i - self._ny2) if i >= self._ny2 else (i + self._ny2) def read_uv_row(self, row, trn=TRN_SOURCE): """ Read a row (constant wavenumber v) from (u, v) transform. :param row: row number in (u, v) space, row 0 is minimum v :param trn: `TRN_SOURCE` from the source transform (default) or `TRN_FILTERED` :return: (u_array, v, real_array, imaginary_array) To calculate a wavenumber array: wavenumber = np.sqrt(u_array**2 + v**2). Upward continuation example: .. code:: import geosoft.gxpy.gx as gx import geosoft.gxpy.grid_fft as gfft gxc = gx.GXpy(log=print) with gxfft.GridFFT('some_mag_grid_file.grd') as fft: # for each row v in (u, v) for vrow in range(fft.nv): # read the row u, v, r, i = fft.read_uv_row(vrow) # angular wavenumber along the row wn = np.sqrt(u**2 + v**2) * 2. * math.pi # upward continue 500 grid distance units continuation_filter = np.exp(wn * -500.) r *= continuation_filter i *= continuation_filter # write the filtered result to the TRN_FILTERED transform fft.write_uv_row(r, i, vrow, trn=gxfft.TRN_FILTERED) # create an output grid of the upward-continued result fft.result_grid(file_name='upward_continued_500.grd') .. seealso:: `write_uv_row()` .. 
versionadded:: 9.4 """ if trn == TRN_SOURCE: tr = self.source_transform else: tr = self.filtered_transform data = tr.read_row(self.tr_row_from_uv(row)).np r = data[0::2] i = data[1::2] if self._u is None: self._u = np.arange(len(r)) * self.dv self._u2 = self._u**2 v = (row - self._ny2) * self.dv return self._u, v, r, i def write_uv_row(self, r, i, row, trn=TRN_SOURCE): """ Write a row (constant wavenumber v) to the (u, v) transform. :param r: reals as a numpy array length half the width of the transform (as returned from `read_row`). :param i: imaginary as a numpy array, matches r. :param row: row number in (u, v) space, row 0 is minimum v :param trn: `TRN_SOURCE` from the source transform (default) or `TRN_FILTERED` .. seealso:: `read_uv_row()` .. versionadded:: 9.4 """ if trn == TRN_SOURCE: tr = self.source_transform else: tr = self.filtered_transform data = np.empty(len(r) * 2, dtype=tr.dtype) data[0::2] = r data[1::2] = i tr.write_row(data, self.tr_row_from_uv(row)) if row == tr.ny - 1: if trn == TRN_SOURCE: self._source_transform = gxgrd.reopen(tr) else: self._filtered_transform = gxgrd.reopen(tr) @property def du(self): """ Wavenumber increment in the grid X direction in (cycles / grid distance uom)""" return 1.0 / (self._source_transform.dx * (self._source_transform.nx - 2)) @property def dv(self): """ Wavenumber increment in the grid Y direction in (cycles / grid distance uom).""" return 1.0 / (self._source_transform.dy * self._source_transform.ny) @property def nu(self): """ Number of discrete wavenumbers in grid X direction. The transform is folded in the x direction, will be half the transform width + 1 """ return self._source_transform.nx // 2 @property def nv(self): """ Number of discrete wavenumbers in the grid Y direction. """ return self._source_transform.ny @property def u0(self): """ First u (X-direction) wavenumber, always 0. """ return 0. @property def v0(self): """ First v (Y-direction) wavenumber. 
""" return (self.nv // 2) * self.dv def filter(self, filters=None, trn=TRN_SOURCE, height='', mag_inclination='', mag_declination='', mag_strength=''): """ Apply a pre-defined filter. See filter reference: https://github.com/GeosoftInc/gxc/blob/master/reference/con_files/magmap.con :param filters: list of filters to apply. Each filter can be a string, or a tuple with the first item being the filter name followed by the filter parameters. See `magmap.con` referenced above for the full list of filters. :param trn: `TRN_SOURCE` apply to the source transform (default) or `TRN_FILTERED` to apply to the current filtered transform. The following parameter are the default for magnetic filed filters like pole/equator reduction and aparent susceptibility. :param height: survey ground clearance in grid distance units :param mag_inclination: magnetic field inclination :param mag_declination: magnetic field declination :param mag_strength: total magnetic filed strength for converting magnetization to susceptibility. Example upward continuation 500 grid distance units and a first vertical derivative: .. code:: import geosoft.gxpy.gx as gx import geosoft.gxpy.grid_fft as gfft gxc = gx.GXpy(log=print) with gxfft.GridFFT('some_mag_grid_file.grd') as fft: # apply the filer fft.filter(['CNUP 500', 'DRVZ 1']) # equlavalent to `fft.filter([('CNUP', 500), ('DRVZ', 1)])` # create an output grid of the upward-continued result fft.result_grid(file_name='upward_continued_500.grd') .. 
versionadded:: 9.4 """ if (trn == TRN_SOURCE) or (self._filtered_transform is None): transform = self._source_transform else: transform = self._filtered_transform tpg = transform.gxpg(True) transform.gximg.get_tr(self._trend) # control file con_file = gx.gx().temp_file('con') with open(con_file, 'x') as cf: # control-file header parameters: cf.write('\n') # title not used cf.write('{} /\n'.format(height)) cf.write('{} /\n'.format(mag_inclination)) cf.write('{} /\n'.format(mag_declination)) cf.write('{} /\n'.format(mag_strength)) if isinstance(filters, str): filters = [filters] # filters if filters: for f in filters: if isinstance(f, str): cf.write('{} /\n'.format(f)) else: for p in f: cf.write('{} '.format(p)) cf.write('/\n') # filter gxapi.GXFFT2.filter_pg(tpg, con_file, self._trend, self._source_grid.dx, self._source_grid.dy, self._source_grid.rot) gxu.delete_file(con_file) file_name = gx.gx().temp_file('.trn(GRD)') self._filtered_transform = gxgrd.Grid.from_data_array(tpg, file_name=file_name, properties=self._source_transform.properties()) self._filtered_transform.gximg.set_tr(self._trend) self._filtered_average_spectral_density = None self._filtered_spectrum = None def radially_averaged_spectrum(self, trn=TRN_SOURCE): """ Radially averaged spectrum as a Numpy array shaped (n_wavenumbers, 5). :param trn: `TRN_SOURCE` (default) return spectrum of the source data, or `TRN_FILTERED` return spectrum of the current filtered state. .. note:: Numpy array shaped (n_wavenumbers, 5), where each row contains: [wavenumber, sample_count, log_power, 3-point_depth, 5-point_depth], wavenumber in cycles per 1000 * distance unit of measure (cycle/km for metres), and log_power is the natural log of the power. Point depths are calculated by dividing the local slope(3 points and 5 points) of the log_power by (4 * pi) (see Spector and Grant, 1970). 
For code clarity, the following index constants can be used to reference columns in the spectrum array: ============== === I_WAVENUMBER 0 I_SAMPLE_COUNT 1 I_LOG_POWER 2 I_DEPTH_3 3 I_DEPTH_5 4 ============== === .. versionadded:: 9.4 """ if trn == TRN_SOURCE: if self._source_spectrum is not None: return self._source_spectrum tr = self.source_transform else: if self._filtered_spectrum is not None: return self._filtered_spectrum tr = self.filtered_transform # spectrum spec_file = gx.gx().temp_file() try: gxapi.GXFFT2.rad_spc(tr.gximg, spec_file) except geosoft.gxapi.GXAPIError: tpg = gxapi.GXPG.create(tr.ny, tr.nx, gxapi.GS_FLOAT) tpg.copy(tr.gxpg()) with gxgrd.Grid.from_data_array(tpg, properties=tr.properties()) as tgd: tgd.delete_files() gxapi.GXFFT2.rad_spc(tgd.gximg, spec_file) length = max(tr.nx, tr.ny) // 2 spectrum = np.zeros((length, 5)) wavenumber = spectrum[:, 0] n_sample = spectrum[:, 1] log_power = spectrum[:, 2] depth_3 = spectrum[:, 3] depth_5 = spectrum[:, 4] i = 0 asd = None with open(spec_file) as f: for sl in f: if sl: if sl[0] == '/': if '=' in sl: try: asd = float(sl.split('=')[1]) except ValueError: asd = None else: pv = sl.split() wavenumber[i] = float(pv[0]) n_sample[i] = float(pv[1]) log_power[i] = float(pv[2]) try: depth_3[i] = float(pv[3]) except ValueError: depth_3[i] = np.nan try: depth_5[i] = float(pv[4]) except ValueError: depth_5[i] = np.nan i += 1 spectrum = spectrum[:i] gxu.delete_file(spec_file) # add the average spectral density back into the log_power spectrum[:, I_LOG_POWER] += asd if trn == TRN_SOURCE: self._source_spectrum = spectrum self._source_average_spectral_density = asd else: self._filtered_spectrum = spectrum self._filtered_average_spectral_density = asd return spectrum def log_average_spectral_density(self, trn=TRN_SOURCE): """ Log of the average spectral density of the transform. :param trn: `TRN_SOURCE` (default) source data spectrum, or `TRN_FILTERED` current filtered transform. .. 
versionadded:: 9.4 """ if trn == TRN_SOURCE: if self._source_average_spectral_density: return self._source_average_spectral_density else: if self._filtered_average_spectral_density: return self._filtered_average_spectral_density # estimate from radial spectrum data rspec = self.radially_averaged_spectrum(trn) tot_samples = np.sum(rspec[I_SAMPLE_COUNT]) tot_energy = np.sum(np.exp(rspec[I_LOG_POWER])) asd = math.log(tot_energy / tot_samples) if trn == TRN_SOURCE: self._source_average_spectral_density = asd else: self._filtered_average_spectral_density = asd return asd def spectrum_grid(self, trn=TRN_SOURCE, file_name=None, overwrite=False): """ Return the 2D log(power) amplitude as a grid in wavenumber domain (u, v). Amplitude = log(real**2 + imaginary**2) :param trn: `TRN_SOURCE` source spectrum (default) or `TRN_FILTERED` filtered spectrum :param file_name: name for the grid file, default is a temporary grid. :param overwrite: `True` to overwrite existing grid. :return: `geosoft.gxpy.grid.Grid` instance .. versionadded:: 9.4 """ if trn == TRN_SOURCE: tr = self._source_transform else: tr = self._filtered_transform du = 1.0 / (tr.dx * (tr.nx - 2)) dv = 1.0 / (tr.dy * tr.ny) props = tr.properties() props['nx'] = tr.nx // 2 props['ny'] = tr.ny props['x0'] = 0 props['y0'] = -(tr.ny // 2) * dv props['dx'] = du props['dy'] = dv nperr = {} sgrd = gxgrd.Grid.new(file_name=file_name, properties=props, overwrite=overwrite) try: nperr = np.seterr(under='ignore') for row in range(tr.ny): data = tr.read_row(row).np r = np.clip(data[0::2]**2, 1.0e-20, None) i = np.clip(data[1::2]**2, 1.0e-20, None) sgrd.write_row(np.log(r + i), self.uv_row_from_tr(row)) finally: np.seterr(**nperr) return sgrd @property def source_grid(self): """ Source grid as a `geosoft.gxpy.grid.Grid` instance. """ return self._source_grid @property def expanded_filled_grid(self): """ Expanded and filled grid as a `geosoft.gxpy.grid.Grid` instance. 
""" return self._prep_grid @property def source_transform(self): """ Folded descrete Fourier transform as a `geosoft.gxpy.grid.Grid` instance.""" return self._source_transform @property def filtered_transform(self): """ Folded descrete Fourier transform after filters applied.""" if self._filtered_transform is None: self._filtered_transform = gxgrd.Grid.new(properties=self._source_transform.properties()) return self._filtered_transform def result_grid(self, file_name=None, overwrite=False): """ Produce a filter result grid. :param file_name: result grid file, default greates a temporary grid :param overwrite: `True` to overwrite an existing grid :return: `geosoft.gxpy.grid.Grid` instance .. versionadded:: 9.4 """ if self._filtered_transform is None: self._source_transform = gxgrd.reopen(self._source_transform) trn = self._source_transform else: self._filtered_transform = gxgrd.reopen(self._filtered_transform) trn = self._filtered_transform tpg = trn.gxpg(True) # fft result gxapi.GXFFT2.trans_pg(tpg, gxapi.FFT2_PG_INVERSE) # reduce ix0 = ((tpg.n_cols() - 2) - self._source_grid.nx) // 2 iy0 = (tpg.n_rows() - self._source_grid.ny) // 2 rpg = gxapi.GXPG.create(self._source_grid.ny, self._source_grid.nx, self._source_grid.gxtype) rpg.copy_subset(tpg, 0, 0, iy0, ix0, self._source_grid.ny, self._source_grid.nx) # put back the trend, which may have been filtered gxapi.GXIMG.get_tr(trn.gximg, self._trend) result_pg = gxapi.GXPG.create(rpg.n_rows(), rpg.n_cols(), rpg.e_type()) gxapi.GXPGU.trend(rpg, result_pg, self._trend, 2, 1, self._source_grid.x0, self._source_grid.y0, self._source_grid.dx, self._source_grid.dy) result = gxgrd.Grid.from_data_array(result_pg, properties=self._source_grid.properties(), file_name=file_name, overwrite=overwrite) # mask against original grid result.mask(self.source_grid) return result <file_sep>/geosoft/gxpy/__init__.py from . import system from . import gx from . import dataframe from . import project from . import utility from . 
import vv from . import va from . import coordinate_system from . import geometry from . import geometry_utility from . import grid from . import grid_fft from . import grid_utility from . import gdb from . import agg from . import map from . import view from . import group from . import viewer from . import vox from . import vox_display from . import metadata from . import spatialdata from . import surface from . import dap_client from . import segy_reader __all__ = ['agg', 'coordinate_system', 'dap_client', 'dataframe', 'geometry', 'geometry_utility', 'gdb', 'grid', 'grid_fft', 'grid_utility', 'group', 'gx', 'map', 'metadata', 'project', 'segy_reader', 'spatialdata', 'surface', 'system', 'utility', 'va', 'view', 'viewer', 'vox', 'vox_display', 'vv'] #: global constants not defined in GXAPI MAX_LST = 4096 #: maximum Geosoft LST entry size <file_sep>/docs/GXSHP.rst .. _GXSHP: GXSHP class ================================== .. autoclass:: geosoft.gxapi.GXSHP :members: .. _SHP_GEOM_TYPE: SHP_GEOM_TYPE constants ----------------------------------------------------------------------- Shape file geometry types .. autodata:: geosoft.gxapi.SHP_GEOM_TYPE_POINT :annotation: .. autoattribute:: geosoft.gxapi.SHP_GEOM_TYPE_POINT .. autodata:: geosoft.gxapi.SHP_GEOM_TYPE_ARC :annotation: .. autoattribute:: geosoft.gxapi.SHP_GEOM_TYPE_ARC .. autodata:: geosoft.gxapi.SHP_GEOM_TYPE_POLYGON :annotation: .. autoattribute:: geosoft.gxapi.SHP_GEOM_TYPE_POLYGON .. autodata:: geosoft.gxapi.SHP_GEOM_TYPE_POINTZ :annotation: .. autoattribute:: geosoft.gxapi.SHP_GEOM_TYPE_POINTZ .. autodata:: geosoft.gxapi.SHP_GEOM_TYPE_ARCZ :annotation: .. autoattribute:: geosoft.gxapi.SHP_GEOM_TYPE_ARCZ .. autodata:: geosoft.gxapi.SHP_GEOM_TYPE_POLYGONZ :annotation: .. autoattribute:: geosoft.gxapi.SHP_GEOM_TYPE_POLYGONZ <file_sep>/geosoft/gxapi/GXDMPPLY.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDMPPLY(gxapi_cy.WrapDMPPLY): """ GXDMPPLY class. Datamine Multiple polygon object """ def __init__(self, handle=0): super(GXDMPPLY, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDMPPLY <geosoft.gxapi.GXDMPPLY>` :returns: A null `GXDMPPLY <geosoft.gxapi.GXDMPPLY>` :rtype: GXDMPPLY """ return GXDMPPLY() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def clear(self): """ Clear/remove all polygons from the `GXDMPPLY <geosoft.gxapi.GXDMPPLY>`. .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._clear() def copy(self, source): """ Copy :param source: Source :type source: GXDMPPLY .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._copy(source) @classmethod def create(cls): """ Creates a `GXDMPPLY <geosoft.gxapi.GXDMPPLY>` object. :returns: DMPLY Object :rtype: GXDMPPLY .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = gxapi_cy.WrapDMPPLY._create(GXContext._get_tls_geo()) return GXDMPPLY(ret_val) def get_azimuth(self, p, az): """ Get the azimuth of a given polygon. :param p: Polygon number (1 to NP) :param az: Azimuth (degrees) (o) :type p: int :type az: float_ref .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The azimuth is the equivalent section azimuth, equal to the azimuth of the normal vector plus 90 degrees. """ az.value = self._get_azimuth(p, az.value) def get_extents(self, p, x, y, z, w, h): """ Get the center, width and height of a given polygon. :param p: Polygon number (1 to NP) :param x: Center point X (o) :param y: Center point Y (o) :param z: Center point Z (o) :param w: Width of polygon (in its plane) (o) :param h: Height of polygon (Z extent) (o) :type p: int :type x: float_ref :type y: float_ref :type z: float_ref :type w: float_ref :type h: float_ref .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ x.value, y.value, z.value, w.value, h.value = self._get_extents(p, x.value, y.value, z.value, w.value, h.value) def get_joins(self, p, vv): """ Get join lines for each vertex in a specific polygon. :param p: Polygon number (1 to N) :param vv: INT `GXVV <geosoft.gxapi.GXVV>` of join indices (1 to NJoins). :type p: int :type vv: GXVV .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** If a specific vertex is not joined, the returned value is 0. If the vertex is joined, then the index of the join line (1 to NJoins) is returned. """ self._get_joins(p, vv) def get_normal_vectors(self, p, x1, y1, z1, x2, y2, z2, x3, y3, z3): """ Get the normal vectors of a given polygon. 
:param p: Polygon number (1 to NP) :param x1: X component (o) (Horizontal azimuth vector) :param y1: Y component (o) :param z1: Z component (o) :param x2: X component (o) (Down-dip, in the vertical plane) :param y2: Y component (o) :param z2: Z component (o) :param x3: X component (o) (Normal vector) :param y3: Y component (o) :param z3: Z component (o) :type p: int :type x1: float_ref :type y1: float_ref :type z1: float_ref :type x2: float_ref :type y2: float_ref :type z2: float_ref :type x3: float_ref :type y3: float_ref :type z3: float_ref .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Three normalized vectors are returned. The first is horizontal, in the plane of the polygon. The second is in the vertical plane, corresponding to the "down-dip" direction. The third is the normal vector to the polygon plane. """ x1.value, y1.value, z1.value, x2.value, y2.value, z2.value, x3.value, y3.value, z3.value = self._get_normal_vectors(p, x1.value, y1.value, z1.value, x2.value, y2.value, z2.value, x3.value, y3.value, z3.value) def get_poly(self, p, vv_x, vv_y, vv_z): """ Get a specific polygon from a `GXDMPPLY <geosoft.gxapi.GXDMPPLY>` object. :param p: Polygon number (1 to NP) (i) :param vv_x: X Locations (o) :param vv_y: Y Locations (o) :param vv_z: Z Locations (o) :type p: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Get the number of points from the `GXVV <geosoft.gxapi.GXVV>` length. """ self._get_poly(p, vv_x, vv_y, vv_z) def get_swing(self, p, az): """ Get the swing of a given polygon. :param p: Polygon number (1 to NP) :param az: Swing (degrees) (o) :type p: int :type az: float_ref .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The swing is the equivalent section swing, equal to zero for vertical plates, and increasing as the normal vector goes from horizontal upward. """ az.value = self._get_swing(p, az.value) def get_vertex(self, p, v, x, y, z): """ Get a vertex location from a `GXDMPPLY <geosoft.gxapi.GXDMPPLY>` object. :param p: Polygon number (1 to NP) :param v: Vertex number (1 to NV) :param x: X Location (o) :param y: Y Location (o) :param z: Z Location (o) :type p: int :type v: int :type x: float_ref :type y: float_ref :type z: float_ref .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ x.value, y.value, z.value = self._get_vertex(p, v, x.value, y.value, z.value) def num_joins(self): """ Get the number of joining lines in a `GXDMPPLY <geosoft.gxapi.GXDMPPLY>` object. :returns: Number of joining lines :rtype: int .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._num_joins() return ret_val def num_polys(self): """ Get the number of polygons in a `GXDMPPLY <geosoft.gxapi.GXDMPPLY>` object. :returns: Number of polygons :rtype: int .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The value returned is the "NP" used in function descriptions below. """ ret_val = self._num_polys() return ret_val def num_vertices(self, p): """ Get the number of vertices in a polygon. :param p: Polygon number (1 to NP) :type p: int :returns: Number of vertices in a polygon :rtype: int .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The value returned is the "NV" used in function descriptions below. """ ret_val = self._num_vertices(p) return ret_val def load(self, file): """ Loads a Datamine polygon file. :param file: Name of the file to load :type file: str .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._load(file.encode()) def move_vertex(self, p, v, x, y, z): """ Moves a vertex and any associated lines. :param p: Polygon number (1 to NP) :param v: Vertex number (1 to NV) :param x: New location X :param y: New location Y :param z: New location Z :type p: int :type v: int :type x: float :type y: float :type z: float .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._move_vertex(p, v, x, y, z) def project_poly(self, p, xp, yp, zp, az, swing, vv_x, vv_y, vv_z): """ Project a polygon onto a vertical plane. :param p: Polygon number (1 to NP) :param xp: X location of plane origin in 3D :param yp: Y location of plane origin in 3D :param zp: Z location of plane origin in 3D :param az: Azimuth of the plane in degrees :param swing: Swing of the plane in degrees :param vv_x: X (horizontal along-section locations on vertical plane (o) :param vv_y: Y (vertical locations on vertical plane (o) :param vv_z: Z (horizontal distances perpendicular to the plane (o) :type p: int :type xp: float :type yp: float :type zp: float :type az: float :type swing: float :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Gives the location in plane coordinates of a selected polygon, after it has been projected perpendicularly onto the plane. Plane coordinates: X - horizontal in plane Y - "vertical" in plane (can be a swing) Z - horizontal, "perpendicular" to plane (RH) """ self._project_poly(p, xp, yp, zp, az, swing, vv_x, vv_y, vv_z) def re_project_poly(self, p, xp, yp, zp, az, vv_x, vv_y, vv_x3, vv_y3, vv_z3): """ Recover polygon locations from 2D locations on vertical plane. :param p: Polygon number (1 to lNP) (i) :param xp: X location of plane origin in 3D (i) :param yp: Y location of plane origin in 3D (i) :param zp: Z location of plane origin in 3D (i) :param az: Azimuth of the plane in degrees (i) :param vv_x: X locations on vertical plane (i) :param vv_y: Y (actually Z) locations on vertical plane (i) :param vv_x3: X Locations of polygon (o) :param vv_y3: Y Locations of polygon (o) :param vv_z3: Z Locations of polygon (o) :type p: int :type xp: float :type yp: float :type zp: float :type az: float :type vv_x: GXVV :type vv_y: GXVV :type vv_x3: GXVV :type vv_y3: GXVV :type vv_z3: GXVV .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** This is the inverse operation of `project_poly <geosoft.gxapi.GXDMPPLY.project_poly>`. Input the 2D locations on the projected vertical plane. These locations are projected back onto the original polygon plane. """ self._re_project_poly(p, xp, yp, zp, az, vv_x, vv_y, vv_x3, vv_y3, vv_z3) def save(self, file): """ Save to a Datamine polygon file :param file: Name of the file to save to :type file: str .. 
versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._save(file.encode()) def set_poly(self, p, vv_x, vv_y, vv_z): """ Set a specific polygon into a `GXDMPPLY <geosoft.gxapi.GXDMPPLY>` object. :param p: Polygon number (1 to NP) (i) :param vv_x: X Locations (i) :param vv_y: Y Locations (i) :param vv_z: Z Locations (i) :type p: int :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV .. versionadded:: 6.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Get the number of points from the `GXVV <geosoft.gxapi.GXVV>` length. """ self._set_poly(p, vv_x, vv_y, vv_z) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXSBF.rst .. _GXSBF: GXSBF class ================================== .. autoclass:: geosoft.gxapi.GXSBF :members: .. _SBF_OPEN: SBF_OPEN constants ----------------------------------------------------------------------- `GXSBF <geosoft.gxapi.GXSBF>` Open defines .. autodata:: geosoft.gxapi.SBF_READ :annotation: .. autoattribute:: geosoft.gxapi.SBF_READ .. autodata:: geosoft.gxapi.SBF_READWRITE_NEW :annotation: .. autoattribute:: geosoft.gxapi.SBF_READWRITE_NEW .. autodata:: geosoft.gxapi.SBF_READWRITE_OLD :annotation: .. autoattribute:: geosoft.gxapi.SBF_READWRITE_OLD .. _SBF_TYPE: SBF_TYPE constants ----------------------------------------------------------------------- `GXSBF <geosoft.gxapi.GXSBF>` Object type defines .. autodata:: geosoft.gxapi.SBF_TYPE_DIRS :annotation: .. autoattribute:: geosoft.gxapi.SBF_TYPE_DIRS .. autodata:: geosoft.gxapi.SBF_TYPE_FILES :annotation: .. autoattribute:: geosoft.gxapi.SBF_TYPE_FILES .. 
autodata:: geosoft.gxapi.SBF_TYPE_BOTH :annotation: .. autoattribute:: geosoft.gxapi.SBF_TYPE_BOTH <file_sep>/geosoft/gxapi/GXSTK.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSTK(gxapi_cy.WrapSTK): """ GXSTK class. The `GXSTK <geosoft.gxapi.GXSTK>` class is used for plotting a single data profile in a `GXMVIEW <geosoft.gxapi.GXMVIEW>`. The `GXMSTK <geosoft.gxapi.GXMSTK>` class (see `GXMSTK <geosoft.gxapi.GXMSTK>`) is used to plot multiple `GXSTK <geosoft.gxapi.GXSTK>` objects to a single map. Use `GXMSTK.add_stk <geosoft.gxapi.GXMSTK.add_stk>` function to create a `GXSTK <geosoft.gxapi.GXSTK>` object before using functions in this file SEE `GXMSTK <geosoft.gxapi.GXMSTK>` FILE FOR DETAILED DESCRIPTIONS OF ALL FUNCTION PARAMETERS. """ def __init__(self, handle=0): super(GXSTK, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSTK <geosoft.gxapi.GXSTK>` :returns: A null `GXSTK <geosoft.gxapi.GXSTK>` :rtype: GXSTK """ return GXSTK() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def get_trans_parms(self, x_trans_t, x_log_min, xvv_lev, xvv_cmp, y_trans_t, y_log_min, yvv_lev, yvv_cmp): """ Get transformation parameters in `GXSTK <geosoft.gxapi.GXSTK>` object :param x_trans_t: Type of transformation for horizontal axis :param x_log_min: Minimum value to apply logarithmic :param xvv_lev: Comma separated parameters defining linear compress data range :param xvv_cmp: Comma separated parameters defining scaling factors for :param y_trans_t: Type of scaling for vertical axis :param y_log_min: Minimum value to apply logarithmic :param yvv_lev: Comma separated parameters defining linear compress data range :param yvv_cmp: Comma separated parameters defining scaling factors for :type x_trans_t: int_ref :type x_log_min: float_ref :type xvv_lev: GXVV :type xvv_cmp: GXVV :type y_trans_t: int_ref :type y_log_min: float_ref :type yvv_lev: GXVV :type yvv_cmp: GXVV .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See above full description of each parameters `GXVV <geosoft.gxapi.GXVV>`'s for X channel transformation can be NULL if the transformation is log or loglinear. The same for Y channel. See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ x_trans_t.value, x_log_min.value, y_trans_t.value, y_log_min.value = self._get_trans_parms(x_trans_t.value, x_log_min.value, xvv_lev, xvv_cmp, y_trans_t.value, y_log_min.value, yvv_lev, yvv_cmp) def get_axis_format(self, xy): """ Get axis number display format. :param xy: :ref:`STK_AXIS` :type xy: int :returns: The current format - :ref:`DB_CHAN_FORMAT` :rtype: int .. 
versionadded:: 5.1.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** By default, `DB_CHAN_FORMAT_NORMAL <geosoft.gxapi.DB_CHAN_FORMAT_NORMAL>` """ ret_val = self._get_axis_format(xy) return ret_val def get_axis_parms(self, bar_draw, min_loc, max_loc, thick, color, tick_interval, tick_size1, tick_size2, min_tick, xy): """ Get parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating drawing X/Y axis :param bar_draw: ?BARDRAW: Bottom and/or Top, or Left and/or Right :param min_loc: Bottom Y/Left X location :param max_loc: Top Y/Right X location :param thick: ?BARLINETHICK - Line thickness in mm. Default is 0.05 :param color: ?BARCOLOR - Line color string in RGB model. Default is black :param tick_interval: ?BARTICKINTEERVAL :param tick_size1: Major tick size in mm for bottom/left axis bar. :param tick_size2: Major tick size in mm for top/right axis bar. :param min_tick: ?BARMINORTICK - Number of minor ticks. (0) none, (-1) automatic :param xy: :ref:`STK_AXIS` :type bar_draw: int_ref :type min_loc: float_ref :type max_loc: float_ref :type thick: float_ref :type color: str_ref :type tick_interval: float_ref :type tick_size1: float_ref :type tick_size2: float_ref :type min_tick: int_ref :type xy: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters ? 
mark in the note represent either X and Y """ bar_draw.value, min_loc.value, max_loc.value, thick.value, color.value, tick_interval.value, tick_size1.value, tick_size2.value, min_tick.value = self._get_axis_parms(bar_draw.value, min_loc.value, max_loc.value, thick.value, color.value.encode(), tick_interval.value, tick_size1.value, tick_size2.value, min_tick.value, xy) def get_fid_parms(self, fid_y_loc, fid_tick_size, fid_interval, fid_text_font, fid_text_size, fid_text_color): """ Get parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating drawing fid ticks :param fid_y_loc: Y location in data unit to draw Fid ticks. Default is the bottom of the stack :param fid_tick_size: Fid tick size in mm. Default is 2.0mm :param fid_interval: Fid interval to draw ticks. Nice number is calculated by default :param fid_text_font: Font to use to label fids. Default is use 'default' font set in Montaj :param fid_text_size: Text size in mm to label fids. Default is 5mm :param fid_text_color: Text color string in RGB model. Default is black :type fid_y_loc: float_ref :type fid_tick_size: float_ref :type fid_interval: float_ref :type fid_text_font: str_ref :type fid_text_size: float_ref :type fid_text_color: str_ref .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ fid_y_loc.value, fid_tick_size.value, fid_interval.value, fid_text_font.value, fid_text_size.value, fid_text_color.value = self._get_fid_parms(fid_y_loc.value, fid_tick_size.value, fid_interval.value, fid_text_font.value.encode(), fid_text_size.value, fid_text_color.value.encode()) def get_flag(self, part): """ Get flag indicating part of `GXSTK <geosoft.gxapi.GXSTK>` object is to be drawn or not :param part: :ref:`STK_FLAG` :type part: int :returns: FALSE (0) if part of the object is not to be drawn TRUE (1) if part of the object is drawn :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ ret_val = self._get_flag(part) return ret_val def get_gen_parms(self, x_ch, y_ch, grp_name, x_scale, y_scale, x_start, x_end, y_start, left, bottom, height): """ Get general parameters in `GXSTK <geosoft.gxapi.GXSTK>` object :param x_ch: X channel name, REQUIRED :param y_ch: Y channel name, REQUIRED :param grp_name: Group name :param x_scale: X scale (map scale, units/metre), REQUIRED :param y_scale: Y scale (plot scale, units/mm), REQUIRED :param x_start: Minimum X value (data unit) to draw :param x_end: Maximum X value (data unit) to draw :param y_start: Minimum Y value (data unit) to draw :param left: Minimum horizontal location in mm of the stack on the map :param bottom: Minimum vertical location in mm on the map :param height: Profile height in mm on the map, must be > 0.0 :type x_ch: str_ref :type y_ch: str_ref :type grp_name: str_ref :type x_scale: float_ref :type y_scale: float_ref :type x_start: float_ref :type x_end: float_ref :type y_start: float_ref :type left: float_ref :type bottom: 
float_ref :type height: float_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ x_ch.value, y_ch.value, grp_name.value, x_scale.value, y_scale.value, x_start.value, x_end.value, y_start.value, left.value, bottom.value, height.value = self._get_gen_parms(x_ch.value.encode(), y_ch.value.encode(), grp_name.value.encode(), x_scale.value, y_scale.value, x_start.value, x_end.value, y_start.value, left.value, bottom.value, height.value) def get_grid_parms(self, grid, min_x, max_x, min_y, max_y, thick, cross, x_sep, y_sep, color, grid12): """ Get background grid parameters in `GXSTK <geosoft.gxapi.GXSTK>` object :param grid: Type of grid to draw: :param min_x: Minimum X in ground unit to draw grid :param max_x: Maximum X in ground unit to draw grid :param min_y: Minimum Y in ground unit to draw grid :param max_y: Maximum Y in ground unit to draw grid :param thick: Line thickness in mm. Default is 0.01mm :param cross: Cross size or separation between dots in mm. :param x_sep: Separation between vertical grid lines. :param y_sep: Separation between horizontal grid lines. :param color: Grid line color string in RGB model. Default is black :param grid12: :ref:`STK_GRID` :type grid: int_ref :type min_x: float_ref :type max_x: float_ref :type min_y: float_ref :type max_y: float_ref :type thick: float_ref :type cross: float_ref :type x_sep: float_ref :type y_sep: float_ref :type color: str_ref :type grid12: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters ? 
mark in the note represent either X and Y """ grid.value, min_x.value, max_x.value, min_y.value, max_y.value, thick.value, cross.value, x_sep.value, y_sep.value, color.value = self._get_grid_parms(grid.value, min_x.value, max_x.value, min_y.value, max_y.value, thick.value, cross.value, x_sep.value, y_sep.value, color.value.encode(), grid12) def get_label_parms(self, axis, min_loc, min_orient, max_loc, max_orient, interval, font, text_size, color, bound, xy): """ Get parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating X/Y axis labels :param axis: Which axes to draw: Bottom/Top or Left/Right axes :param min_loc: Bottom or Left axis label location :param min_orient: Bottom or Left labels orientation. :param max_loc: Top or Right axis label location :param max_orient: Top or Right axis label orientation :param interval: Label interval. Default is to use related axis tick interval :param font: Font to use to label. Default is use 'default' font set in Montaj :param text_size: Text size in mm to draw profile labels. Default is 5mm :param color: Text color string in RGB model. Default is black :param bound: ?LABELBOUND - Edge bound. 0 - No :param xy: :ref:`STK_AXIS` :type axis: int_ref :type min_loc: float_ref :type min_orient: int_ref :type max_loc: float_ref :type max_orient: int_ref :type interval: float_ref :type font: str_ref :type text_size: float_ref :type color: str_ref :type bound: int_ref :type xy: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters ? mark in the note represent either X and Y Sets the label format to GSF_NORMAL. To override this, use the `set_axis_format <geosoft.gxapi.GXSTK.set_axis_format>` function AFTER calling this. 
""" axis.value, min_loc.value, min_orient.value, max_loc.value, max_orient.value, interval.value, font.value, text_size.value, color.value, bound.value = self._get_label_parms(axis.value, min_loc.value, min_orient.value, max_loc.value, max_orient.value, interval.value, font.value.encode(), text_size.value, color.value.encode(), bound.value, xy) def get_profile(self, prof_type, pitch, thick, ln_clr, wrap, clip, smooth, vv_ind, label, ref, font, text_size, text_clr, prof_va_num): """ Get profile parameters in `GXSTK <geosoft.gxapi.GXSTK>` object :param prof_type: Profile line type. 1 - solid (default) :param pitch: Patterned line pitch in mm. Default is 10 mm :param thick: Line thickness in mm. Default is 0.05mm :param ln_clr: Color string in RGB model. Default is black :param wrap: Wrap option :param clip: Clip option :param smooth: Plot smoothed polyline. :param vv_ind: Only use for `GXVA <geosoft.gxapi.GXVA>` channels. NULL is acceptable which means all profiles in the `GXVA <geosoft.gxapi.GXVA>` are plotted. `GXVV <geosoft.gxapi.GXVV>` type of INT (integer) :param label: Characters string to label profiles :param ref: Reference location to draw label. :param font: Font to use to draw profile labels. Default is use 'default' font set in Montaj :param text_size: Text size in mm to draw profile labels. Default is 5mm :param text_clr: Text color string in RGB model. Default is black :param prof_va_num: Include `GXVA <geosoft.gxapi.GXVA>` column numbers as part of the profile label 0 - no, 1 - yes :type prof_type: int_ref :type pitch: float_ref :type thick: float_ref :type ln_clr: str_ref :type wrap: int_ref :type clip: int_ref :type smooth: int_ref :type vv_ind: GXVV :type label: str_ref :type ref: int_ref :type font: str_ref :type text_size: float_ref :type text_clr: str_ref :type prof_va_num: int_ref .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ prof_type.value, pitch.value, thick.value, ln_clr.value, wrap.value, clip.value, smooth.value, label.value, ref.value, font.value, text_size.value, text_clr.value, prof_va_num.value = self._get_profile(prof_type.value, pitch.value, thick.value, ln_clr.value.encode(), wrap.value, clip.value, smooth.value, vv_ind, label.value.encode(), ref.value, font.value.encode(), text_size.value, text_clr.value.encode(), prof_va_num.value) def get_profile_ex(self, prof_type, pitch, thick, ln_clr, break_dum, wrap, clip, smooth, vv_ind, label, ref, font, text_size, text_clr, prof_va_num): """ Get profile parameters in `GXSTK <geosoft.gxapi.GXSTK>` object (added Break on dummy option) :param prof_type: Profile line type. 1 - solid (default) :param pitch: Patterned line pitch in mm. Default is 10 mm :param thick: Line thickness in mm. Default is 0.05mm :param ln_clr: Color string in RGB model. Default is black :param break_dum: Break on dummy option :param wrap: Wrap option :param clip: Clip option :param smooth: Plot smoothed polyline. :param vv_ind: Only use for `GXVA <geosoft.gxapi.GXVA>` channels. NULL is acceptable which means all profiles in the `GXVA <geosoft.gxapi.GXVA>` are plotted. `GXVV <geosoft.gxapi.GXVV>` type of INT (integer) :param label: Characters string to label profiles :param ref: Reference location to draw label. :param font: Font to use to draw profile labels. Default is use 'default' font set in Montaj :param text_size: Text size in mm to draw profile labels. Default is 5mm :param text_clr: Text color string in RGB model. 
Default is black :param prof_va_num: Include `GXVA <geosoft.gxapi.GXVA>` column numbers as part of the profile label 0 - no, 1 - yes :type prof_type: int_ref :type pitch: float_ref :type thick: float_ref :type ln_clr: str_ref :type break_dum: int_ref :type wrap: int_ref :type clip: int_ref :type smooth: int_ref :type vv_ind: GXVV :type label: str_ref :type ref: int_ref :type font: str_ref :type text_size: float_ref :type text_clr: str_ref :type prof_va_num: int_ref .. versionadded:: 5.0.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ prof_type.value, pitch.value, thick.value, ln_clr.value, break_dum.value, wrap.value, clip.value, smooth.value, label.value, ref.value, font.value, text_size.value, text_clr.value, prof_va_num.value = self._get_profile_ex(prof_type.value, pitch.value, thick.value, ln_clr.value.encode(), break_dum.value, wrap.value, clip.value, smooth.value, vv_ind, label.value.encode(), ref.value, font.value.encode(), text_size.value, text_clr.value.encode(), prof_va_num.value) def get_symb_parms(self, symb_font, symb_size, line_clr, fill_clr, wrap, clip, symb_y_loc, no_levels, vv_level, vv_type, label, text_font, text_size, text_clr): """ Get parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating drawing symbols :param symb_font: Font to use to draw symbols. Default is use 'symbols.gfn' font :param symb_size: Symbol size in mm. Default is 5mm :param line_clr: Edge color string in RGB model. Default is black :param fill_clr: Fill color string in RGB model. Default is black :param wrap: Wrap option :param clip: Clip option :param symb_y_loc: Y location to draw symbols. 
Default is to use the data from Y channel :param no_levels: Number of levels to draw symbols :param vv_level: Y values to define data ranges for each symbol types Type of REAL :param vv_type: Symbol numbers (given in the symbol font) to draw, default is 20 TYPE of INT :param label: Draw symbols ID (1) or not (0) :param text_font: Font to use to draw symbol ID (A,B,C...). Default is use 'default' :param text_size: Text size in mm to draw profile labels. Default is 5mm :param text_clr: Text color string in RGB model. Default is black :type symb_font: str_ref :type symb_size: float_ref :type line_clr: str_ref :type fill_clr: str_ref :type wrap: int_ref :type clip: int_ref :type symb_y_loc: float_ref :type no_levels: int_ref :type vv_level: GXVV :type vv_type: GXVV :type label: int_ref :type text_font: str_ref :type text_size: float_ref :type text_clr: str_ref .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ symb_font.value, symb_size.value, line_clr.value, fill_clr.value, wrap.value, clip.value, symb_y_loc.value, no_levels.value, label.value, text_font.value, text_size.value, text_clr.value = self._get_symb_parms(symb_font.value.encode(), symb_size.value, line_clr.value.encode(), fill_clr.value.encode(), wrap.value, clip.value, symb_y_loc.value, no_levels.value, vv_level, vv_type, label.value, text_font.value.encode(), text_size.value, text_clr.value.encode()) def get_title_parms(self, title1, title2, title1_orient, title1_x, title1_y, title2_orient, title2_x, title2_y, font, text_size, color, xy): """ Get parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating X/Y axis titles :param title1: Title for bottom X axis/left Y axis. Default is no title. :param title2: Title for top X axis/right Y axis. Default is no title. 
:param title1_orient: Bottom/Left axis title orientation. :param title1_x: X location to draw bottom/left axis title :param title1_y: Y location to draw bottom/left axis title :param title2_orient: Top/Right axis title orientation. :param title2_x: X location to draw top/right axis title :param title2_y: Y location to draw top/right axis title :param font: Font to draw titles. Default is use 'default' font set in Montaj :param text_size: Text size in mm to draw titles. Default is 5mm :param color: Text color string in RGB model. Default is black :param xy: :ref:`STK_AXIS` :type title1: str_ref :type title2: str_ref :type title1_orient: int_ref :type title1_x: float_ref :type title1_y: float_ref :type title2_orient: int_ref :type title2_x: float_ref :type title2_y: float_ref :type font: str_ref :type text_size: float_ref :type color: str_ref :type xy: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters ? mark in the note represent either X and Y """ title1.value, title2.value, title1_orient.value, title1_x.value, title1_y.value, title2_orient.value, title2_x.value, title2_y.value, font.value, text_size.value, color.value = self._get_title_parms(title1.value.encode(), title2.value.encode(), title1_orient.value, title1_x.value, title1_y.value, title2_orient.value, title2_x.value, title2_y.value, font.value.encode(), text_size.value, color.value.encode(), xy) def set_flag(self, flag, part): """ Set flag indicating part of `GXSTK <geosoft.gxapi.GXSTK>` object is to be drawn or not :param flag: Flag to set (0 or 1) :param part: :ref:`STK_FLAG` :type flag: int :type part: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ """ self._set_flag(flag, part) def set_array_colors(self, itr): """ Set colors for individual channels in a `GXVA <geosoft.gxapi.GXVA>`, via an `GXITR <geosoft.gxapi.GXITR>` :param itr: `GXITR <geosoft.gxapi.GXITR>` object for colors :type itr: GXITR .. versionadded:: 5.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The `GXITR <geosoft.gxapi.GXITR>` is consulted by taking the channel index and dividing by the number of channels; hence the `GXITR <geosoft.gxapi.GXITR>` maximum values should be in the range: 0 > values >= 1.0. """ self._set_array_colors(itr) def set_axis_format(self, format, xy): """ Set axis number display format. :param format: :ref:`DB_CHAN_FORMAT` :param xy: :ref:`STK_AXIS` :type format: int :type xy: int .. versionadded:: 5.1.5 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** By default, `DB_CHAN_FORMAT_NORMAL <geosoft.gxapi.DB_CHAN_FORMAT_NORMAL>` is used to display the values, or for values > 1.e7, `DB_CHAN_FORMAT_EXP <geosoft.gxapi.DB_CHAN_FORMAT_EXP>`. """ self._set_axis_format(format, xy) def set_axis_parms(self, bar_draw, min_loc, max_loc, thick, color, tick_interval, tick_size1, tick_size2, min_tick, xy): """ Set parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating drawing X/Y axis :param bar_draw: ?BARDRAW, :ref:`STK_AXIS_POS` :param min_loc: Bottom Y/Left X location :param max_loc: Top Y/Right X location :param thick: ?BARLINETHICK - Line thickness in mm. Default is 0.05 :param color: ?BARCOLOR - Line color string in RGB model. Default is black :param tick_interval: ?BARTICKINTEERVAL :param tick_size1: Major tick size in mm for bottom/left axis bar. 
:param tick_size2: Major tick size in mm for top/right axis bar. :param min_tick: ?BARMINORTICK - Number of minor ticks. (0) none, (-1) automatic :param xy: :ref:`STK_AXIS` :type bar_draw: int :type min_loc: float :type max_loc: float :type thick: float :type color: str :type tick_interval: float :type tick_size1: float :type tick_size2: float :type min_tick: int :type xy: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters ? mark in the note represent either X and Y """ self._set_axis_parms(bar_draw, min_loc, max_loc, thick, color.encode(), tick_interval, tick_size1, tick_size2, min_tick, xy) def set_fid_parms(self, fid_y_loc, fid_tick_size, fid_interval, fid_text_font, fid_text_size, fid_text_color): """ Set parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating drawing fid ticks :param fid_y_loc: Y location in data unit to draw Fid ticks. Default is the bottom of the stack :param fid_tick_size: Fid tick size in mm. Default is 2.0mm :param fid_interval: Fid interval to draw ticks. Nice number is calculated by default :param fid_text_font: Font to use to label fids. Default is use 'default' font set in Montaj :param fid_text_size: Text size in mm to label fids. Default is 5mm :param fid_text_color: Text color string in RGB model. Default is black :type fid_y_loc: float :type fid_tick_size: float :type fid_interval: float :type fid_text_font: str :type fid_text_size: float :type fid_text_color: str .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ self._set_fid_parms(fid_y_loc, fid_tick_size, fid_interval, fid_text_font.encode(), fid_text_size, fid_text_color.encode()) def set_gen_parms(self, x_ch, y_ch, grp_name, x_scale, y_scale, x_start, x_end, y_start, left, bottom, height): """ Set general parameters in `GXSTK <geosoft.gxapi.GXSTK>` object :param x_ch: X channel name, REQUIRED :param y_ch: Y channel name, REQUIRED :param grp_name: Group name :param x_scale: X scale (map scale, units/metre), REQUIRED :param y_scale: Y scale (plot scale, units/mm), REQUIRED :param x_start: Minimum X value (data unit) to draw :param x_end: Maximum X value (data unit) to draw :param y_start: Minimum Y value (data unit) to draw :param left: Minimum horizontal location in mm of the stack on the map :param bottom: Minimum vertical location in mm on the map :param height: Profile height in mm on the map, must be > 0.0 :type x_ch: str :type y_ch: str :type grp_name: str :type x_scale: float :type y_scale: float :type x_start: float :type x_end: float :type y_start: float :type left: float :type bottom: float :type height: float .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ self._set_gen_parms(x_ch.encode(), y_ch.encode(), grp_name.encode(), x_scale, y_scale, x_start, x_end, y_start, left, bottom, height) def set_grid_parms(self, grid, min_x, max_x, min_y, max_y, thick, cross, x_sep, y_sep, color, grid12): """ Set background grid parameters in `GXSTK <geosoft.gxapi.GXSTK>` object :param grid: Type of grid to draw: :param min_x: Minimum X in ground unit to draw grid :param max_x: Maximum X in ground unit to draw grid :param min_y: Minimum Y in ground unit to draw grid :param max_y: Maximum Y in ground unit to draw grid :param thick: Line thickness in mm. Default is 0.01mm :param cross: Cross size or separation between dots in mm. :param x_sep: Separation between vertical grid lines. :param y_sep: Separation between horizontal grid lines. :param color: Grid line color string in RGB model. Default is black :param grid12: :ref:`STK_GRID` :type grid: int :type min_x: float :type max_x: float :type min_y: float :type max_y: float :type thick: float :type cross: float :type x_sep: float :type y_sep: float :type color: str :type grid12: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters ? 
mark in the note represent either X and Y """ self._set_grid_parms(grid, min_x, max_x, min_y, max_y, thick, cross, x_sep, y_sep, color.encode(), grid12) def set_label_parms(self, axis, min_loc, min_orient, max_loc, max_orient, interval, font, text_size, color, bound, xy): """ Set parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating X/Y axis labels :param axis: :ref:`STK_AXIS_POS` :param min_loc: Bottom or Left axis label location :param min_orient: Bottom or Left labels orientation. :param max_loc: Top or Right axis label location :param max_orient: Top or Right axis label orientation :param interval: Label interval. Default is to use related axis tick interval :param font: Font to use to label. Default is use 'default' font set in Montaj :param text_size: Text size in mm to draw profile labels. Default is 5mm :param color: Text color string in RGB model. Default is black :param bound: ?LABELBOUND - Edge bound. 0 - No :param xy: :ref:`STK_AXIS` :type axis: int :type min_loc: float :type min_orient: int :type max_loc: float :type max_orient: int :type interval: float :type font: str :type text_size: float :type color: str :type bound: int :type xy: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters ? mark in the note represent either X and Y Sets the label format to GSF_NORMAL. To override this, use the `set_axis_format <geosoft.gxapi.GXSTK.set_axis_format>` function AFTER calling this. """ self._set_label_parms(axis, min_loc, min_orient, max_loc, max_orient, interval, font.encode(), text_size, color.encode(), bound, xy) def set_line_parm(self, line): """ Set line parameter (of Y Chan) in `GXSTK <geosoft.gxapi.GXSTK>` object :param line: Line symb :type line: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ self._set_line_parm(line) def set_profile(self, prof_type, pitch, thick, ln_clr, wrap, clip, smooth, vv_ind, label, ref, font, text_size, text_clr, prof_va_num): """ Set profile parameters in `GXSTK <geosoft.gxapi.GXSTK>` object :param prof_type: Profile line type. 1 - solid (default) :param pitch: Patterned line pitch in mm. Default is 10 mm :param thick: Line thickness in mm. Default is 0.05mm :param ln_clr: Color string in RGB model. Default is black :param wrap: Wrap option :param clip: Clip option :param smooth: Plot smoothed polyline. :param vv_ind: Integers starting from 0 indicating windows in `GXVA <geosoft.gxapi.GXVA>` channel to draw `GXVV <geosoft.gxapi.GXVV>` type of INT (integer) :param label: Characters string to label profiles :param ref: Reference location to draw label. :param font: Font to use to draw profile labels. Default is use 'default' font set in Montaj :param text_size: Text size in mm to draw profile labels. Default is 5mm :param text_clr: Text color string in RGB model. Default is black :param prof_va_num: Include `GXVA <geosoft.gxapi.GXVA>` column numbers as part of the profile label 0 - no, 1 - yes :type prof_type: int :type pitch: float :type thick: float :type ln_clr: str :type wrap: int :type clip: int :type smooth: int :type vv_ind: GXVV :type label: str :type ref: int :type font: str :type text_size: float :type text_clr: str :type prof_va_num: int .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ self._set_profile(prof_type, pitch, thick, ln_clr.encode(), wrap, clip, smooth, vv_ind, label.encode(), ref, font.encode(), text_size, text_clr.encode(), prof_va_num) def set_profile_ex(self, prof_type, pitch, thick, ln_clr, break_dum, wrap, clip, smooth, vv_ind, label, ref, font, text_size, text_clr, prof_va_num): """ Set profile parameters in `GXSTK <geosoft.gxapi.GXSTK>` object (added Break on dummy option) :param prof_type: Profile line type. 1 - solid (default) :param pitch: Patterned line pitch in mm. Default is 10 mm :param thick: Line thickness in mm. Default is 0.05mm :param ln_clr: Color string in RGB model. Default is black :param break_dum: Break on dummy option :param wrap: Wrap option :param clip: Clip option :param smooth: Plot smoothed polyline. :param vv_ind: Integers starting from 0 indicating windows in `GXVA <geosoft.gxapi.GXVA>` channel to draw `GXVV <geosoft.gxapi.GXVV>` type of INT (integer) :param label: Characters string to label profiles :param ref: Reference location to draw label. :param font: Font to use to draw profile labels. Default is use 'default' font set in Montaj :param text_size: Text size in mm to draw profile labels. Default is 5mm :param text_clr: Text color string in RGB model. Default is black :param prof_va_num: Include `GXVA <geosoft.gxapi.GXVA>` column numbers as part of the profile label 0 - no, 1 - yes :type prof_type: int :type pitch: float :type thick: float :type ln_clr: str :type break_dum: int :type wrap: int :type clip: int :type smooth: int :type vv_ind: GXVV :type label: str :type ref: int :type font: str :type text_size: float :type text_clr: str :type prof_va_num: int .. 
versionadded:: 5.0.3 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ self._set_profile_ex(prof_type, pitch, thick, ln_clr.encode(), break_dum, wrap, clip, smooth, vv_ind, label.encode(), ref, font.encode(), text_size, text_clr.encode(), prof_va_num) def set_symb_parms(self, symb_font, symb_size, line_clr, fill_clr, wrap, clip, symb_y_loc, no_levels, vv_level, vv_type, label, text_font, text_size, text_clr): """ Set parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating drawing symbols :param symb_font: Font to use to draw symbols. Default is use 'symbols.gfn' font :param symb_size: Symbol size in mm. Default is 5mm :param line_clr: Edge color string in RGB model. Default is black :param fill_clr: Fill color string in RGB model. Default is black :param wrap: Wrap option :param clip: Clip option :param symb_y_loc: Y location to draw symbols. Default is to use the data from Y channel :param no_levels: Number of symbols levels :param vv_level: Y values to define data ranges for each symbol types Type of REAL :param vv_type: Symbol numbers (given in the symbol font) to draw Type of INT :param label: Draw symbols ID (1) or not (0) :param text_font: Font to use to draw symbol ID (A,B,C...). Default is use 'default' :param text_size: Text size in mm to draw profile labels. Default is 5mm :param text_clr: Text color string in RGB model. Default is black :type symb_font: str :type symb_size: float :type line_clr: str :type fill_clr: str :type wrap: int :type clip: int :type symb_y_loc: float :type no_levels: int :type vv_level: GXVV :type vv_type: GXVV :type label: int :type text_font: str :type text_size: float :type text_clr: str .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ self._set_symb_parms(symb_font.encode(), symb_size, line_clr.encode(), fill_clr.encode(), wrap, clip, symb_y_loc, no_levels, vv_level, vv_type, label, text_font.encode(), text_size, text_clr.encode()) def set_title_parms(self, title1, title2, title1_orient, title1_x, title1_y, title2_orient, title2_x, title2_y, font, text_size, color, xy): """ Set parameters in `GXSTK <geosoft.gxapi.GXSTK>` object relating X/Y axis titles :param title1: Title for bottom X axis/left Y axis. Default is no title. :param title2: Title for top X axis/right Y axis. Default is no title. :param title1_orient: Bottom/Left axis title orientation. :param title1_x: X location to draw bottom/left axis title :param title1_y: Y location to draw bottom/left axis title :param title2_orient: Top/Right axis title orientation. :param title2_x: X location to draw top/right axis title :param title2_y: Y location to draw top/right axis title :param font: Font to draw titles. Default is use 'default' font set in Montaj :param text_size: Text size in mm to draw titles. Default is 5mm :param color: Text color string in RGB model. Default is black :param xy: :ref:`STK_AXIS` :type title1: str :type title2: str :type title1_orient: int :type title1_x: float :type title1_y: float :type title2_orient: int :type title2_x: float :type title2_y: float :type font: str :type text_size: float :type color: str :type xy: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters ? 
mark in the note represent either X and Y """ self._set_title_parms(title1.encode(), title2.encode(), title1_orient, title1_x, title1_y, title2_orient, title2_x, title2_y, font.encode(), text_size, color.encode(), xy) def set_trans_parms(self, x_trans_t, x_log_min, xvv_lev, xvv_cmp, y_trans_t, y_log_min, yvv_lev, yvv_cmp): """ Set transformation parameters in `GXSTK <geosoft.gxapi.GXSTK>` object :param x_trans_t: Type of transformation for horizontal axis :param x_log_min: Minimum value to apply logarithmic :param xvv_lev: Future use :param xvv_cmp: Future use :param y_trans_t: Type of scaling for vertical axis :param y_log_min: Minimum value to apply logarithmic :param yvv_lev: Future use :param yvv_cmp: Future use :type x_trans_t: int :type x_log_min: float :type xvv_lev: int :type xvv_cmp: int :type y_trans_t: int :type y_log_min: float :type yvv_lev: int :type yvv_cmp: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** See above full description of each parameters `GXVV <geosoft.gxapi.GXVV>`'s for X channel transformation can be NULL if the transformation is log or loglinear. The same for Y channel. See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ self._set_trans_parms(x_trans_t, x_log_min, xvv_lev, xvv_cmp, y_trans_t, y_log_min, yvv_lev, yvv_cmp) def set_va_index_start(self, index0): """ Start array profile index labels at 0 or 1. :param index0: Starting index (0 or 1) :type index0: int .. versionadded:: 6.0.1 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** By default, the index labels for array channel profiles begin at 0. Use this function to start them at either 0 or 1. 
""" self._set_va_index_start(index0) def set_error_plot_params(self, enabled, error_channel, elements_csv): """ Set error bar plot parameters for the current profile. :param enabled: Enable(1) or disable(0) error bar plot rendering. :param error_channel: The name of the error channel. :param elements_csv: A comma-separated list of integers pertaining to the array channel elementsto apply the error bar plot to. For example, supplying '1,2,7,10' will rendererror bar plots for the first, second, seventh and tenth array channel elementsof the source and error channels. :type enabled: int :type error_channel: str :type elements_csv: str .. versionadded:: 2022.2 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Set parameters for the error bar plot that is associated with the current profile. See `GXMSTK <geosoft.gxapi.GXMSTK>` for detailed description of all function parameters """ self._set_error_plot_params(enabled, error_channel.encode(), elements_csv.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXINTERNET.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXINTERNET(gxapi_cy.WrapINTERNET): """ GXINTERNET class. This library provides functions for accessing the internet and MAPI-compliant e-mail services. Supported by Oasis montaj ONLY. 
""" def __init__(self, handle=0): super(GXINTERNET, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXINTERNET <geosoft.gxapi.GXINTERNET>` :returns: A null `GXINTERNET <geosoft.gxapi.GXINTERNET>` :rtype: GXINTERNET """ return GXINTERNET() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def download_http(cls, url, file, size): """ Download `GXHTTP <geosoft.gxapi.GXHTTP>` file from the internet to file. :param url: `GXHTTP <geosoft.gxapi.GXHTTP>` URL :param file: File Name to save to :param size: No longer used, just pass 0 :type url: str :type file: str :type size: int :returns: 0 - Ok 1 - Error :rtype: int .. versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** The file must be stored on a server that supports the `GXHTTP <geosoft.gxapi.GXHTTP>` protocol and not require a password. .. seealso:: iserver.gxh internet class. """ ret_val = gxapi_cy.WrapINTERNET._download_http(GXContext._get_tls_geo(), url.encode(), file.encode(), size) return ret_val @classmethod def send_mail(cls, recipient, p2, p3, p4, p5, p6, p7, p8): """ Prepaire an email for the user. :param recipient: Recipient Name ("" for none) :param p2: Recipient Address ("" for none) :param p3: szSubject ("" for none) :param p4: Message Text ("" for none) :param p5: Attachment1 File Name ("" for none) :param p6: Attachment1 User Name ("" for none) :param p7: Attachment2 File Name ("" for none) :param p8: Attachment2 User Name ("" for none) :type recipient: str :type p2: str :type p3: str :type p4: str :type p5: str :type p6: str :type p7: str :type p8: str .. 
versionadded:: 5.0 **License:** `Geosoft End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-end-user-lic>`_ **Note:** Requires a MAPI complient mail system to be installed on the client machine. """ gxapi_cy.WrapINTERNET._send_mail(GXContext._get_tls_geo(), recipient.encode(), p2.encode(), p3.encode(), p4.encode(), p5.encode(), p6.encode(), p7.encode(), p8.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXSEMPLOT.rst .. _GXSEMPLOT: GXSEMPLOT class ================================== .. autoclass:: geosoft.gxapi.GXSEMPLOT :members: .. _SEMPLOT_GROUP_CLASS: SEMPLOT_GROUP_CLASS constants ----------------------------------------------------------------------- `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` group class. .. autodata:: geosoft.gxapi.SEMPLOT_GROUP_CLASS :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_GROUP_CLASS .. _SEMPLOT_EXPORT: SEMPLOT_EXPORT constants ----------------------------------------------------------------------- `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` export type selection. .. autodata:: geosoft.gxapi.SEMPLOT_EXPORT_NORMAL :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_EXPORT_NORMAL .. autodata:: geosoft.gxapi.SEMPLOT_EXPORT_NOEXTRA :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_EXPORT_NOEXTRA .. _SEMPLOT_EXT: SEMPLOT_EXT constants ----------------------------------------------------------------------- `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` file extension selection .. autodata:: geosoft.gxapi.SEMPLOT_EXT_ALL :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_EXT_ALL .. autodata:: geosoft.gxapi.SEMPLOT_EXT_SEMPLOT :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_EXT_SEMPLOT .. autodata:: geosoft.gxapi.SEMPLOT_EXT_CHIMERA :annotation: .. 
autoattribute:: geosoft.gxapi.SEMPLOT_EXT_CHIMERA .. _SEMPLOT_PLOT: SEMPLOT_PLOT constants ----------------------------------------------------------------------- `GXSEMPLOT <geosoft.gxapi.GXSEMPLOT>` plot type selection. .. autodata:: geosoft.gxapi.SEMPLOT_PLOT_ALL :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_PLOT_ALL .. autodata:: geosoft.gxapi.SEMPLOT_PLOT_XYPLOT :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_PLOT_XYPLOT .. autodata:: geosoft.gxapi.SEMPLOT_PLOT_TRIPLOT :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_PLOT_TRIPLOT .. autodata:: geosoft.gxapi.SEMPLOT_PLOT_UNKNOWN :annotation: .. autoattribute:: geosoft.gxapi.SEMPLOT_PLOT_UNKNOWN <file_sep>/geosoft/gxapi/GXSTR.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXSTR(gxapi_cy.WrapSTR): """ GXSTR class. This library is not a class. Use the `GXSTR <geosoft.gxapi.GXSTR>` library functions to work with and manipulate string variables. Since the GX Programming Language does not provide string literal tokens, you must use these functions for any string operations you want to perform. """ def __init__(self, handle=0): super(GXSTR, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXSTR <geosoft.gxapi.GXSTR>` :returns: A null `GXSTR <geosoft.gxapi.GXSTR>` :rtype: GXSTR """ return GXSTR() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. 
:rtype: bool """ return self._internal_handle() == 0 # Data Input @classmethod def scan_i(cls, str_val): """ Convert a string to a GX int. :param str_val: String to convert to an integer :type str_val: str :returns: Resulting Integer, `iDUMMY <geosoft.gxapi.iDUMMY>` is bad integer :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._scan_i(GXContext._get_tls_geo(), str_val.encode()) return ret_val @classmethod def scan_date(cls, str_val, type): """ Convert a date string to a GX real. :param str_val: Date string :param type: :ref:`DATE_FORMAT` :type str_val: str :type type: int :returns: Resulting Real, `rDUMMY <geosoft.gxapi.rDUMMY>` if conversion fails. :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** OLD usage, use ScanForm_STR instead. """ ret_val = gxapi_cy.WrapSTR._scan_date(GXContext._get_tls_geo(), str_val.encode(), type) return ret_val @classmethod def scan_form(cls, str_val, type): """ Convert a formated string to a real. :param str_val: Date string :param type: :ref:`GS_FORMATS` :type str_val: str :type type: int :returns: Resulting Real, `rDUMMY <geosoft.gxapi.rDUMMY>` if conversion fails. :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._scan_form(GXContext._get_tls_geo(), str_val.encode(), type) return ret_val @classmethod def scan_r(cls, str_val): """ Convert a string to a GX real. :param str_val: String to convert to a real :type str_val: str :returns: Resulting Real, `rDUMMY <geosoft.gxapi.rDUMMY>` if bad string. :rtype: float .. 
versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._scan_r(GXContext._get_tls_geo(), str_val.encode()) return ret_val @classmethod def scan_time(cls, str_val, type): """ Convert a time string to a GX real. :param str_val: Date string :param type: :ref:`TIME_FORMAT` :type str_val: str :type type: int :returns: Resulting Real, `rDUMMY <geosoft.gxapi.rDUMMY>` if conversion fails. :rtype: float .. versionadded:: 6.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** OLD usage, use ScanForm_STR instead. """ ret_val = gxapi_cy.WrapSTR._scan_time(GXContext._get_tls_geo(), str_val.encode(), type) return ret_val # File Name @classmethod def file_combine_parts(cls, drive, dir, file, ext, qual, file_name): """ Combine file parts to build a file name. :param drive: Drive :param dir: Directory :param file: Name :param ext: Extension :param qual: Qualifiers :param file_name: Destination string, can be same as input :type drive: str :type dir: str :type file: str :type ext: str :type qual: str :type file_name: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ file_name.value = gxapi_cy.WrapSTR._file_combine_parts(GXContext._get_tls_geo(), drive.encode(), dir.encode(), file.encode(), ext.encode(), qual.encode(), file_name.value.encode()) @classmethod def file_ext(cls, ifile, ext, ofile, opt): """ Add a file extension onto a file name string. :param ifile: File name to extend :param ext: Extension if "", extenstion and '.' are stripped. :param ofile: Extended file name (can be same as input) :param opt: :ref:`FILE_EXT` :type ifile: str :type ext: str :type ofile: str_ref :type opt: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ofile.value = gxapi_cy.WrapSTR._file_ext(GXContext._get_tls_geo(), ifile.encode(), ext.encode(), ofile.value.encode(), opt) @classmethod def file_name_part(cls, file, file_part, part): """ Get part of a file name. :param file: File name :param file_part: Destination string, can be same as input :param part: :ref:`STR_FILE_PART` :type file: str :type file_part: str_ref :type part: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ file_part.value = gxapi_cy.WrapSTR._file_name_part(GXContext._get_tls_geo(), file.encode(), file_part.value.encode(), part) @classmethod def get_m_file(cls, in_str, out_str, index): """ Get the indexed filepath from a multiple filepath string :param in_str: Input multifile string :param out_str: Output filepath string :param index: Index of file :type in_str: str :type out_str: str_ref :type index: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The multifile string must use '|' as a delimiter. Do not pass a string after calling `tokenize <geosoft.gxapi.GXSTR.tokenize>`. """ out_str.value = gxapi_cy.WrapSTR._get_m_file(GXContext._get_tls_geo(), in_str.encode(), out_str.value.encode(), index) @classmethod def remove_qualifiers(cls, ifile, ofile): """ Remove file qualifiers from a file name :param ifile: Input file name :param ofile: Output file name (can be same as input) :type ifile: str :type ofile: str_ref .. 
versionadded:: 7.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ofile.value = gxapi_cy.WrapSTR._remove_qualifiers(GXContext._get_tls_geo(), ifile.encode(), ofile.value.encode()) # Formating @classmethod def format_crc(cls, pul_crc, buff, width): """ Convert a GX CRC value to a string. :param pul_crc: CRC value to format :param buff: Resulting string :param width: Width of the field :type pul_crc: int :type buff: str_ref :type width: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ buff.value = gxapi_cy.WrapSTR._format_crc(GXContext._get_tls_geo(), pul_crc, buff.value.encode(), width) @classmethod def format_date(cls, real, buff, width, type): """ Convert a GX real to a date string. :param real: Date value in decimal years to format :param buff: Resulting string :param width: Width of the field :param type: :ref:`DATE_FORMAT` :type real: float :type buff: str_ref :type width: int :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ buff.value = gxapi_cy.WrapSTR._format_date(GXContext._get_tls_geo(), real, buff.value.encode(), width, type) @classmethod def format_i(cls, value, buff, width): """ Convert a GX int to a string. :param value: Value to format :param buff: Resulting string :param width: Width of the field :type value: int :type buff: str_ref :type width: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ buff.value = gxapi_cy.WrapSTR._format_i(GXContext._get_tls_geo(), value, buff.value.encode(), width) @classmethod def format_r(cls, real, buff, width, sig): """ Convert a GX real to a string with significant digits. 
:param real: Value to format :param buff: Resulting string :param width: Width of the field :param sig: Significant digits :type real: float :type buff: str_ref :type width: int :type sig: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ buff.value = gxapi_cy.WrapSTR._format_r(GXContext._get_tls_geo(), real, buff.value.encode(), width, sig) @classmethod def format_r2(cls, real, buff, width, sig): """ Convert a GX real to a string with given decimals. :param real: Value to format :param buff: Resulting string :param width: Width of the field :param sig: Decimals :type real: float :type buff: str_ref :type width: int :type sig: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ buff.value = gxapi_cy.WrapSTR._format_r2(GXContext._get_tls_geo(), real, buff.value.encode(), width, sig) @classmethod def format_double(cls, real, buff, type, width, dec): """ Convert a GX real to a string. :param real: Value to format :param buff: Resulting string :param type: :ref:`GS_FORMATS` :param width: Width of the field :param dec: Significant digits/decimals :type real: float :type buff: str_ref :type type: int :type width: int :type dec: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ buff.value = gxapi_cy.WrapSTR._format_double(GXContext._get_tls_geo(), real, buff.value.encode(), type, width, dec) @classmethod def format_time(cls, real, buff, width, deci, type): """ Convert a GX real to a time string. 
:param real: Time value in decimal hours to format :param buff: Resulting string :param width: Width of the field :param deci: Decimals to format with :param type: :ref:`TIME_FORMAT` :type real: float :type buff: str_ref :type width: int :type deci: int :type type: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ buff.value = gxapi_cy.WrapSTR._format_time(GXContext._get_tls_geo(), real, buff.value.encode(), width, deci, type) # General @classmethod def escape(cls, str_val, opt): """ Convert/replace escape sequences in strings. :param str_val: String to modify :param opt: :ref:`STR_ESCAPE` :type str_val: str_ref :type opt: int .. versionadded:: 5.0.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Escape characters: \\a bell \\b backspace \\f formfeed \\n new line \\r carriage return \\t tab \\v vertical tab \\" quote character \\x take 'x' literally \\ backslash \\ooo octal up to 3 characters \\xhh hex up to 2 characters A common use of this function is to convert double-quote characters in a user unput string to \\" so the string can be placed in a tokenized string. """ str_val.value = gxapi_cy.WrapSTR._escape(GXContext._get_tls_geo(), str_val.value.encode(), opt) @classmethod def char_(cls, str_val): """ Returns the ASCII value of a character. :param str_val: String to return ascii value of first character :type str_val: str :returns: ASCII value of first character in string. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._char_(GXContext._get_tls_geo(), str_val.encode()) return ret_val @classmethod def char_n(cls, str_val, c, max): """ Returns the ASCII value of the n'th character. 
:param str_val: String :param c: Character to get :param max: Maximum string length (unused) :type str_val: str :type c: int :type max: int :returns: ASCII value of n'th character in string. The first character is 0. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._char_n(GXContext._get_tls_geo(), str_val.encode(), c, max) return ret_val @classmethod def justify(cls, in_str, out_str, width, just): """ Justify a string :param in_str: String to justify :param out_str: Result string, can be same as input :param width: Justification width :param just: :ref:`STR_JUSTIFY` :type in_str: str :type out_str: str_ref :type width: int :type just: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the string is too big to fit in the number of display characters, the output string will be "**" justified as specified. """ out_str.value = gxapi_cy.WrapSTR._justify(GXContext._get_tls_geo(), in_str.encode(), out_str.value.encode(), width, just) @classmethod def replacei_match_string(cls, istr, old, new_str): """ Replaces all occurances of match string by replacement string with case insensitive. :param istr: Destination String :param old: Match string to replace :param new_str: Replacement string :type istr: str_ref :type old: str :type new_str: str .. versionadded:: 7.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the replacement string is "" (NULL character) then the string to replace is removed from the input string, and the string is shortened. 
""" istr.value = gxapi_cy.WrapSTR._replacei_match_string(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_str.encode()) @classmethod def replace_match_string(cls, istr, old, new_str): """ Replaces all occurances of match string by replacement string with case sensitive. :param istr: Destination String :param old: Match string to replace :param new_str: Replacement string :type istr: str_ref :type old: str :type new_str: str .. versionadded:: 7.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the replacement string is "" (NULL character) then the string to replace is removed from the input string, and the string is shortened. """ istr.value = gxapi_cy.WrapSTR._replace_match_string(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_str.encode()) @classmethod def set_char_n(cls, str_val, c, ascii): """ Set the n'th character of a string using an ASCII value :param str_val: String :param c: Character to set :param ascii: ASCII value :type str_val: str_ref :type c: int :type ascii: int .. versionadded:: 5.1.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ str_val.value = gxapi_cy.WrapSTR._set_char_n(GXContext._get_tls_geo(), str_val.value.encode(), c, ascii) @classmethod def split_string(cls, origstr, ch, split): """ Splits a string in two on a character. :param origstr: Original string :param ch: Split character (first character of string) :param split: Split string past split character. :type origstr: str_ref :type ch: str :type split: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The original string is modified by terminating it at the character split. The part of the string past the character split is copied to the split string. 
Split characters in quoted strings are ignored. This function is mainly intended to separate comments from control file strings. """ origstr.value, split.value = gxapi_cy.WrapSTR._split_string(GXContext._get_tls_geo(), origstr.value.encode(), ch.encode(), split.value.encode()) @classmethod def strcat(cls, dest, orig): """ This method contatinates a string. :param dest: Destination String :param orig: String to add :type dest: str_ref :type orig: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ dest.value = gxapi_cy.WrapSTR._strcat(GXContext._get_tls_geo(), dest.value.encode(), orig.encode()) @classmethod def strcmp(cls, first, second, case_sensitive): """ This method compares two strings and returns these values :param first: String A :param second: String B :param case_sensitive: :ref:`STR_CASE` :type first: str :type second: str :type case_sensitive: int :returns: A < B -1 A == B 0 A > B 1 :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._strcmp(GXContext._get_tls_geo(), first.encode(), second.encode(), case_sensitive) return ret_val @classmethod def strcpy(cls, dest, orig): """ This method copies a string into another string. :param dest: Destination string :param orig: Origin string :type dest: str_ref :type orig: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ dest.value = gxapi_cy.WrapSTR._strcpy(GXContext._get_tls_geo(), dest.value.encode(), orig.encode()) @classmethod def stri_mask(cls, mask, test): """ Case insensitive comparison of two strings. :param mask: Mask :param test: String to test :type mask: str :type test: str :returns: 0 if string does not match mask. 1 if string matches mask. 
:rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Mask characters '*' - matches any one or more up to next character '?' - matches one character Test is case insensitive """ ret_val = gxapi_cy.WrapSTR._stri_mask(GXContext._get_tls_geo(), mask.encode(), test.encode()) return ret_val @classmethod def strins(cls, dest, ins, orig): """ This method inserts a string at a specified position. :param dest: Destination String :param ins: Insert Position :param orig: String to add :type dest: str_ref :type ins: int :type orig: str .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the specified position does not fall within the current string the source string will simply be Concatenated. """ dest.value = gxapi_cy.WrapSTR._strins(GXContext._get_tls_geo(), dest.value.encode(), ins, orig.encode()) @classmethod def strlen(cls, str_val): """ Returns the length of a string. :param str_val: String to find the length of :type str_val: str :returns: String length. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._strlen(GXContext._get_tls_geo(), str_val.encode()) return ret_val @classmethod def str_mask(cls, mask, test): """ Case sensitive comparison of two strings. :param mask: Mask :param test: String to test :type mask: str :type test: str :returns: 0 if string does not match mask. 1 if string matches mask. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Mask characters '*' - matches any one or more up to next character '?' 
- matches one character Test is case sensitive """ ret_val = gxapi_cy.WrapSTR._str_mask(GXContext._get_tls_geo(), mask.encode(), test.encode()) return ret_val @classmethod def str_min(cls, str_val): """ Remove spaces and tabs and return length :param str_val: String to find the min length of :type str_val: str_ref :returns: String length. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** String may be modified. This function should not be used to determine if a file name string is defined, because a valid file name can contain spaces, and once "tested" the name will be altered. Instead, use `str_min2 <geosoft.gxapi.GXSTR.str_min2>`, or use `GXSYS.file_exist <geosoft.gxapi.GXSYS.file_exist>` to see if the file actually exists. """ ret_val, str_val.value = gxapi_cy.WrapSTR._str_min(GXContext._get_tls_geo(), str_val.value.encode()) return ret_val @classmethod def str_min2(cls, str_val): """ Length less spaces and tabs, string unchanged. :param str_val: String to find the min length of :type str_val: str :returns: String length. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._str_min2(GXContext._get_tls_geo(), str_val.encode()) return ret_val @classmethod def strncmp(cls, first, second, n_char, case_sensitive): """ Compares two strings to a given number of characters. :param first: String A :param second: String B :param n_char: Number of characters to compare :param case_sensitive: :ref:`STR_CASE` :type first: str :type second: str :type n_char: int :type case_sensitive: int :returns: A < B -1 A == B 0 A > B 1 :rtype: int .. 
versionadded:: 5.0.5 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._strncmp(GXContext._get_tls_geo(), first.encode(), second.encode(), n_char, case_sensitive) return ret_val @classmethod def str_str(cls, str_val, sub, case_sensitive): """ Scan a string for the occurrence of a given substring. :param str_val: String to scan :param sub: String to look for :param case_sensitive: :ref:`STR_CASE` :type str_val: str :type sub: str :type case_sensitive: int :returns: -1 if the substring does not occur in the string Index of first matching location if found :rtype: int .. versionadded:: 5.1.6 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapSTR._str_str(GXContext._get_tls_geo(), str_val.encode(), sub.encode(), case_sensitive) return ret_val @classmethod def substr(cls, dest, orig, start, length): """ Extract part of a string. :param dest: Destination string :param orig: Origin string :param start: Start location :param length: Number of characters :type dest: str_ref :type orig: str :type start: int :type length: int .. versionadded:: 6.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The destination string length will be less than the requested length if the substring is not fully enclosed in the origin string. """ dest.value = gxapi_cy.WrapSTR._substr(GXContext._get_tls_geo(), dest.value.encode(), orig.encode(), start, length) @classmethod def to_lower(cls, str_val): """ Convert a string to lower case. :param str_val: String :type str_val: str_ref .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ str_val.value = gxapi_cy.WrapSTR._to_lower(GXContext._get_tls_geo(), str_val.value.encode()) @classmethod def to_upper(cls, str_val): """ Convert a string to upper case. :param str_val: String :type str_val: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ str_val.value = gxapi_cy.WrapSTR._to_upper(GXContext._get_tls_geo(), str_val.value.encode()) @classmethod def xyz_line(cls, line, xyz): """ Make a valid XYZ line name from a valid `GXDB <geosoft.gxapi.GXDB>` line name. :param line: Line name to convert :param xyz: Buffer to hold new line name :type line: str :type xyz: str_ref .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ xyz.value = gxapi_cy.WrapSTR._xyz_line(GXContext._get_tls_geo(), line.encode(), xyz.value.encode()) @classmethod def make_alpha(cls, str_val): """ Turns all non alpha-numeric characters into an _. :param str_val: String to trim :type str_val: str_ref .. versionadded:: 5.1.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** THE STRING IS MODIFIED. """ str_val.value = gxapi_cy.WrapSTR._make_alpha(GXContext._get_tls_geo(), str_val.value.encode()) @classmethod def printf(cls, dest, mask): """ Variable Argument PrintF function :param dest: Destination string :param mask: Pattern string :type dest: str_ref :type mask: str .. 
versionadded:: 7.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ dest.value = gxapi_cy.WrapSTR._printf(GXContext._get_tls_geo(), dest.value.encode(), mask.encode()) @classmethod def replace_char(cls, istr, old, new_char): """ Replaces characters in a string. :param istr: String to modify :param old: Character to replace (first character only) :param new_char: Replacement character (first character only) :type istr: str_ref :type old: str :type new_char: str .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the input replacement character is "", then the string will be truncated at the first character to replace. """ istr.value = gxapi_cy.WrapSTR._replace_char(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_char.encode()) @classmethod def replace_char2(cls, istr, old, new_char): """ Replaces characters in a string, supports simple removal. :param istr: String to modify :param old: Character to replace (first character only) :param new_char: Replacement character (first character only) :type istr: str_ref :type old: str :type new_char: str .. versionadded:: 6.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** If the replacement character is "" (NULL character) then the character to replace is removed from the input string, and the string is shortened. """ istr.value = gxapi_cy.WrapSTR._replace_char2(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_char.encode()) @classmethod def replace_multi_char(cls, istr, old, new_char): """ Replaces multiple characters in a string. :param istr: String to modify :param old: Characters to replace :param new_char: Replacement characters :type istr: str_ref :type old: str :type new_char: str .. 
versionadded:: 5.1.5

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** The number of characters to replace must equal the number of
        replacement characters.
        """
        istr.value = gxapi_cy.WrapSTR._replace_multi_char(GXContext._get_tls_geo(), istr.value.encode(), old.encode(), new_char.encode())
        



    @classmethod
    def replace_non_ascii(cls, str_val, rpl):
        """
        Replace non-ASCII characters in a string.

        :param str_val:  String to modify
        :param rpl:      Replacement character
        :type  str_val:  str_ref
        :type  rpl:      str

        .. versionadded:: 6.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** All characters > 127 will be replaced by the first character
        of the replacement string.
        """
        str_val.value = gxapi_cy.WrapSTR._replace_non_ascii(GXContext._get_tls_geo(), str_val.value.encode(), rpl.encode())
        



    @classmethod
    def set_char(cls, str_val, ascii):
        """
        Set a string's first character using an ASCII value of a character.

        :param str_val:  String
        :param ascii:    ASCII value
        :type  str_val:  str_ref
        :type  ascii:    int

        .. versionadded:: 5.1.4

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        str_val.value = gxapi_cy.WrapSTR._set_char(GXContext._get_tls_geo(), str_val.value.encode(), ascii)
        



    @classmethod
    def trim_quotes(cls, str_val):
        """
        Remove double quotes.

        :param str_val:  String to trim
        :type  str_val:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** THE STRING IS MODIFIED.
        This method goes through the string and removes all spaces in a string
        except those enclosed in quotes. It then removes any quotes. It is
        useful for trimming unwanted spaces from an input string but allows
        the user to use quotes as well.
If a quote follows a backslash, the quote is retained and the
        backslash is deleted. These quotes are NOT treated as delimiters.
        """
        str_val.value = gxapi_cy.WrapSTR._trim_quotes(GXContext._get_tls_geo(), str_val.value.encode())
        



    @classmethod
    def trim_space(cls, str_val, trim):
        """
        Remove leading and/or trailing whitespace.

        :param str_val:  String to trim
        :param trim:     :ref:`STR_TRIM`
        :type  str_val:  str_ref
        :type  trim:     int

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** THE STRING IS MODIFIED.
        Whitespace characters are defined as space, tab, carriage return,
        new line, vertical tab or formfeed (0x09 to 0x0D, 0x20)
        """
        str_val.value = gxapi_cy.WrapSTR._trim_space(GXContext._get_tls_geo(), str_val.value.encode(), trim)
        



    @classmethod
    def un_quote(cls, str_val):
        """
        Remove double quotes from string

        :param str_val:  String to unquote
        :type  str_val:  str_ref

        .. versionadded:: 5.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** THE STRING IS MODIFIED.
        The pointers will be advanced past a first character quote and
        a last character quote will be set to '\\0'.
        Both first and last characters must be quotes for the
        trimming to take place.
        """
        str_val.value = gxapi_cy.WrapSTR._un_quote(GXContext._get_tls_geo(), str_val.value.encode())
        




# Misc


    @classmethod
    def gen_group_name(cls, istr1, istr2, istr3, ostr):
        """
        Generate a group name string from type string, database and channel (optional) strings.

        :param istr1:  Input type string (static part)
        :param istr2:  Input db string
        :param istr3:  Input ch string (could be 0 length)
        :param ostr:   Output group name string
        :type  istr1:  str
        :type  istr2:  str
        :type  istr3:  str
        :type  ostr:   str_ref

        ..
versionadded:: 5.1.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The output group name string is formed in the way of typestr_dbstr_chstr. If the database/channel strings is too long to fit the output string (max total length of 1040, including the NULL ending), then the typestr will always be kept the full length to be the first part, while the dbstr and/or chstr will be shortened to be the second and/or third part of the output string. .. seealso:: GenNewGroupName_MVIEW """ ostr.value = gxapi_cy.WrapSTR._gen_group_name(GXContext._get_tls_geo(), istr1.encode(), istr2.encode(), istr3.encode(), ostr.value.encode()) # Tokenizing @classmethod def count_tokens(cls, str_val, delims): """ Counts number of tokens. :param str_val: String to tokenize :param delims: Delimiter characters :type str_val: str :type delims: str :returns: Number of tokens in the string. :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Delimiters are "soft" in that one or more delimiters is considered a single delimiter, and preceding and trailing delimiters are ignored. DO NOT use this function except in GXC code. The corresponding `get_token <geosoft.gxapi.GXSTR.get_token>` function will not operate correctly in GX.Net code. """ ret_val = gxapi_cy.WrapSTR._count_tokens(GXContext._get_tls_geo(), str_val.encode(), delims.encode()) return ret_val @classmethod def get_token(cls, dest, orig, tok): """ Get a token from a tokenized string. :param dest: Destination string :param orig: Tokenized string :param tok: Token number wanted (0 is the first!) :type dest: str_ref :type orig: str :type tok: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Call `tokens <geosoft.gxapi.GXSTR.tokens>` to prepare the tokenized string. You MUST NOT get tokens beyond number of tokens returned by `tokens <geosoft.gxapi.GXSTR.tokens>` or `tokens2 <geosoft.gxapi.GXSTR.tokens2>`. The first token has index 0. DO NOT use this function except in GXC code. `get_token <geosoft.gxapi.GXSTR.get_token>` function will not operate correctly in GX.Net code. .. seealso:: `tokens <geosoft.gxapi.GXSTR.tokens>`, GetToken_STR """ dest.value = gxapi_cy.WrapSTR._get_token(GXContext._get_tls_geo(), dest.value.encode(), orig.encode(), tok) @classmethod def tokenize(cls, str_val, soft, hard, esc, quote): """ Tokenize a string based on any characters. :param str_val: `GXSTR <geosoft.gxapi.GXSTR>` - String containing token(s) :param soft: szSoft - Soft delimiters (spaces/tabs) :param hard: szHard - Hard delimiters (commas) :param esc: szEsc - Escape delimiters (back-slash) :param quote: szQuote- Quote delimiters (quote characters) :type str_val: str_ref :type soft: str :type hard: str :type esc: str :type quote: str :returns: Number of tokens :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This uses a finite state machine to tokenize on these rules: 1. Any one character following an escape delimiter is treated as a normal character. 2. Any characters inside a quote string are treated as normal characters. 3. Any number of Soft delimiters in sequence without a hard delimiter are treated as one hard delimited. 4. Any number of soft delimiters can preceed or follow a hard delimiter and are ignored. 
EXAMPLE Soft = [ ] Hard = [,] Escape = [\\] Quote = ["] [this is a , , the "test," of , \\,\\" my delimite fi,] Results in: [this] [is] [a] [] [the] ["test,"] [of] [\\,\\"] [my] [delimite] [fi] [] NOT use this function except in GXC code. The corresponding etToken_STR function will not operate correctly in GX.Net code. .. seealso:: GetToken_STR """ ret_val, str_val.value = gxapi_cy.WrapSTR._tokenize(GXContext._get_tls_geo(), str_val.value.encode(), soft.encode(), hard.encode(), esc.encode(), quote.encode()) return ret_val @classmethod def tokens(cls, str_val, delims): """ Tokenize a string :param str_val: String to tokenize :param delims: Delimiter characters :type str_val: str_ref :type delims: str :returns: Number of tokens, maximum is 2048 :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Delimiters in the string are reduced to a single NULL. Delimiters withing double quoted strings are ignored. Use GetToken_STR to extract tokens. DO NOT use this function except in GXC code. The corresponding `get_token <geosoft.gxapi.GXSTR.get_token>` function will not operate correctly in GX.Net code. .. seealso:: `tokens2 <geosoft.gxapi.GXSTR.tokens2>`, GetToken_STR """ ret_val, str_val.value = gxapi_cy.WrapSTR._tokens(GXContext._get_tls_geo(), str_val.value.encode(), delims.encode()) return ret_val @classmethod def tokens2(cls, str_val, soft, hard, esc, quote): """ General tokenize a string :param str_val: String to tokenize :param soft: szSoft - Soft delimiters (spaces/tabs) :param hard: szHard - Hard delimiters (commas) :param esc: szEsc - Escape delimiters (back-slash) :param quote: szQuote- Quote delimiters (quote characters) :type str_val: str_ref :type soft: str :type hard: str :type esc: str :type quote: str :returns: Number of Tokens :rtype: int .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This function is for old GX compatibility only. See `tokenize <geosoft.gxapi.GXSTR.tokenize>`. DO NOT use this function except in GXC code. The corresponding `get_token <geosoft.gxapi.GXSTR.get_token>` function will not operate correctly in GX.Net code. """ ret_val, str_val.value = gxapi_cy.WrapSTR._tokens2(GXContext._get_tls_geo(), str_val.value.encode(), soft.encode(), hard.encode(), esc.encode(), quote.encode()) return ret_val @classmethod def parse_list(cls, str_val, gvv): """ Parse a tokenized list to get a selection list. :param str_val: String to be parsed :param gvv: Selection Buffer to fill :type str_val: str :type gvv: GXVV .. versionadded:: 5.0.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Given a list such as "1,3,4,6-9,12", it fills the input buffer with 1 if the number is selected, 0 if not. The items are delimited with spaces or commas, and ranges are acceptable, either using a "-" or ":", e.g. 3-6 and 3:6 both mean 3,4,5, and 6. Only values from 0 to one less than the buffer length are used. Out-of-range values are ignored. """ gxapi_cy.WrapSTR._parse_list(GXContext._get_tls_geo(), str_val.encode(), gvv) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXMESH.rst .. _GXMESH: GXMESH class ================================== .. autoclass:: geosoft.gxapi.GXMESH :members: .. _ATTRIBUTE_DATA_TYPE: ATTRIBUTE_DATA_TYPE constants ----------------------------------------------------------------------- Data Type of Attribute .. autodata:: geosoft.gxapi.ATTRIBUTE_DOUBLE :annotation: .. 
autoattribute:: geosoft.gxapi.ATTRIBUTE_DOUBLE .. autodata:: geosoft.gxapi.ATTRIBUTE_THEMATIC :annotation: .. autoattribute:: geosoft.gxapi.ATTRIBUTE_THEMATIC .. autodata:: geosoft.gxapi.ATTRIBUTE_VECTOR :annotation: .. autoattribute:: geosoft.gxapi.ATTRIBUTE_VECTOR .. _ATTRIBUTE_TYPE: ATTRIBUTE_TYPE constants ----------------------------------------------------------------------- Data Type of Attribute .. autodata:: geosoft.gxapi.ATTRIBUTE_SINGLE :annotation: .. autoattribute:: geosoft.gxapi.ATTRIBUTE_SINGLE .. autodata:: geosoft.gxapi.ATTRIBUTE_SURFACE_SIDES :annotation: .. autoattribute:: geosoft.gxapi.ATTRIBUTE_SURFACE_SIDES .. autodata:: geosoft.gxapi.ATTRIBUTE_VERTICES :annotation: .. autoattribute:: geosoft.gxapi.ATTRIBUTE_VERTICES .. autodata:: geosoft.gxapi.ATTRIBUTE_FACES :annotation: .. autoattribute:: geosoft.gxapi.ATTRIBUTE_FACES <file_sep>/geosoft/gxapi/GXRGRD.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXIMG import GXIMG ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXRGRD(gxapi_cy.WrapRGRD): """ GXRGRD class. The `GXRGRD <geosoft.gxapi.GXRGRD>` object is used as a storage place for the control parameters which the Rangrid (minimum curvature) program needs to execute. The Run_RGRD function executes the Rangrid program using the `GXRGRD <geosoft.gxapi.GXRGRD>` object. 
""" def __init__(self, handle=0): super(GXRGRD, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXRGRD <geosoft.gxapi.GXRGRD>` :returns: A null `GXRGRD <geosoft.gxapi.GXRGRD>` :rtype: GXRGRD """ return GXRGRD() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def clear(self): """ Clears all the parameters in a `GXRGRD <geosoft.gxapi.GXRGRD>` object .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** DLL name `clear <geosoft.gxapi.GXRGRD.clear>` """ self._clear() @classmethod def create(cls): """ Create a handle to a Rangrid object :returns: `GXRGRD <geosoft.gxapi.GXRGRD>` Object :rtype: GXRGRD .. versionadded:: 5.0 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** The Rangrid object is initially empty. It will store the control file parameters which the Rangrid program needs to execute. Use the LoadParms_RGRD method to get the control file parameters into the `GXRGRD <geosoft.gxapi.GXRGRD>` object. """ ret_val = gxapi_cy.WrapRGRD._create(GXContext._get_tls_geo()) return GXRGRD(ret_val) @classmethod def create_img(cls, vv_x, vv_y, vv_z, ipj, ctl, grid): """ Run Rangrid directly on XYZ `GXVV <geosoft.gxapi.GXVV>` data, output to an `GXIMG <geosoft.gxapi.GXIMG>`. :param vv_x: X data (any numeric `GXVV <geosoft.gxapi.GXVV>` type) :param vv_y: Y data (any numeric `GXVV <geosoft.gxapi.GXVV>` type) :param vv_z: Z (grid value) data (any numeric `GXVV <geosoft.gxapi.GXVV>` type) :param ipj: Projection to apply to the output `GXIMG <geosoft.gxapi.GXIMG>` :param ctl: RANGRID control file. 
:param grid: Output grid name (optional) :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type ipj: GXIPJ :type ctl: str :type grid: str :returns: `GXIMG <geosoft.gxapi.GXIMG>` object :rtype: GXIMG .. versionadded:: 7.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the grid file name is defined, the `GXIMG <geosoft.gxapi.GXIMG>` is tied to a new output file. If the grid file name is not defined, the `GXIMG <geosoft.gxapi.GXIMG>` is memory-based; not tied to a file. """ ret_val = gxapi_cy.WrapRGRD._create_img(GXContext._get_tls_geo(), vv_x, vv_y, vv_z, ipj, ctl.encode(), grid.encode()) return GXIMG(ret_val) def default(self, zchan, in_dat): """ Set the defaults. :param zchan: Name of Z Channel to perfrom gridding on :param in_dat: Handle to source `GXDAT <geosoft.gxapi.GXDAT>` object (from database) :type zchan: str :type in_dat: GXDAT :returns: 0 OK, 1 Error. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._default(zchan.encode(), in_dat) return ret_val def load_parms(self, file): """ Retrieves a Rangrid object's control parameters from a file, or sets the parameters to default if the file doesn't exist. :param file: Name of file to get the parameter settings from :type file: str :returns: 0 OK, 1 Error. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the control file name passed into this function is a file which does not exist, then the defaults for a Rangrid control file will be generated and put into the `GXRGRD <geosoft.gxapi.GXRGRD>` object. 
Otherwise, the control file's settings are retrieved from the file and loaded into the `GXRGRD <geosoft.gxapi.GXRGRD>` object. """ ret_val = self._load_parms(file.encode()) return ret_val def run(self, in_dat, out_dat): """ Executes the Rangrid program, using the input channel and output file parameters. :param in_dat: Handle to source `GXDAT <geosoft.gxapi.GXDAT>` object (from database) :param out_dat: Handle to output grid file `GXDAT <geosoft.gxapi.GXDAT>` :type in_dat: GXDAT :type out_dat: GXDAT :returns: 0 OK, 1 Error. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = self._run(in_dat, out_dat) return ret_val @classmethod def run2(cls, db, x, y, z, ctl, grd): """ Executes the Rangrid program directly on a database. :param db: Handle to a database :param x: Y Channel :param y: X Channel :param z: Data channel :param ctl: RANGRID control file. :param grd: Output grid name :type db: GXDB :type x: str :type y: str :type z: str :type ctl: str :type grd: str :returns: 0, always. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ ret_val = gxapi_cy.WrapRGRD._run2(GXContext._get_tls_geo(), db, x.encode(), y.encode(), z.encode(), ctl.encode(), grd.encode()) return ret_val def save_parms(self, name): """ Puts the Rangrid object's control parameters back into its control file. :param name: Name of file to put the parameter settings into :type name: str :returns: 0 OK, 1 Error. :rtype: int .. versionadded:: 6.0.1 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ **Note:** If the control file did not previously exist, it will be created. Otherwise, the old file will be overwritten. 
""" ret_val = self._save_parms(name.encode()) return ret_val @classmethod def run_vv(cls, vv_x, vv_y, vv_z, ipj, ctl, grd): """ Executes the Rangrid program directly on input data VVs. :param vv_x: X data :param vv_y: Y data :param vv_z: Z (grid value) data :param ipj: Projection to put into grid :param ctl: RANGRID control file. :param grd: Output grid name :type vv_x: GXVV :type vv_y: GXVV :type vv_z: GXVV :type ipj: GXIPJ :type ctl: str :type grd: str .. versionadded:: 6.3 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapRGRD._run_vv(GXContext._get_tls_geo(), vv_x, vv_y, vv_z, ipj, ctl.encode(), grd.encode()) @classmethod def run_list(cls, dbs, zch, ipj, ctl, grd): """ Executes the Rangrid program from a list of databases. :param dbs: List of databases (using | separator) :param zch: Z Channel :param ipj: Projection to put into grid :param ctl: RANGRID control file. :param grd: Output grid name :type dbs: str :type zch: str :type ipj: GXIPJ :type ctl: str :type grd: str .. versionadded:: 9.4 **License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_ """ gxapi_cy.WrapRGRD._run_list(GXContext._get_tls_geo(), dbs.encode(), zch.encode(), ipj, ctl.encode(), grd.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/geosoft/gxapi/GXDSEL.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDSEL(gxapi_cy.WrapDSEL): """ GXDSEL class. The `GXDSEL <geosoft.gxapi.GXDSEL>` object is used to select subsets of data from the DATA object """ def __init__(self, handle=0): super(GXDSEL, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDSEL <geosoft.gxapi.GXDSEL>` :returns: A null `GXDSEL <geosoft.gxapi.GXDSEL>` :rtype: GXDSEL """ return GXDSEL() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls): """ Create a Selection object :returns: `GXDSEL <geosoft.gxapi.GXDSEL>` handle, terminates if creation fails :rtype: GXDSEL .. versionadded:: 5.0.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val = gxapi_cy.WrapDSEL._create(GXContext._get_tls_geo()) return GXDSEL(ret_val) def data_significant_figures(self, sf): """ Specify the data significant figures required :param sf: Significant figures (positive, can be fractional) :type sf: float .. versionadded:: 5.0.8 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** This is the number of significant figures that you require for the data. You can reduce this number to achieve better compression ratios. This should only be used when there is one data type in the data. See sSpatialResolution_DSEL to set the desired spatial resolution. 
""" self._data_significant_figures(sf) def meta_query(self, query): """ Specify a metadata query string. :param query: Meta query string :type query: str .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._meta_query(query.encode()) def picture_quality(self, quality): """ Specify the quality of pictures being returned. :param quality: Quality :type quality: int .. versionadded:: 5.1.4 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** Affected Data Types: PICTURE """ self._picture_quality(quality) def request_all_info(self, request): """ Request that all meta-data info be sent :param request: TRUE to for all data, FALSE - for normal data :type request: int .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._request_all_info(request) def select_area(self, pply): """ Select a complex clipping area :param pply: `GXPLY <geosoft.gxapi.GXPLY>` containing complex area (must contain a projection) :type pply: GXPLY .. versionadded:: 5.1.3 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The DAP server may not handle clipping and may return more data than requested. """ self._select_area(pply) def select_rect(self, min_x, min_y, max_x, max_y): """ Select a rectangular area. :param min_x: Min X :param min_y: Min Y :param max_x: Max X :param max_y: Max Y :type min_x: float :type min_y: float :type max_x: float :type max_y: float .. 
versionadded:: 5.0.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._select_rect(min_x, min_y, max_x, max_y)
        



    def select_resolution(self, res, force):
        """
        Specify the resolution desired

        :param res:    Minimum Resolution
        :param force:  TRUE to force this resolution, if possible
        :type  res:    float
        :type  force:  int

        .. versionadded:: 5.0.3

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Resolution must be specified in the units of the selection `GXIPJ <geosoft.gxapi.GXIPJ>`.
        This will be the optimum data resolution. (grid cell for grids, data
        separation for other data types). You will normally get a reasonable
        resolution as near to or smaller than this unless
        sRequireResolution_DSEL has been set.
        Call sRequireResolution_DSEL with TRUE to force the client to
        re-sample the data to the resolution requested.
        """
        self._select_resolution(res, force)
        



    def select_size(self, width, height):
        """
        Specify the image size desired

        :param width:   Image width in pixels
        :param height:  Image height in pixels
        :type  width:   int
        :type  height:  int

        .. versionadded:: 7.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._select_size(width, height)
        



    def set_extract_as_document(self, value):
        """
        Specify that we want to extract this file as a document

        :param value:  TRUE (1) if we want as a document
        :type  value:  int

        .. versionadded:: 8.0

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
        """
        self._set_extract_as_document(value)
        



    def set_ipj(self, ipj, force):
        """
        Set the desired projection

        :param ipj:    `GXIPJ <geosoft.gxapi.GXIPJ>` to set
        :param force:  TRUE to force reprojection, if possible
        :type  ipj:    GXIPJ
        :type  force:  int

        ..
versionadded:: 5.0.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** If the server supports reprojection, the data will be
        reprojected at the server.
        If reprojection is not forced, the data may come in any projection.
        The spatial resolution and accuracy are assumed to be in the
        coordinate system defined by this `GXIPJ <geosoft.gxapi.GXIPJ>`.
        """
        self._set_ipj(ipj, force)
        



    def spatial_accuracy(self, acc):
        """
        Specify the spatial accuracy required.

        :param acc:  Spatial accuracy desired
        :type  acc:  float

        .. versionadded:: 5.0.8

        **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_

        **Note:** Must be specified in the units of the selection `GXIPJ <geosoft.gxapi.GXIPJ>`.
        The spatial accuracy is used to improve compression performance for
        the spatial component of the data returned.
        You can reduce this number to achieve better compression ratios.
        This should only be used when there is one data type in the data.
        """
        self._spatial_accuracy(acc)
        




### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend


### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer
<file_sep>/geosoft/gxapi/GXGER.py
### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref


### endblock ClassImports

### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header

### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXGER(gxapi_cy.WrapGER):
    """
    GXGER class.

    Allows access to a Geosoft format error message file.
This class does not in itself produce an error message, but retrieves a selected message from the file, and allows the setting of replacement parameters within the message. It is up to the user to display or use the message. **Note:** `GXGER <geosoft.gxapi.GXGER>` message files contain numbered messages that can be used within GXs. Following is an example from the file GEOSOFT.`GXGER <geosoft.gxapi.GXGER>`: #20008 ! Invalid password. The product installation has failed. #20009 ! Unable to find INI file: %1 ! See the documentation for details A '#' character in column 1 indicates a message number. The message follows on lines that begin with a '!' character. Strings in the message may be replaced at run time with values using the `set_string <geosoft.gxapi.GXGER.set_string>`, `set_int <geosoft.gxapi.GXGER.set_int>` and `set_double <geosoft.gxapi.GXGER.set_double>` methods. The iGet_GER will return the message with strings replaced by their settings. By convention, we recommend that you use "%1", "%2", etc. as replacement strings. """ def __init__(self, handle=0): super(GXGER, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXGER <geosoft.gxapi.GXGER>` :returns: A null `GXGER <geosoft.gxapi.GXGER>` :rtype: GXGER """ return GXGER() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous @classmethod def create(cls, file): """ Opens an ASCII error file to read from. :param file: `GXGER <geosoft.gxapi.GXGER>` file name. :type file: str :returns: `GXGER <geosoft.gxapi.GXGER>` Object :rtype: GXGER .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Note:** The `GXGER <geosoft.gxapi.GXGER>` file may be in the local directory or the GEOSOFT directory. 
""" ret_val = gxapi_cy.WrapGER._create(GXContext._get_tls_geo(), file.encode()) return GXGER(ret_val) def get(self, num, message): """ Get a message string. :param num: Message number :param message: Message string returned, replacements filtered :type num: int :type message: str_ref :returns: 0 if message found 1 if no message, passed message remains unchanged :rtype: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ ret_val, message.value = self._get(num, message.value.encode()) return ret_val def set_int(self, parm, set): """ Set a replacement string value to an int. :param parm: Replacement string (ie. "%1") :param set: Setting :type parm: str :type set: int .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_int(parm.encode(), set) def set_double(self, parm, set): """ Set a replacement string value to a real. :param parm: Replacement string (ie. "%1") :param set: Setting :type parm: str :type set: float .. versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_double(parm.encode(), set) def set_string(self, parm, set): """ Set a replacement string value. :param parm: Replacement string (ie. "%1") :param set: Setting :type parm: str :type set: str .. 
versionadded:: 5.0 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ """ self._set_string(parm.encode(), set.encode()) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/examples/tutorial/Geosoft Project/grid_vertical_derivative.py import geosoft.gxpy.project as gxpj import geosoft.gxpy.grid as gxgrd import geosoft.gxapi as gxapi def rungx(): project = gxpj.Geosoft_project() # there must be grids in the project if len(project.project_grids) == 0: raise Exception('This project contains no grids.') # default grid will be the current grid, or the first grid in the list of project grids if project.current_grid: default_grid = project.current_grid else: default_grid = project.project_grids[0] # ask the user to select a grid grid_name = gxpj.get_user_input(title='Vertical derivative of a grid', prompt='Grid to process', kind='list', items=project.project_grids, default=default_grid) # ask for a new grid file name new_grid = gxpj.get_user_input(title='Vertical derivative of a grid', prompt='Output vertical derivative grid name', kind='file', filemask='*.grd') # calculate vertical derivative with gxgrd.Grid(grid_name) as g_input: with gxgrd.Grid.new(new_grid, properties=g_input.properties(), overwrite=True) as g_output: gxapi.GXIMU.grid_vd(g_input.gximg, g_output.gximg) # open the vertical derivative grid gxpj.add_document(new_grid) <file_sep>/geosoft/gxapi/GXDCOL.py ### extends 'class_empty.py' ### block ClassImports # NOTICE: Do not edit anything here, it is generated code from . 
import gxapi_cy from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref from .GXST import GXST from .GXVV import GXVV ### endblock ClassImports ### block Header # NOTICE: The code generator will not replace the code in this block ### endblock Header ### block ClassImplementation # NOTICE: Do not edit anything here, it is generated code class GXDCOL(gxapi_cy.WrapDCOL): """ GXDCOL class. Object to interface with 2D map and 3D view objects that supports colour tool editing. """ def __init__(self, handle=0): super(GXDCOL, self).__init__(GXContext._get_tls_geo(), handle) @classmethod def null(cls): """ A null (undefined) instance of `GXDCOL <geosoft.gxapi.GXDCOL>` :returns: A null `GXDCOL <geosoft.gxapi.GXDCOL>` :rtype: GXDCOL """ return GXDCOL() def is_null(self): """ Check if this is a null (undefined) instance :returns: True if this is a null (undefined) instance, False otherwise. :rtype: bool """ return self._internal_handle() == 0 # Miscellaneous def number_of_layers(self): """ Get the number of layers. :returns: The number of layers (often just one). :rtype: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._number_of_layers() return ret_val def get_type(self): """ Get a layer's type :returns: :ref:`DCOL_TYPE` :rtype: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_type() return ret_val def get_layer_info(self, index, itr, layer_name): """ Get a layer's information :param index: Index of layer :param itr: `GXITR <geosoft.gxapi.GXITR>` Handle :param layer_name: Name returned :type index: int :type itr: GXITR :type layer_name: str_ref .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ layer_name.value = self._get_layer_info(index, itr, layer_name.value.encode()) def get_layer_itr(self, index, itr): """ Get a layer's ITR :param index: Index of layer :param itr: `GXITR <geosoft.gxapi.GXITR>` Handle :type index: int :type itr: GXITR .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._get_layer_itr(index, itr) def set_layer_itr(self, index, itr, redrawMap): """ Set a layer's ITR :param index: Index of layer :param itr: `GXITR <geosoft.gxapi.GXITR>` Handle :param redrawMap: Force redraw of map (0: No, 1: Yes)? :type index: int :type itr: GXITR :type redrawMap: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_layer_itr(index, itr, redrawMap) def set_itr_transform_from_layer(self, index, itr, transformType): """ Set the input ITR transform to the provided type, based on the statistics of the chosen layer. :param index: Index of layer :param itr: `GXITR <geosoft.gxapi.GXITR>` Handle :param transformType: :ref:`ITR_ZONE_MODEL` :type index: int :type itr: GXITR :type transformType: int :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. 
**Note:** If the input tranform type is ITR_ZONE_MODEL_NOZONE or ITR_ZONE_MODEL_NONE then the user-preference default will be used. NOTE: This alters the input ITR, not the selected layer's own ITR. The layer is accessed purely to get the current statistics. """ ret_val = self._set_itr_transform_from_layer(index, itr, transformType) return ret_val def update_zone_transform_type(self, index, transformType): """ Recalculate the layer's ITR to the provided type, based on the statistics of the chosen layer. :param index: Index of layer :param transformType: :ref:`ITR_ZONE_MODEL` :type index: int :type transformType: int :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** If the input tranform type is ITR_ZONE_MODEL_NOZONE, ITR_ZONE_MODEL_NONE then nothing will happen and the function will return 1. The dialogs to enter parameters are shown for Linear, Log, Normal and Equal. """ ret_val = self._update_zone_transform_type(index, transformType) return ret_val def update_zone_transform_parameters(self, index): """ Recalculate the layer's ITR, based on the current type of the `GXDCOL <geosoft.gxapi.GXDCOL>`. Launches anappropriate zone transform type parameter GUI based on the current selection. :param index: Index of layer :type index: int :returns: 0 - Ok 1 - Cancel :rtype: int .. versionadded:: 2022.1 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** The transform type assumed is the current transform type for the `GXDCOL <geosoft.gxapi.GXDCOL>`. The dialogs to enter parameters are shown for Linear, Log, Normal and Equal. 
""" ret_val = self._update_zone_transform_parameters(index) return ret_val def get_layer_statistics(self, index): """ Get a `GXST <geosoft.gxapi.GXST>` filled with layer statistics :param index: Index of layer :type index: int :returns: `GXST <geosoft.gxapi.GXST>` object :rtype: GXST .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_layer_statistics(index) return GXST(ret_val) def get_layer_histogram(self, index, incr, min): """ Get a `GXVV <geosoft.gxapi.GXVV>` filled with histogram bin counts for each zone of the ITR :param index: Index of layer :param incr: width of bin increment :param min: Min (value at start of 2nd bin) :type index: int :type incr: float_ref :type min: float_ref :returns: `GXVV <geosoft.gxapi.GXVV>` object :rtype: GXVV .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val, incr.value, min.value = self._get_layer_histogram(index, incr.value, min.value) return GXVV(ret_val) def save_layer_itr(self, index): """ Save the layer's ITR to a file. A dialog prompts for the file name. :param index: Index of layer :type index: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._save_layer_itr(index) def get_brightness_type(self): """ Is brightness set separately by layer and by object or just by object? :returns: BRIGHTNESS_ALL - Set brightness for the object as a whole only BRIGHTNESS_ALL_AND_LAYERS - Set brightness either for the object as a whole or by layer :rtype: int .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. **Note:** For some objects (like AGG) brightness can be set for each layer, or for the object as a whole, while in others (like CSYMB) it can be set only for the object as a whole. """ ret_val = self._get_brightness_type() return ret_val def set_brightness(self, brightness, layerSelection, layer): """ Set the brightness of a single layer, or all the layers :param brightness: Brightness value (-1.0 (black) <= brightness <= 1.0 (white)) :param layerSelection: :ref:`BRIGHT` :param layer: layer index (required for BRIGHT_LAYER :type brightness: float :type layerSelection: int :type layer: int .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_brightness(brightness, layerSelection, layer) def get_brightness(self, layerSelection, layer): """ Get the brightness of a single layer, or all the layers :param layerSelection: :ref:`BRIGHT` :param layer: layer index (required for BRIGHT_LAYER :type layerSelection: int :type layer: int :rtype: float .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_brightness(layerSelection, layer) return ret_val def set_transparency(self, transparency): """ Set the transparency. This is set for the entire map group. :param transparency: Transparency value (1.0 - Opaque, 0.0 - Transparent) :type transparency: float .. 
versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._set_transparency(transparency) def get_transparency(self): """ Get the transparency. This is returned for the entire map group. :rtype: float .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ ret_val = self._get_transparency() return ret_val def reset(self): """ Reset the AGG back to its initial state. Same as cancelling out of the colour tool and restarting; all layers are reset. .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._reset() def end(self, apply_changes): """ TODO :param apply_changes: Apply changes to map. :type apply_changes: bool .. versionadded:: 2021.2 **License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_ **Limitations:** May not be available while executing a command line program. """ self._end(apply_changes) ### endblock ClassImplementation ### block ClassExtend # NOTICE: The code generator will not replace the code in this block ### endblock ClassExtend ### block Footer # NOTICE: The code generator will not replace the code in this block ### endblock Footer<file_sep>/docs/GXPGU.rst .. _GXPGU: GXPGU class ================================== .. autoclass:: geosoft.gxapi.GXPGU :members: .. _BLAKEY_TEST: BLAKEY_TEST constants ----------------------------------------------------------------------- Types of BLAKEY tests .. 
autodata:: geosoft.gxapi.BLAKEY_TEST_ONESIDE :annotation: .. autoattribute:: geosoft.gxapi.BLAKEY_TEST_ONESIDE .. autodata:: geosoft.gxapi.BLAKEY_TEST_TWOSIDE :annotation: .. autoattribute:: geosoft.gxapi.BLAKEY_TEST_TWOSIDE .. autodata:: geosoft.gxapi.BLAKEY_TEST_THREESIDE :annotation: .. autoattribute:: geosoft.gxapi.BLAKEY_TEST_THREESIDE .. autodata:: geosoft.gxapi.BLAKEY_TEST_FOURSIDE :annotation: .. autoattribute:: geosoft.gxapi.BLAKEY_TEST_FOURSIDE .. _PGU_CORR: PGU_CORR constants ----------------------------------------------------------------------- Correlation (must be synchronized with :ref:`ST2_CORRELATION`) .. autodata:: geosoft.gxapi.PGU_CORR_SIMPLE :annotation: .. autoattribute:: geosoft.gxapi.PGU_CORR_SIMPLE .. autodata:: geosoft.gxapi.PGU_CORR_PEARSON :annotation: .. autoattribute:: geosoft.gxapi.PGU_CORR_PEARSON .. _PGU_DIRECTGRID: PGU_DIRECTGRID constants ----------------------------------------------------------------------- Type of statistic to use on the data points in each cell. .. autodata:: geosoft.gxapi.PGU_DIRECTGRID_MINIMUM :annotation: .. autoattribute:: geosoft.gxapi.PGU_DIRECTGRID_MINIMUM .. autodata:: geosoft.gxapi.PGU_DIRECTGRID_MAXIMUM :annotation: .. autoattribute:: geosoft.gxapi.PGU_DIRECTGRID_MAXIMUM .. autodata:: geosoft.gxapi.PGU_DIRECTGRID_MEAN :annotation: .. autoattribute:: geosoft.gxapi.PGU_DIRECTGRID_MEAN .. autodata:: geosoft.gxapi.PGU_DIRECTGRID_ITEMS :annotation: .. autoattribute:: geosoft.gxapi.PGU_DIRECTGRID_ITEMS .. _PGU_DIRECTION: PGU_DIRECTION constants ----------------------------------------------------------------------- Direction .. autodata:: geosoft.gxapi.PGU_FORWARD :annotation: .. autoattribute:: geosoft.gxapi.PGU_FORWARD .. autodata:: geosoft.gxapi.PGU_BACKWARD :annotation: .. autoattribute:: geosoft.gxapi.PGU_BACKWARD .. _PGU_TRANS: PGU_TRANS constants ----------------------------------------------------------------------- Transform methods for the columns .. 
autodata:: geosoft.gxapi.PGU_TRANS_NONE :annotation: .. autoattribute:: geosoft.gxapi.PGU_TRANS_NONE .. autodata:: geosoft.gxapi.PGU_TRANS_LOG :annotation: .. autoattribute:: geosoft.gxapi.PGU_TRANS_LOG .. _PGU_INTERP_ORDER: PGU_INTERP_ORDER constants ----------------------------------------------------------------------- Interpolation direction order .. autodata:: geosoft.gxapi.PGU_INTERP_ORDER_XYZ :annotation: .. autoattribute:: geosoft.gxapi.PGU_INTERP_ORDER_XYZ .. autodata:: geosoft.gxapi.PGU_INTERP_ORDER_XZY :annotation: .. autoattribute:: geosoft.gxapi.PGU_INTERP_ORDER_XZY .. autodata:: geosoft.gxapi.PGU_INTERP_ORDER_YXZ :annotation: .. autoattribute:: geosoft.gxapi.PGU_INTERP_ORDER_YXZ .. autodata:: geosoft.gxapi.PGU_INTERP_ORDER_YZX :annotation: .. autoattribute:: geosoft.gxapi.PGU_INTERP_ORDER_YZX .. autodata:: geosoft.gxapi.PGU_INTERP_ORDER_ZXY :annotation: .. autoattribute:: geosoft.gxapi.PGU_INTERP_ORDER_ZXY .. autodata:: geosoft.gxapi.PGU_INTERP_ORDER_ZYX :annotation: .. autoattribute:: geosoft.gxapi.PGU_INTERP_ORDER_ZYX <file_sep>/geosoft/gxpy/tests/test_vox.py import unittest import os import numpy as np import geosoft import geosoft.gxpy.gx as gx import geosoft.gxpy.system as gsys import geosoft.gxpy.coordinate_system as gxcs import geosoft.gxpy.vox as gxvox import geosoft.gxpy.gdb as gxgdb from base import GXPYTest class Test(GXPYTest): @classmethod def setUpClass(cls): cls.setUpGXPYTest() cls.folder, files = gsys.unzip(os.path.join(os.path.dirname(cls._test_case_py), 'testvoxset.zip'), folder=cls._gx.temp_folder()) cls.vox_file = os.path.join(cls.folder, 'test.geosoft_voxel') cls.vectorvox_file = os.path.join(cls.folder, 'mvi.geosoft_vectorvoxel') @classmethod def tearDownClass(cls): cls.tearDownGXPYTest() pass #gxvox.delete_files(cls.vox_file) def test_voxProperties(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: self.assertEqual(vox.name, 'test') self.assertEqual(len(vox.locations_x), vox.nx) self.assertEqual(len(vox.locations_y), 
vox.ny) self.assertEqual(len(vox.locations_z), vox.nz) self.assertEqual(vox.xyz(0, 0, 0), (vox.origin_x, vox.origin_y, vox.origin_z)) self.assertEqual(vox.extent_xyz, (438550.0, 6126150.0, -1022.3323879241943, 441500.0, 6129500.0, 575.0)) self.assertEqual(vox.extent_xy, (438550.0, 6126150.0, 441500.0, 6129500.0)) self.assertEqual((vox.nx, vox.ny, vox.nz), (59, 67, 26)) self.assertEqual(str(vox.coordinate_system), 'NAD83 / UTM zone 20N') vox.is_depth = True self.assertFalse(vox.is_elevation) vox.is_elevation = True self.assertFalse(vox.is_depth) vox.is_depth = True self.assertEqual(vox.xyz(0, 0, 0), (vox.origin_x, vox.origin_y, vox.origin_z)) self.assertEqual(vox.extent_xyz, (438550.0, 6126150.0, -575.0, 441500.0, 6129500.0, 1022.3323879241943)) self.assertEqual(vox.extent_xy, (438550.0, 6126150.0, 441500.0, 6129500.0)) self.assertEqual((vox.nx, vox.ny, vox.nz), (59, 67, 26)) def test_iter(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: valid = 0 dummy = 0 sum = 0. for x, y, z, v in vox: if v is not None: valid += 1 sum += v else: dummy += 1 self.assertAlmostEqual(sum, 45.9709323711) self.assertEqual(valid + dummy, vox.nx * vox.ny * vox.nz) self.assertEqual(vox[50, 65, 18], (441075.0, 6129425.0, 370.34108924865723, 0.00019816514181249432)) self.assertEqual(vox[0, 0, 0], (vox.origin_x, vox.origin_y, vox.origin_z, None)) with gxvox.Vox.open(self.vox_file, dtype=np.int) as vox: valid = 0 dummy = 0 sum = 0. 
for x, y, z, v in vox: if v is not None: valid += 1 sum += v else: dummy += 1 self.assertAlmostEqual(sum, 0) self.assertEqual(valid + dummy, vox.nx * vox.ny * vox.nz) self.assertEqual(vox[50, 65, 18], (441075.0, 6129425.0, 370.34108924865723, 0.0)) self.assertEqual(vox[0, 0, 0], (vox.origin_x, vox.origin_y, vox.origin_z, None)) data = vox.np() self.assertEqual(data.dtype, np.int) data = vox.np(dtype=np.float64) self.assertEqual(data.dtype, np.float64) data = gxvox.Vox.open(self.vox_file, dtype=np.int).np(dtype=np.float64) self.assertEqual(data.dtype, np.float64) data = gxvox.Vox.open(self.vox_file, dtype=np.float64).np(dtype=np.int) self.assertEqual(data.dtype, np.int) def test_value(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: self.assertEqual(vox.value_at_location(vox.xyz(50, 65, 18)), 0.00019816514181249432) self.assertEqual(vox.value_at_location((441075.0, 6129425.0, 370.34108924865723)), 0.00019816514181249432) self.assertEqual(vox.value_at_location((441076, 6129426, 370), interpolate=gxvox.INTERP_NEAREST), 0.00019816514181249432) self.assertEqual(vox.value_at_location((441076, 6129426, 370)), 0.0002534898842353971) self.assertEqual(vox.value_at_location((441100, 6129400, 225.895), interpolate=gxvox.INTERP_SMOOTH), 0.003535265803154243) self.assertEqual(vox.value_at_location((0, 0, 0)), None) self.assertEqual(vox.value_at_location((-1.0e25, 0, 1e25)), None) def test_np(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: npv = vox.np() self.assertEqual(npv.shape, (vox.nz, vox.ny, vox.nx)) sum = npv[np.isfinite(npv)].sum() self.assertAlmostEqual(sum, 45.9709323711) size = (5, 8, 14) with gxvox.Vox.open(self.vox_file) as vox: npv = vox.np(subset=((30, 50, 9), size)) self.assertEqual(npv.shape, (size[2], size[1], size[0])) sum = npv[np.isfinite(npv)].sum() self.assertAlmostEqual(sum, 0.56577674920814858) with gxvox.Vox.open(self.vox_file) as vox: npv = vox.np(subset=(None, (2, 3, 4))) self.assertEqual(npv.shape, (4, 3, 2)) sum = 
npv[np.isfinite(npv)].sum() self.assertAlmostEqual(sum, 0.0) with gxvox.Vox.open(self.vox_file) as vox: npv = vox.np(subset=((None, None, -3), (None, None, 1))) self.assertEqual(npv.shape, (1, vox.ny, vox.nx)) sum = npv[np.isfinite(npv)].sum() self.assertAlmostEqual(sum, 1.00971206805) with gxvox.Vox.open(self.vox_file) as vox: npv = vox.np(subset=((None, None, -1), (None, None, 1))) self.assertEqual(npv.shape, (1, vox.ny, vox.nx)) sum = npv[np.isfinite(npv)].sum() self.assertAlmostEqual(sum, 0.01212498417) def test_metadata(self): self.start() with gxvox.Vox.open(self.vox_file, mode=gxvox.MODE_READWRITE) as vox: m = vox.metadata gm = m['geosoft'] self.assertTrue('dataset' in gm) self.assertTrue('georeference' in gm['dataset']) newstuff = {'maki': {'a': 1, 'b': (4, 5, 6), 'units': 'nT'}} vox.metadata = newstuff vox.unit_of_measure = 'billy_bob' with gxvox.Vox.open(self.vox_file) as vox: m = vox.metadata maki = m['maki'] self.assertEqual(maki['b'], ['4', '5', '6']) self.assertEqual(maki['units'], 'nT') self.assertEqual(vox.unit_of_measure, 'billy_bob') def test_new(self): self.start() npd = np.zeros((12, 50, 35), dtype=np.float64) npd[:] = np.nan with gxvox.Vox.new("test_new", npd, temp=True) as vox: self.assertEqual((vox.nx, vox.ny, vox.nz), (35, 50, 12)) npv = vox.np() self.assertEqual(np.sum(npv[np.isfinite(npv)]), 0) self.assertEqual(list(vox.locations_x[0:2]), [0., 1.]) self.assertEqual(list(vox.locations_y[0:2]), [0., 1.]) self.assertEqual(list(vox.locations_z[0:2]), [0., 1.]) npd[:] = 1 with gxvox.Vox.new("test_new", npd, origin=(1, 2, 3), cell_size=(0.1, 0.2, 10), temp=True) as vox: self.assertEqual((vox.nx, vox.ny, vox.nz), (35, 50, 12)) npv = vox.np() self.assertEqual(np.sum(npv[np.isfinite(npv)]), vox.nx * vox.ny * vox.nz) self.assertEqual(list(vox.locations_x[0:2]), [1., 1.1]) self.assertEqual(list(vox.locations_y[0:2]), [2., 2.2]) self.assertEqual(list(vox.locations_z[0:2]), [3., 13.]) cx = (1, 2, 3, 2, 1) cy = (10, 10, 10) cz = (5, 4, 3, 2) npd = 
np.ones((len(cz), len(cy), len(cx))) with gxvox.Vox.new("test_new", npd, origin=(1, 2, 3), cell_size=(cx, cy, cz), temp=True) as vox: self.assertEqual((vox.nx, vox.ny, vox.nz), (5, 3, 4)) npv = vox.np() self.assertEqual(np.sum(npv[np.isfinite(npv)]), vox.nx * vox.ny * vox.nz) self.assertEqual(list(vox.locations_x), [1.0, 2.5, 5.0, 7.5, 9.0]) self.assertEqual(list(vox.locations_y), [2.0, 12.0, 22.0]) self.assertEqual(list(vox.locations_z), [3.0, 7.5, 11.0, 13.5]) with gxvox.Vox.new("test_new", npd, origin=(0.5, 5.0, 2.5), cell_size=(cx, cy, cz), temp=True, depth=True) as vox: self.assertEqual((vox.nx, vox.ny, vox.nz), (5, 3, 4)) npv = vox.np() self.assertEqual(np.sum(npv[np.isfinite(npv)]), vox.nx * vox.ny * vox.nz) self.assertEqual(list(vox.locations_x), [0.5, 2.0, 4.5, 7.0, 8.5]) self.assertEqual(list(vox.locations_y), [5.0, 15.0, 25.0]) self.assertEqual(list(vox.locations_z), [2.5, 7.0, 10.5, 13.0]) self.assertRaises(gxvox.VoxException, gxvox.Vox.new, "test", np.zeros((2,3,15)), cell_size=(cx, cy, cz)) def test_new_data(self): self.start() with gxvox.Vox.open(self.vox_file) as vox: npv = vox.np() test_edge = list(vox.np(subset=((vox.nx - 1, 6, vox.nz - 8), (1, 10, 1))).flatten()) with gxvox.Vox.new("test_data", npv, temp=True, origin=(vox.origin_x, vox.origin_y, vox.origin_z), cell_size=(vox.cells_x, vox.cells_y, vox.cells_z), coordinate_system=vox.coordinate_system) as vox_copy: npv = vox_copy.np() self.assertEqual(npv.shape, (vox.nz, vox.ny, vox.nx)) sum = npv[np.isfinite(npv)].sum() self.assertAlmostEqual(sum, 45.9709323711) test_copy = list(vox_copy.np(subset=((vox_copy.nx - 1, 6, vox_copy.nz - 8), (1, 10, 1))).flatten()) self.assertEqual(test_edge, test_copy) ez = vox_copy[57, 62, 8][3] vox_copy.is_depth = True self.assertEqual(vox_copy[57, 62, vox_copy.nz - 9][3], ez) test_copy = list(vox_copy.np(subset=((vox_copy.nx - 1, 6, 7), (1, 10, 1))).flatten()) self.assertEqual(test_edge, test_copy) def test_vectorvoxel(self): self.start() with 
gxvox.Vox.open(self.vectorvox_file) as vox: npv = vox.np(dtype=np.float64) self.assertEqual(npv.shape, (38, 56, 55, 3)) self.assertEqual(tuple(npv[25, 25, 25]), (-0.16268515586853027, -0.02528655156493187, 1.6525727510452271)) self.assertAlmostEqual(tuple(vox[25, 25, 25][3])[0], -0.16268516) self.assertAlmostEqual(tuple(vox[25, 25, 25][3])[1], -0.025286552) self.assertAlmostEqual(tuple(vox[25, 25, 25][3])[2], 1.6525728) npv[25, 25, 25] = (1., 2., np.nan) with gxvox.Vox.new('vox_', npv, temp=True, overwrite=True) as newvox: self.assertAlmostEqual(tuple(newvox[25, 25, 25][3])[0], 1.) self.assertAlmostEqual(tuple(newvox[25, 25, 25][3])[1], 2.) self.assertAlmostEqual(tuple(newvox[25, 25, 25][3])[2], None) with gxvox.Vox.copy_vox('vox_', vox, npv, temp=True, overwrite=True) as newvox: self.assertAlmostEqual(tuple(newvox[25, 25, 25][3])[0], 1.) self.assertAlmostEqual(tuple(newvox[25, 25, 25][3])[1], 2.) self.assertAlmostEqual(tuple(newvox[25, 25, 25][3])[2], None) def test_uom(self): self.start() uom = gxvox.Vox.open(self.vectorvox_file).unit_of_measure with gxvox.Vox.open(self.vectorvox_file) as g: g.unit_of_measure = 'maki' self.assertEqual(g.unit_of_measure, 'maki') self.assertEqual(gxvox.Vox.open(self.vectorvox_file).unit_of_measure, uom) with gxvox.Vox.open(self.vectorvox_file, mode=gxvox.MODE_READWRITE) as g: g.unit_of_measure = 'maki' self.assertEqual(g.unit_of_measure, 'maki') self.assertEqual(gxvox.Vox.open(self.vectorvox_file).unit_of_measure, 'maki') @unittest.skip('WIP') def test_rbf(self): self.start() def feed_data(n): if n >= len(nxyzv): return None return nxyzv[n] def gdb_from_callback(callback): _gdb = gxgdb.Geosoft_gdb.new() channels = ('x', 'y', 'z', 'v') il = 0 xyzv_kist = callback(il) while xyzv_kist is not None: _gdb.write_line('L{}'.format(il), xyzv_kist, channels=channels) il += 1 xyzv_kist = callback(il) _gdb.xyz_channels = channels[:3] return _gdb xyzv = [(45., 10., 0., 100), (60., 25., 0., 77.), (50., 8., 5., 80.), (55., 18., 12., 90.)] with 
gxvox.Vox.rbf(xyzv, cs=1.) as vox: self.assertEqual((vox.nx, vox.ny), (9, 9)) self.assertAlmostEqual(vox.statistics()['sd'], 8.708599, 5) # a callback, used for very large data, or to feed data efficiently from some other source. nxyzv = np.array([[(45., 10., 0., 100), (60., 25., 10., 77.), (50., 8., 10., 81.), (55., 11., 25., 66.)], [(20., 15., 5., 108), (25., 5., 12., 77.), (33., 9., 10., np.nan), (28., 2., 20., 22.)], [(35., 18., 8., 110), (40., 31., 18., 77.), (13., 4., 10., 83.), (44., 4., 18., 7.)]]) with gxvox.Vox.rbf(feed_data, cs=0.25) as vox: self.assertEqual((vox.nx, vox.ny), (189, 117)) self.assertAlmostEqual(vox.statistics()['sd'], 22.320659139902336, 5) with gdb_from_callback(feed_data) as gdb: with gxvox.Vox.rbf((gdb, 'v'), cs=0.25) as vox: self.assertEqual((vox.nx, vox.ny), (189, 117)) self.assertAlmostEqual(vox.statistics()['sd'], 22.320659139902336, 5) ############################################################################################### if __name__ == '__main__': unittest.main()
9abee058b0b2b3cb1a50afa462cbdf95c6eab7a5
[ "Markdown", "Python", "Text", "reStructuredText" ]
393
Python
GeosoftInc/gxpy
d762149f470ef49c6c18ddb2797bcd825398a03c
23fb2b998103aeff2ce68ea8fc62163c55847a45
refs/heads/master
<file_sep>import React, { Component } from "react"; import logo from "./logo.svg"; import "./App.css"; import NavigationBar from "./components/NavigationBar"; import { BrowserRouter as Router, Route } from "react-router-dom"; import Landing from "./components/Landing"; import Intro from "./components/Intro"; import Skills from "./components/Skills"; import Contact from "./components/Contact"; class App extends Component { constructor(props) { super(props); this.state = {}; this.connecToServer = this.connecToServer.bind(this); } connecToServer() { fetch("/"); } componentDidMount() { this.connecToServer(); } render() { return ( <Router> <div className="App"> <NavigationBar /> <Route exact path="/" component={Landing} /> <div className="container"></div> {/* <Route exact path="/skills" component={Skills} /> <Route exact path="/contact" component={Contact} /> <Route exact path="/intro" component={Intro} /> */} </div> </Router> ); } } export default App; <file_sep># Live @ www.johnpwajok.com # <NAME> - Personal web Site Personal website to show my projects and skills. Built with React and Express.js. 
<file_sep>import React, { Component } from "react"; import styles from "./styles/styles.css"; import FadeIn from "react-fade-in"; import data from "../images/data.jpg"; import artificialIntelligence from "../images/artificialIntelligence.jpg"; import database from "../images/database.jpg"; export default class Skills extends Component { render() { return ( <FadeIn> <div class="skills"> <FadeIn> <h2>My Skills</h2> <div class="skill-row"> <img class="web-design" src={data} alt="web-design" /> <h3>Web&nbsp;Development and Design</h3> <p> I studied web development both during my time in university as well as for my own personal projects, and have gained experience working with:{" "} </p> <ul className="skillList"> <li className="skillItem"> {" "} <i class="fab fa-html5"></i> HTML </li> <li className="skillItem"> <i class="fab fa-css3-alt"></i> CSS </li> <li className="skillItem"> <i class="fab fa-js-square"></i> JavaScript </li> <li className="skillItem"> <i class="fab fa-bootstrap"></i> Bootstrap </li> </ul> <ul className="skillList"> <li className="skillItem"> <i class="fab fa-react"></i> React </li> <li className="skillItem"> <i class="fab fa-node"></i> Node </li> <li className="skillItem"> <i class="fab fa-node-js"></i> Express </li> <li className="skillItem"> <i class="fas fa-pepper-hot"></i> Flask </li> </ul> <ul className="skillList"> <li className="skillItem"> <i class="fas fa-database"></i> MongoDB </li> <li className="skillItem"> <i class="fas fa-database"></i> PostgreSQL </li> <li className="skillItem"> <i class="fab fa-aws"></i> AWS RDS </li> <li className="skillItem"> <i class="fas fa-rocket"></i> Postman </li> </ul> </div> <div class="skill-row"> <br></br> <img class="machine-learning " src={artificialIntelligence} alt="machine-learning" /> <h3>Desktop Applications & Systems&nbsp;Software</h3> <p> I have gained experience working with various languages to create desktop applications and to handle backend processing for web applications including: </p> <ul 
className="skillList"> <li className="skillItem"> {" "} <i class="fab fa-python"></i> Python </li> <li className="skillItem"> <i class="fab fa-cuttlefish"></i> C </li> </ul> <ul className="skillList"> <li className="skillItem"> <i class="fab fa-java"></i> Java (+ Exposure to Android) </li> <li className="skillItem"> <i class="fab fa-r-project"></i> R (Data Visualisation) </li> </ul> </div> <div class="skill-row"> <img class="web-design " src={database} alt="web-design" /> <h3>Infrastructure</h3> <p> From my time in university and through self-learning, I have been able to gain a solid foundation working in multiple environments and with technologies including: </p> <ul className="skillList"> <li className="skillItem"> <i class="fab fa-windows"></i> Windows </li> <li className="skillItem"> <i class="fab fa-linux"></i> Linux </li> <li className="skillItem"> <i class="fab fa-apple"></i> MacOS </li> </ul> <ul className="skillList"> <li className="skillItem"> <i class="fab fa-android"></i> Android </li> <li className="skillItem"> <i class="fas fa-server"></i> Networking technologies </li> <li className="skillItem"> <i class="fas fa-database"></i> databases (MySQL, Oracle Database &amp; PostgreSQL) </li> </ul> </div> </FadeIn> </div> </FadeIn> ); } } <file_sep>import React, { Component } from "react"; import FadeIn from "react-fade-in"; import styles from "./styles/styles.css"; import gitOverviews from "../images/gitOverviews.png"; import chefPalace from "../images/chefPalace.png"; import IKAP from "../images/IKAP.png"; import fyjal from "../images/fyjal.png"; import stickyNotes from "../images/sticky-notes.png"; export default class Projects extends Component { render() { return ( <FadeIn> <div class="projects"> <h2>Featured Projects</h2> <div class="portfolioContainer"> <div class="row row-eq-height"> <div class="col-lg-6 col-md-6 col-sm-12 colItem"> <div class="jumbotron"> <h2 className="projectTitle">Chef Palace</h2> <img src={chefPalace} alt="ChefPalace" 
class="img-responsive" ></img> <br></br> <div className="projectInformation"> <p> Chef Palace is currently under development and is being built to be an online platform for ordering food from the fictional Chef Palace restaurant chain. The application is being developed with the MERN stack. The base of the front-end and account functionality has been completed and is now live, being hosted on a Heroku server. </p> <br></br> <center> <a href="https://chefpalace.herokuapp.com/" class="btn btn-primary liveBtn" target="_blank" rel="noopener noreferrer" > <i class="fas fa-hand-point-right"></i> Live Site </a> </center> <center> <a href="https://github.com/johnpwajok/Chef-Palace-React" class="btn btn-primary gitBtn" target="_blank" rel="noopener noreferrer" > <i class="fab fa-github"></i> View on GitHub </a> </center> </div> </div> </div> <div class="col-lg-6 col-md-6 col-sm-12 colItem"> <div class="jumbotron"> <h2 className="projectTitle">Git-Overviews</h2> <img src={gitOverviews} alt="git overview" class="img-responsive" ></img> <br></br> <div className="projectInformation"> <p> The Git-Overviews application was built to enable users to search for users on Github and see a concise and straightforward overview of their public projects. The app enables the user to see the GitHub users contact information along with all their repositories with their descriptions and a button that links directly to any project. 
</p> <br></br> <center> <a href="https://git-overviews.herokuapp.com/" class="btn btn-primary liveBtn" target="_blank" rel="noopener noreferrer" > <i class="fas fa-hand-point-right"></i> Live Site </a> </center> <center> <a href="https://github.com/johnpwajok/Utilising-GitHub-API" class="btn btn-primary gitBtn" target="_blank" rel="noopener noreferrer" > <i class="fab fa-github"></i> View on GitHub </a> </center> </div> </div> </div> </div> <div class="row row-eq-height"> <div class="col-lg-6 col-md-6 col-sm-12 colItem"> <div class="jumbotron"> <h2 className="projectTitle">Fyjal</h2> <img src={fyjal} alt="ChefPalace" class="img-responsive" ></img> <br></br> <div className="projectInformation"> <p> Fyjal is a website created to promote a fictional financial management mobile app. The website was created using React, Express as well as Bootstrap and the react-bootstrap libraries. The website is intended only to show the design and isn't functional (download buttons don't perform any action). Fyjal is hosted on Heroku. </p> <br></br> <center> <a href="https://fyjal.herokuapp.com/" class="btn btn-primary liveBtn" target="_blank" rel="noopener noreferrer" > <i class="fas fa-hand-point-right"></i> Live Site </a> </center> <center> <a href="https://github.com/johnpwajok/fyjal" class="btn btn-primary gitBtn" target="_blank" rel="noopener noreferrer" > <i class="fab fa-github"></i> View on GitHub </a> </center> </div> </div> </div> <div class="col-lg-6 col-md-6 col-sm-12 colItem"> <div class="jumbotron"> <h2 className="projectTitle">IKnowAPlace</h2> <img src={IKAP} alt="IKAP" class="img-responsive"></img> <br></br> <div className="projectInformation"> <p> IKnowAPlace was built as my final year project. The project is a recommendation system for bars and restaurants. IKnowAPlace was created using React, Python, Flask and PostgreSQL. It generates recommendations through a combination of content-based (using the users preferences) and collaborative based techniques. 
Users can also search for locations by name or category. </p> <br></br> <center> <a href="https://youtu.be/ZpA0rp0-JG8" class="btn btn-primary liveBtn" target="_blank" rel="noopener noreferrer" > <i class="fab fa-youtube"></i> Video Demo </a> </center> </div> </div> </div> </div> <div class="row row-eq-height"> <div class="col-lg-6 col-md-6 col-sm-12 colItem"> <div class="jumbotron"> <h2 className="projectTitle">Sticky Notes</h2> <img src={stickyNotes} alt="StickyNotes" class="img-responsive" ></img> <br></br> <div className="projectInformation"> <p> Sticky notes is an application built to allow users to quickly create notes within their browsers. It allows the user to choose the background colour of each note from a drop-down list. The user can edit each note individually as well as delete any note on the page. This application was developed with HTML, CSS and JavaScript. </p> <br></br> <center> <a href="https://amazing-hugle-7b7a9e.netlify.app/" class="btn btn-primary liveBtn" target="_blank" rel="noopener noreferrer" > <i class="fas fa-hand-point-right"></i> Live Site </a> </center> <center> <a href="https://github.com/johnpwajok/Sticky-Notes-Application/" class="btn btn-primary gitBtn" target="_blank" rel="noopener noreferrer" > <i class="fab fa-github"></i> View on GitHub </a> </center> </div> </div> </div> </div> </div> </div> </FadeIn> ); } } <file_sep>import React, { Component } from "react"; //hashlink lib from https://github.com/rafgraph/react-router-hash-link import { HashLink } from "react-router-hash-link"; import styles from "./styles/styles.css"; export default class NavigationBar extends Component { render() { return ( <nav class="navbar sticky-top navbar-expand-lg navbar-dark navbar-custom"> <a class="navbar-brand" href="#"> <NAME> </a> <button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarText" aria-controls="navbarText" aria-expanded="false" aria-label="Toggle navigation" > <span 
class="navbar-toggler-icon"></span> </button> <div class="collapse navbar-collapse justify-content-end" id="navbarText" > <ul class="navbar-nav d-flex"> <li class="nav-item active"> <HashLink class="HashLink" smooth to="/#introSection"> Home </HashLink> </li> <li class="nav-item active"> <HashLink class="HashLink" smooth to="/#skillsSection"> Skills </HashLink> </li> <li class="nav-item active"> <HashLink class="HashLink" smooth to="/#projectsSection"> Projects </HashLink> </li> <li class="nav-item"> <HashLink class="HashLink" smooth to="/#contactSection"> Contact </HashLink> </li> </ul> </div> </nav> ); } }
d5f0161816c5077d4c2f12e3ca405a61e9ed9a3a
[ "JavaScript", "Markdown" ]
5
JavaScript
johnpwajok/Personal-Site
abfcefc0cf6c751aef00a265a7a6e2bca0d73c1d
4651891c3a9e787769df86e564051573e0148040
refs/heads/main
<repo_name>CWCHIANG201803/comp90025_project1<file_sep>/src/kseqalign.cpp // CPP program to solve the sequence alignment // problem. Adapted from https://www.geeksforgeeks.org/sequence-alignment-problem/ #include <sys/time.h> #include <string> #include <cstring> #include <omp.h> #include <iostream> #include "sha512.hh" using namespace std; std::string getMinimumPenalties(std::string *genes, int k, int pxy, int pgap, int *penalties, int di, int dj); int getMinimumPenalty(std::string x, std::string y, int pxy, int pgap, int *xans, int *yans,int di, int dj); /* Examples of sha512 which returns a std::string sw::sha512::calculate("SHA512 of std::string") // hash of a string, or sw::sha512::file(path) // hash of a file specified by its path, or sw::sha512::calculate(&data, sizeof(data)) // hash of any block of data */ // Return current wallclock time, for performance measurement uint64_t GetTimeStamp() { struct timeval tv; gettimeofday(&tv,NULL); return tv.tv_sec*(uint64_t)1000000+tv.tv_usec; } int main(int argc, char **argv){ int di, dj, cur; int misMatchPenalty; int gapPenalty; int k; std::cin >> misMatchPenalty; std::cin >> gapPenalty; std::cin >> k; std::string genes[k]; for(int i=0;i<k;i++){ std::cin >> genes[i]; } di = 0, cur = 0; while(argv[1][cur]){ di = di*10 + (argv[1][cur++]-'0'); } dj = 0, cur = 0; while(argv[2][cur]){ dj = dj*10 + (argv[2][cur++]-'0'); } int numPairs= k*(k-1)/2; int penalties[numPairs]; uint64_t start = GetTimeStamp (); // return all the penalties and the hash of all allignments std::string alignmentHash = getMinimumPenalties(genes, k,misMatchPenalty, gapPenalty, penalties, di, dj); // print the time taken to do the computation printf("Time: %ld us\n", (uint64_t) (GetTimeStamp() - start)); // print the alginment hash std::cout<<alignmentHash<<std::endl; for(int i=0;i<numPairs;i++){ std::cout<<penalties[i] << " "; } std::cout << std::endl; return 0; } int min3(int a, int b, int c) { if (a <= b && a <= c) { return a; } else if (b <= a && b <= 
c) { return b; } else { return c; } } // equivalent of int *dp[width] = new int[height][width] // but works for width not known at compile time. // (Delete structure by delete[] dp[0]; delete[] dp;) int **new2d (int width, int height) { int **dp = new int *[width]; size_t size = width; size *= height; int *dp0 = new int [size]; if (!dp || !dp0) { std::cerr << "getMinimumPenalty: new failed" << std::endl; exit(1); } dp[0] = dp0; for (int i = 1; i < width; i++) dp[i] = dp[i-1] + height; return dp; } std::string getMinimumPenalties(std::string *genes, int k, int pxy, int pgap, int *penalties, int di, int dj) { int probNum=0; std::string alignmentHash=""; for(int i=1;i<k;i++){ for(int j=0;j<i;j++){ std::string gene1 = genes[i]; std::string gene2 = genes[j]; int m = gene1.length(); // length of gene1 int n = gene2.length(); // length of gene2 int l = m+n; int xans[l+1], yans[l+1]; penalties[probNum]=getMinimumPenalty(gene1,gene2,pxy,pgap,xans,yans, di, dj); // Since we have assumed the answer to be n+m long, // we need to remove the extra gaps in the starting // id represents the index from which the arrays // xans, yans are useful int id = 1; int a; for (a = l; a >= 1; a--) { if ((char)yans[a] == '_' && (char)xans[a] == '_') { id = a + 1; break; } } std::string align1=""; std::string align2=""; #pragma omp task shared(align1, xans, a, id, l) for (a = id; a <= l; a++) { align1.append(1,(char)xans[a]); } #pragma omp task shared(align2, yans, a, id, l) for (a = id; a <= l; a++) { align2.append(1,(char)yans[a]); } #pragma omp taskwait std::string align1hash = ""; std::string align2hash = ""; std::string problemhash = ""; #pragma omp task shared(align1hash, align1) align1hash = sw::sha512::calculate(align1); #pragma omp task shared(align2hash, align2) align2hash = sw::sha512::calculate(align2); #pragma omp taskwait problemhash = sw::sha512::calculate(align1hash.append(align2hash)); alignmentHash=sw::sha512::calculate(alignmentHash.append(problemhash)); // Uncomment for 
testing purposes std::cout << penalties[probNum] << std::endl; std::cout << align1 << std::endl; std::cout << align2 << std::endl; std::cout << std::endl; probNum++; } } return alignmentHash; } // function to find out the minimum penalty // return the minimum penalty and put the aligned sequences in xans and yans int getMinimumPenalty(std::string x, std::string y, int pxy, int pgap, int *xans, int *yans, int di, int dj) { int i, j; // intialising variables int m = x.length(); // length of gene1 int n = y.length(); // length of gene2 // table for storing optimal substructure answers int **dp = new2d (m+1, n+1); size_t size = m + 1; size *= n + 1; memset (dp[0], 0, size); // intialising the table #pragma omp task shared(i, m, dp, pgap) for (i = 0; i <= m; i++) { dp[i][0] = i * pgap; } #pragma omp task shared(i, m, dp, pgap) for (i = 0; i <= n; i++) { dp[0][i] = i * pgap; } // calcuting the minimum penalty int width = n + 1; int height = m + 1; int diagonals = 0; diagonals = (width/dj) + (height/di); diagonals += ((height% di) + (width %dj)) > 0 ? 
1 : 0; i = 0, j = 0; int d, iter, length, imax, jmax, ii, jj, iii, jjj; for(d = 0 ; d < diagonals; ++d){ length = min((d+1), height - i); #pragma omp parallel for schedule(dynamic) num_threads(length) shared(dp, i, j, width, height, length, di, dj) private(iii, jjj, ii, jj, imax, jmax, iter) for(iter = 0 ; iter < length; ++iter ){ ii = i + iter*di; jj = j - iter*dj; imax = std::min(ii+di, height); jmax = std::min(jj+dj, width); for(iii = ii; iii < imax; iii++){ for(jjj = jj; jjj < jmax; jjj++){ if(iii > 0 && jjj > 0){ if(x[iii-1] == y[jjj-1]){ dp[iii][jjj] = dp[iii-1][jjj-1]; }else{ dp[iii][jjj] = min3( dp[iii-1][jjj-1]+pxy, dp[iii - 1][jjj] + pgap, dp[iii][jjj - 1] + pgap ); } } } } } j += dj; if( j >= width){ j = width - dj; i += di; } } // Reconstructing the solution int l = n + m; // maximum possible length i = m; j = n; int xpos = l; int ypos = l; while ( !(i == 0 || j == 0)) { if (x[i - 1] == y[j - 1]) { xans[xpos--] = (int)x[i - 1]; yans[ypos--] = (int)y[j - 1]; i--; j--; } else if (dp[i - 1][j - 1] + pxy == dp[i][j]) { xans[xpos--] = (int)x[i - 1]; yans[ypos--] = (int)y[j - 1]; i--; j--; } else if (dp[i - 1][j] + pgap == dp[i][j]) { xans[xpos--] = (int)x[i - 1]; yans[ypos--] = (int)'_'; i--; } else if (dp[i][j - 1] + pgap == dp[i][j]) { xans[xpos--] = (int)'_'; yans[ypos--] = (int)y[j - 1]; j--; } } #pragma omp task shared(xpos, i, xans, x) while (xpos > 0) { if (i > 0) xans[xpos--] = (int)x[--i]; else xans[xpos--] = (int)'_'; } #pragma omp task shared(ypos, j, yans, y) while (ypos > 0) { if (j > 0) yans[ypos--] = (int)y[--j]; else yans[ypos--] = (int)'_'; } int ret = dp[m][n]; #pragma omp taskwait delete[] dp[0]; delete[] dp; return ret; } <file_sep>/src/CMakeLists.txt add_executable( project1_app kseqalign.cpp ) target_link_libraries( project1_app PUBLIC OpenMP::OpenMP_CXX )<file_sep>/chaowei_project1.slurm #!/bin/bash #SBATCH --partition=physical #SBATCH --constraint=physg5 #SBATCH --time=00:15:00 #SBATCH --ntasks=1 #SBATCH --cpus-per-task=4 #SBATCH 
--mem=32G #SBATCH --job-name=chaowei module load gcc module load cmake for i in {1..5};do cat ./data/mseq-big13-example.dat | ./build/project1_app 1000 1000; done <file_sep>/README.md # comp90025_project1 This is the code for the project 1 in comp90025 To start, use cmake to build the project and then run the program Under unix system (with generator GNU make) mkdir build cd build cmake .. make cat ../data/mseq.dat | ./project1_app <file_sep>/CMakeLists.txt cmake_minimum_required(VERSION 3.10 FATAL_ERROR) project(project1 CXX) include(GNUInstallDirs) set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}) message("The cmake binary dir is ${CMAKE_BINARY_DIR}") set(CMAKE_CXX_STANDARD 11) set(CMAKE_CXX_STANDARD_REQUIRED ON) set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O3") find_package(OpenMP) add_subdirectory(src)
1216aefdbf887779b0a3e4e7c84901435435f6dc
[ "Markdown", "CMake", "C++", "Shell" ]
5
C++
CWCHIANG201803/comp90025_project1
41eed5ca0a3aa927ee8aa78c8cf1c5bad835b0d8
2317365860ecf9d3cbc6212f52ab692604dd159c
refs/heads/master
<repo_name>notatestuser/headbang.app<file_sep>/README.md # Headbang.app ![Screenshot](screenshot.png) ## Features * [x] Ultra fast, even with large libraries (>100gB) * [x] Separate client and server components. Run the server at home, stream from everywhere * [x] Standalone web client packaged as a desktop app (using node-webkit) * [ ] Keyboard Shortcuts * [ ] Library management * [ ] Discover * [ ] Radio Mode * [ ] Automatic metadata downloading * [ ] Last.fm * [ ] Artists * [x] Albums * [ ] Discogs * [ ] Genres * [ ] Labels * [ ] fanart.tv * [ ] Artists * [ ] Albums * [ ] Labels ## Running 1. git clone <EMAIL>:knoopx/headbang.app.git headbang-app 2. cd headbang-app 3. bundle 4. rake dist ## Requirements * `npm install -g nodewebkit` * `vendor/ffmpegsumo.so` with [MP3 playback support](https://github.com/rogerwang/node-webkit/wiki/Support-mp3-and-h264-in-video-and-audio-tag) ## Development 1. git clone <EMAIL>:knoopx/headbang.app.git headbang-app 2. cd headbang-app 3. bundle 4. middleman server <file_sep>/Rakefile require 'fileutils' task :dist => :web do puts "Cleaning previous build" FileUtils.rm_r("dist/osx") if File.directory?("dist/osx") FileUtils.mkdir_p("dist/osx") puts "Copying node-webkit skeleton app" FileUtils.cp_r("/usr/local/lib/node_modules/nodewebkit/nodewebkit", "dist/osx/Headbang.app") puts "Copying app.nw" FileUtils.cp_r("build", "dist/osx/Headbang.app/Contents/Resources/app.nw") puts "Copying nw.icns" FileUtils.cp("resources/icon.icns", "dist/osx/Headbang.app/Contents/Resources/nw.icns") if File.exist?("vendor/ffmpegsumo.so") puts "Copying ffmpegsumo.so" FileUtils.cp("vendor/ffmpegsumo.so", "dist/osx/Headbang.app/Contents/Frameworks/node-webkit Framework.framework/Libraries") end system "open dist/osx/Headbang.app" end task :web do puts "Building assets" system "middleman build" end <file_sep>/Gemfile source 'http://rubygems.org' source 'http://rails-assets.org' gem "middleman" gem 'slim' gem 'sass' gem 'bootstrap-sass', require: false gem 
'font-awesome-sass', require: false gem 'rails-assets-angular' gem 'rails-assets-angular-animate' gem 'rails-assets-angular-bootstrap' gem 'rails-assets-angular-route' gem 'rails-assets-angular-ui' gem 'rails-assets-angular-ui-router' gem 'rails-assets-ngstorage' gem 'rails-assets-angular-media-player' gem 'rails-assets-mousetrap' <file_sep>/config.rb set :css_dir, 'stylesheets' set :js_dir, 'javascripts' set :images_dir, 'images' configure :build do set :http_prefix, "app://headbang" end helpers do def image_url(path) asset_url(path, "images") end end
45e1f2d2db0eb5beffa9cc4c7a514ce250993af9
[ "Markdown", "Ruby" ]
4
Markdown
notatestuser/headbang.app
e8de97a79c9b12b598bf449841d4ef5ae180ee3c
61c1eea1d776d30a90214e6b72ebeac49c2233b5
refs/heads/main
<file_sep># command-the-weather ### How do I use this? - Download this repository and open it in the IDE or text editor of your choice. - Open your terminal/command line in this directory. - If you haven't already, download [Node.js](https://nodejs.org/en/download/). - Run Node.js in the command line by typing `node .` - The command `weather in <city>` prints the current weather in the given city to the command line. `weather help` prints out the list of commands in case you forget. - The city parameter can be any city on Earth! Have fun **command**ing the weather 😉! <file_sep>#!/usr/bin/env node const program = require('commander') const { fetchCurrentWeather } = require('./commands') program .version('0.0.1') .description('Command the Weather!') program .command('in <city>') .description('view the current weather in the specified city') .action(city => fetchCurrentWeather(city)) program.parse(process.argv)
f643c6c60bd168c9d6f8d1d82c28770c2d0eff97
[ "Markdown", "JavaScript" ]
2
Markdown
yayabosh/command-the-weather
0627c3da02335b5d7c0dd3624a76a2f4383e7e7c
667585d4e4d92af68a591664befc70be1d3e090f
refs/heads/master
<repo_name>columbiagsapp/books.gsapp.org<file_sep>/README.md #GSAPP Books Website documenting the publications of GSAPP, including journals, magazines, book series and one-off books from across the departments and research centers of the school. A sober site, it winks at the user upon scrolling, revealing a random shape behind the logo. ![GSAPP Books shapes](https://raw.github.com/columbiagsapp/books.gsapp.org/master/docs/scroll.gif) The site opens on an index of all books in the archive. ![GSAPP Books Index by year](https://raw.github.com/columbiagsapp/books.gsapp.org/master/docs/index_by_year.png) The index can also be sorted by title. ![GSAPP Books Index by title](https://raw.github.com/columbiagsapp/books.gsapp.org/master/docs/index_by_title.png) Users can also view by subcategories, like Buell Center books and serials. ![GSAPP Books Serials](https://raw.github.com/columbiagsapp/books.gsapp.org/master/docs/serial.png) Individual book pages feature a synopsis and a series of spreads. 
![GSAPP Books book](https://raw.github.com/columbiagsapp/books.gsapp.org/master/docs/book_McHale.png) ![GSAPP Books book spread](https://raw.github.com/columbiagsapp/books.gsapp.org/master/docs/book_McHale_spread.png) <file_sep>/books-tumblr/misreading/misreading.js $(document).ready(function () { if(!$('body').hasClass('permalink-page')){ var $container = $('#main .brickwall'); var buildWall = function(){ console.log('buildwall(2)'); $container.imagesLoaded( function(){ $container.masonry({ itemSelector: '.post:visible', columnWidth: 400, gutterWidth: 120, isAnimated: false, isFitWidth: true }); }); } var initInfiniteScroll = function(){ $container.infinitescroll({ navSelector : '#page-nav', // selector for the paged navigation nextSelector : '#page-nav a', // selector for the NEXT link (to page 2) itemSelector : '.postwrapper', // selector for all items you'll retrieve loading: { finishedMsg: 'No more pages to load.', img: 'http://i.imgur.com/6RMhx.gif' } }, // trigger Masonry as a callback function( newElements ) { // hide new items while they are loading var $newElems = $( newElements ).css({ opacity: 0 }); $newElems.find('.caption').truncate({max_length: MAX_CAPTION_LENGTH}); $newElems.find('.text-body').truncate({max_length: MAX_TEXTPOST_LENGTH}); $newElems.find('.post.quote .realpost').truncate({max_length: MAX_QUOTEPOST_LENGTH}); //$newElems.truncation(); // ensure that images load before adding to masonry layout $newElems.imagesLoaded(function(){ // show elems now they're ready $newElems.animate({ opacity: 1 }); $container.masonry( 'appended', $newElems, true ); }); } ); } var MAX_CAPTION_LENGTH = 400; var MAX_TEXTPOST_LENGTH = 500; var MAX_QUOTEPOST_LENGTH = 200; var POSTS = 10; var truncate = function(){ $('body:not(.permalink-page) .caption').truncate({max_length: MAX_CAPTION_LENGTH}); $('body:not(.permalink-page) .text-body').truncate({max_length: MAX_TEXTPOST_LENGTH}); $('body:not(.permalink-page) .post.quote').truncate({max_length: 
MAX_QUOTEPOST_LENGTH}); } truncate(); buildWall(); initInfiniteScroll(); } $('.post .caption a').each(function(){ if($(this).children('img').length = 1){ $(this).css('border', 'none'); } }); $('#misreading-filter').bind('click', function(){ $('.misreading').toggle(); $container.masonry( 'reload' ); if($('.misreading').is(':visible')){ $('body').addClass('misreading-visible').removeClass('misreading-hidden'); }else{ $('body').addClass('misreading-hidden').removeClass('misreading-visible'); } return false; }); $('#reading-filter').bind('click', function(){ $('.reading').toggle(); $container.masonry( 'reload' ); if($('.reading').is(':visible')){ $('body').addClass('reading-visible').removeClass('reading-hidden'); }else{ $('body').addClass('reading-hidden').removeClass('reading-visible'); } return false; }); //add reading or misreading class to each post $('.post').each(function(){ var readingClass = ''; $('.tags li a.tag', this).each(function(){ if($(this).text() == 'reading'){ readingClass = 'reading'; }else if($(this).text() == 'misreading'){ readingClass = 'misreading'; } }); if(readingClass.length > 0){ $(this).addClass(readingClass); } }); });
528ca54db03007b2005c9c6d1ba1d251d500dc76
[ "Markdown", "JavaScript" ]
2
Markdown
columbiagsapp/books.gsapp.org
4dd6c71741f0e707727c2ae47abc6711c091f1f0
d941d10f24639d103245ba8eede1892fdd4aa87b
refs/heads/master
<file_sep># Web Video Player ## 简介 一个播放在线视频的 Android Application - 视频来源:”[学习强国](https://www.xuexi.cn/)“ APP - 主题:人与自然 ## 实现 - Fragement - Adapter - SQLiteDatabase - CardView - ImageButton - VideoView ## 展示 - 着陆页 可退出或进入视频列表页。 <img src="img/first_activity.png" width = "324" height = "576" alt="图片名称" /> - 列表页(Phone) 加载网络图片作为封面,可手动刷新;点击列表子项进入播放页面。 <img src="img/list_activity_0.png" width = "324" height = "576" alt="图片名称" /> <img src="img/list_activity_1.png" width = "324" height = "576" alt="图片名称" /> - 列表页(Pad) 左侧视频列表,右侧播放页面。 <img src="img/list_activity_2.png" width = "480" height = "768" alt="图片名称" /> - 播放页(vertical) 轻触屏幕按键可见7s,期间再次轻触,按键不可见;可暂停、继续播放、播放下一个、播放上一个。 <img src="img/play_activity_0.png" width = "324" height = "576" alt="图片名称" /> <img src="img/play_activity_1.png" width = "324" height = "576" alt="图片名称" /> - 播放页(horizontal) 横屏时视频自动全屏,其余同上。 <img src="img/play_activity_2.png" width = "576" height = "324" alt="图片名称" /><file_sep>package com.dry.webvideoplayer; import android.bluetooth.BluetoothDevice; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; import android.widget.Toast; public class VideoDBHelper extends SQLiteOpenHelper { private static final String CREATE_VIDEO_TABLE = "create table video (" + "id integer primary key autoincrement, " + "name text, " + "video_url text, " + "img_url text, " + "type text," + "size text," + "duration text," + "dimensions text)"; private Context context; public VideoDBHelper(Context context, String name, SQLiteDatabase.CursorFactory factory, int version) { super(context, name, factory, version); this.context = context; } @Override public void onCreate(SQLiteDatabase db) { db.execSQL(CREATE_VIDEO_TABLE); Toast.makeText(this.context, "Create video table", Toast.LENGTH_SHORT).show(); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { db.execSQL("drop table if exists video"); onCreate(db); } } <file_sep>package 
com.dry.webvideoplayer; import android.util.Log; public class Video { private int id = -1; private String name = ""; private String videoURL; private String imgURL; private String type = "MP4 video"; private String size = "28.2 MB"; private String duration = "2 minutes 53 seconds"; private String dimensions = "1920 x 1080"; public Video(int id, String name, String videoURL, String imgURL, String type, String size, String duration, String dimensions) { this.id = id; this.name = name; this.videoURL = videoURL; this.imgURL = imgURL; this.type = type; this.size = size; this.duration = duration; this.dimensions = dimensions; Log.v("110", "New an instance of Video"); } public Video(int id, String name, String videoURL, String imgURL) { this.id = id; this.name = name; this.videoURL = videoURL; this.imgURL = imgURL; Log.v("110", "New an instance of Video"); } public int getID() { return this.id; } public String getName() { return this.name; } public String getVideoURL() { return this.videoURL; } public String getImgURL() { return this.imgURL; } public String getType() { return this.type; } public String getSize() { return this.size; } public String getDuration() { return this.duration; } public String getDimensions() { return this.dimensions; } public String getInfo() { return this.type + "\t" + this.size + "\t" + this.duration + "\t" + this.dimensions; } } <file_sep>package com.dry.webvideoplayer; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.AsyncTask; import android.view.View; import android.widget.ImageView; import android.widget.ProgressBar; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; public class ImgLoadAsyncTask extends AsyncTask<String, Void, Bitmap> { private ProgressBar progressBar; private ImageView imageView; public ImgLoadAsyncTask(ProgressBar progressBar, ImageView imageView) { 
    this.progressBar = progressBar;
    this.imageView = imageView;
}

// Worker step (class header is above this chunk): downloads the image at
// params[0] and decodes it into a Bitmap off the UI thread.
@Override
protected Bitmap doInBackground(String... params) {
    Bitmap bitmap = null;      // result to hand back to onPostExecute
    String url = params[0];    // fetch the URL argument
    URLConnection connection;  // network connection object
    InputStream inStream;      // raw data input stream
    try {
        connection = new URL(url).openConnection();
        inStream = connection.getInputStream();  // obtain the input stream
        BufferedInputStream buf = new BufferedInputStream(inStream);  // wrap the stream for decoding
        bitmap = BitmapFactory.decodeStream(buf);
        inStream.close();
        buf.close();
    } catch (MalformedURLException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    // value returned here is delivered to onPostExecute; null if download/decode failed
    return bitmap;
}

// Runs on the UI thread before the download starts.
@Override
protected void onPreExecute() {
    // show the waiting spinner
    this.progressBar.setVisibility(View.VISIBLE);
}

// Runs on the UI thread after doInBackground finishes.
@Override
protected void onPostExecute(Bitmap result) {
    // download finished: hide the waiting spinner and show the image
    this.progressBar.setVisibility(View.GONE);
    this.imageView.setImageBitmap(result);
    // NOTE(review): result may be null when the download failed — confirm the
    // desired behavior for that case.
}
}
<file_sep>package com.dry.webvideoplayer;

import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.VideoView;

/**
 * Fragment that plays a single video in a VideoView, with a tap-to-toggle
 * control overlay containing a play/pause button. When shown, the overlay is
 * scheduled to hide itself 7 seconds later.
 */
public class PlayFragment extends Fragment {

    private View view;                    // root view inflated from R.layout.fragment_play
    private int id;                       // id of the video being played (set via setData)
    private String videoURL;              // URL of the video being played (set via setData)
    private Uri uri;                      // parsed form of videoURL
    private boolean playing = true;       // true while playing; drives the play/pause icon
    private VideoView videoView;
    private LinearLayout videoButtonView; // overlay holding the playback controls
    // private PlayActivityTitle playActivityTitle;
    private ImageButton playPauseButton;
    private Handler handler;              // UI-thread handler used to auto-hide the overlay

    @Override
    public void onResume() {
        super.onResume();
        // uri = Uri.parse(videoURL);
        // videoView.setVideoURI(uri);
        // videoView.start();
        // (Re)attach listeners each time the fragment returns to the foreground.
        videoView.setOnTouchListener(mTouchListener);
        playPauseButton.setOnClickListener(mClickListener);
    }

    @Override
    public void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    // Inflates the layout and caches child views; the control overlay starts
    // hidden until the user taps the video surface.
    @Nullable
    @Override
    public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        view = inflater.inflate(R.layout.fragment_play, container, false);
        View visibilityLayout = view.findViewById(R.id.visibility_layout);
        visibilityLayout.setVisibility(View.VISIBLE);
        // playActivityTitle = (PlayActivityTitle) view.findViewById(R.id.play_activity_title);
        videoView = (VideoView) view.findViewById(R.id.video_view);
        videoButtonView = (LinearLayout) view.findViewById(R.id.video_button_view);
        playPauseButton = (ImageButton) view.findViewById(R.id.play_pause_button);
        handler = new Handler();
        videoButtonView.setVisibility(View.GONE);  // controls hidden until first tap
        return view;
    }

    // Entry point used by the host: supplies the video to play and starts
    // playback immediately. Touches videoView, so it presumably must be called
    // after onCreateView — NOTE(review): confirm caller ordering.
    public void setData(int id, String videoURL) {
        this.id = id;
        this.videoURL = videoURL;
        uri = Uri.parse(videoURL);
        videoView.setVideoURI(uri);
        videoView.start();
        // View visibilityLayout = view.findViewById(R.id.visibility_layout);
        // visibilityLayout.setVisibility(View.VISIBLE);
    }

    // Toggles between play and pause, swapping the button icon to match.
    private View.OnClickListener mClickListener = new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            switch (v.getId()) {
                case R.id.play_pause_button:
                    if (playing) {
                        playPauseButton.setImageDrawable(getResources().getDrawable(R.drawable.ic_play2_48dp));
                        videoView.pause();
                        playing = false;
                    } else {
                        playPauseButton.setImageDrawable(getResources().getDrawable(R.drawable.ic_pause2_48dp));
                        videoView.start();
                        playing = true;
                    }
                    break;
                default:
                    break;
            }
        }
    };

    // Tapping the video surface toggles the control overlay; when shown, a hide
    // is scheduled 7 seconds out.
    // NOTE(review): each tap posts another hide-runnable without cancelling the
    // previous one, so rapid taps queue several delayed hides — consider
    // handler.removeCallbacks(runnable) before postDelayed.
    private View.OnTouchListener mTouchListener = new View.OnTouchListener() {
        @Override
        public boolean onTouch(View v, MotionEvent event) {
            switch (v.getId()) {
                case R.id.video_view:
                    if (videoButtonView.getVisibility() == View.VISIBLE) {
                        videoButtonView.setVisibility(View.GONE);
                    } else {
                        videoButtonView.setVisibility(View.VISIBLE);
                        handler.postDelayed(runnable, 7000);
                    }
                    break;
                default:
                    break;
            }
            // false: do not consume the event
            return false;
        }
    };

    // Delayed task that hides the control overlay; the try/catch guards against
    // the view being gone by the time the delay fires.
    private Runnable runnable = new Runnable() {
        @Override
        public void run() {
            try {
                videoButtonView.setVisibility(View.GONE);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    };
}
16d48662c92ccfbe1709e70dff0fa529258cbd1e
[ "Markdown", "Java" ]
5
Markdown
graycat0918/video-player-app
69ac1eef1d491138f0c2a7891aacc556a2bee470
d46460c8ba677cda22274db981b4012b299cf934
refs/heads/master
<repo_name>a6289413/CustomScrollBarScrollLayout<file_sep>/README.md
# CustomScrollBarScrollLayout
此工程包含两个(scrolllayout包和widget包)自定义view, 两个自定义view都可以自定义ScrollView的ScrollBar的长短, 解决了系统默认ScrollView包裹内容太长导致ScrollBar短的问题
<file_sep>/app/src/main/java/com/example/myapplication/scrolllayout/CustomScrollBarScrollLayout.java
package com.example.myapplication.scrolllayout;

import android.content.Context;
import android.content.res.TypedArray;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import android.view.animation.AlphaAnimation;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.RelativeLayout;

import com.example.myapplication.R;

/**
 * FrameLayout that pairs a ProvideScrollStateScrollView with a custom
 * scroll-bar thumb (an ImageView) whose position is driven manually from
 * scroll callbacks, so the thumb's length is independent of the content
 * height (works around the stock ScrollView thumb becoming very short when
 * the content is tall). The thumb fades out shortly after scrolling stops
 * and reappears when scrolling resumes.
 */
public class CustomScrollBarScrollLayout extends FrameLayout {

    private ImageView mIvScrollBar;                        // the custom scroll-bar thumb
    private ProvideScrollStateScrollView mPsssvScrollView; // inner scroll view reporting scroll state
    private Handler mHandler = new Handler();              // schedules the thumb fade-out
    private int mScrollBarX;                               // fixed horizontal position of the thumb
    private float mScrollViewCanScrollHeight;              // content height minus viewport height
    private float mScrollBarCanScrollHeight;               // viewport height minus thumb height
    private int mScrollBarHeight;
    private int mScrollBarWidth;
    private int mScrollBarMarginScrollView;                // gap between thumb and scroll view (XML attr)

    public CustomScrollBarScrollLayout(@NonNull Context context) {
        super(context);
        initView(context, null, 0);
    }

    public CustomScrollBarScrollLayout(@NonNull Context context, @Nullable AttributeSet attrs) {
        super(context, attrs);
        initView(context, attrs, 0);
    }

    public CustomScrollBarScrollLayout(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        initView(context, attrs, defStyleAttr);
    }

    // Inflates the internal layout, grabs the child views, and reads the custom
    // XML attributes (thumb drawable and thumb-to-scroll-view margin).
    private void initView(Context context, AttributeSet attrs, int defStyleAttr) {
        View view = LayoutInflater.from(context).inflate(R.layout.custom_scrollbar_scroll_layout, this);
        mIvScrollBar = view.findViewById(R.id.iv_scroll_bar);
        mPsssvScrollView = view.findViewById(R.id.psssv_srcoll_view);
        setScollChangedListener();
        if (attrs != null) {
            final TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.CustomScrollBarScrollLayout);
            mIvScrollBar.setImageDrawable(a.getDrawable(R.styleable.CustomScrollBarScrollLayout_scrollBar));
            mScrollBarMarginScrollView = (int) a.getDimension(R.styleable.CustomScrollBarScrollLayout_scrollBarMarginScrollView, 0);
            a.recycle();
        }
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
    }

    // Caches the geometry needed to map scroll offsets to thumb positions and
    // applies the configured left margin to the thumb.
    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        int viewHeight = getMeasuredHeight();
        mScrollBarWidth = mIvScrollBar.getMeasuredWidth();
        mScrollBarHeight = mIvScrollBar.getMeasuredHeight();
        int range = mPsssvScrollView.getChildAt(0).getMeasuredHeight();  // full content height
        mScrollViewCanScrollHeight = range - viewHeight;
        mScrollBarCanScrollHeight = viewHeight - mScrollBarHeight;
        RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) mIvScrollBar.getLayoutParams();
        params.leftMargin = mScrollBarMarginScrollView;
        mIvScrollBar.setLayoutParams(params);
    }

    // Wires the scroll-view callbacks: reposition the thumb proportionally on
    // scroll, show it while scrolling, and fade it out once scrolling goes idle.
    private void setScollChangedListener() {
        mPsssvScrollView.setScrollChangedListener(new ProvideScrollStateScrollView.OnScrollChangedListener() {
            @Override
            public void onScrollChanged(int l, int t, int oldl, int oldt) {
                if (t >= 0 && t <= mScrollViewCanScrollHeight) {
                    // thumbTop / thumbTravel == scrollTop / scrollTravel
                    mIvScrollBar.layout(mScrollBarX,
                            (int) (mScrollBarCanScrollHeight / mScrollViewCanScrollHeight * t),
                            mScrollBarX + mScrollBarWidth,
                            (int) (mScrollBarCanScrollHeight / mScrollViewCanScrollHeight * t + mScrollBarHeight));
                }
            }

            @Override
            public void onShowScrollBar() {
                mHandler.removeCallbacksAndMessages(null);  // cancel any pending fade-out
                if (mIvScrollBar.getVisibility() == View.INVISIBLE) {
                    if (mIvScrollBar.getAnimation() != null) {
                        mIvScrollBar.clearAnimation();
                    }
                    mIvScrollBar.setVisibility(View.VISIBLE);
                }
            }

            @Override
            public void onHideScrollBar() {
                mHandler.removeCallbacksAndMessages(null);
                // fade the thumb out 200ms after scrolling stops
                mHandler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        if (mIvScrollBar.getVisibility() == VISIBLE) {
                            hideOrShowScrollBar(1, 0, 200, INVISIBLE);
                        }
                    }
                }, 200);
            }
        });
    }

    // When the window becomes visible, show the thumb briefly, then fade it out
    // after 2 seconds of inactivity.
    // NOTE(review): mHandler is nulled in onDetachedFromWindow; if this view is
    // ever re-attached, the calls here would NPE — confirm the intended lifecycle.
    @Override
    protected void onWindowVisibilityChanged(int visibility) {
        super.onWindowVisibilityChanged(visibility);
        if (visibility == VISIBLE) {
            if (mIvScrollBar.getAnimation() != null) {
                mIvScrollBar.clearAnimation();
            }
            if (mIvScrollBar.getVisibility() == INVISIBLE) {
                mIvScrollBar.setVisibility(View.VISIBLE);
            }
            mHandler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    hideOrShowScrollBar(1, 0, 200, INVISIBLE);
                }
            }, 2000);
        }
    }

    // Animates the thumb's alpha from `from` to `to` over `duration` ms and
    // leaves it in `viewState` (VISIBLE/INVISIBLE).
    private void hideOrShowScrollBar(int from, int to, int duration, int viewState) {
        AlphaAnimation alphaAnimation = new AlphaAnimation(from, to);
        alphaAnimation.setFillAfter(true);
        alphaAnimation.setDuration(duration);
        mIvScrollBar.setAnimation(alphaAnimation);
        mIvScrollBar.setVisibility(viewState);
    }

    // Called only after the view has finished drawing, so the thumb's final
    // horizontal position can be captured here.
    @Override
    public void onWindowFocusChanged(boolean hasWindowFocus) {
        super.onWindowFocusChanged(hasWindowFocus);
        mScrollBarX = (int) mIvScrollBar.getX();
    }

    // Public API: hands the scrollable content view to the inner scroll view.
    public void addScrollViewContent(View view) {
        mPsssvScrollView.addView(view);
    }

    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        mHandler.removeCallbacksAndMessages(null);  // drop pending fade-outs to avoid leaks
        mHandler = null;
    }
}
a7ecbf5cd4d1640490323404531ea96d446b20f7
[ "Markdown", "Java" ]
2
Markdown
a6289413/CustomScrollBarScrollLayout
0ab6222c9ced79a6782b066d1c0b5155bd2e7e89
9ae4a776e9e275e393fede78367a6d044b68f0b0